# HG changeset patch
# User Sylvain Thénault
# Date 1466079560 -7200
# Node ID 19fcce6dc6d11031e558ffcc28d48a3b82387386
# Parent  814f54d6183b4ac00e6e3a49fdeeddea2b5c47df
# Parent  a4fcee1e9789c280ac011a9cfe2e2018a3fedbc9
backport changes from 3.21

diff -r a4fcee1e9789 -r 19fcce6dc6d1 .hgignore
--- a/.hgignore	Thu Mar 24 09:43:25 2016 +0100
+++ b/.hgignore	Thu Jun 16 14:19:20 2016 +0200
@@ -13,9 +13,7 @@
 ^doc/book/en/apidoc$
 \.old$
 syntax: regexp
-.*/data.*/database/.*\.sqlite
-.*/data.*/database/.*\.config
-.*/data/database/tmpdb.*
+.*/data.*/database/.*
 .*/data/ldapdb/.*
 .*/data/uicache/
 .*/data/cubes/.*/i18n/.*\.po
@@ -24,5 +22,3 @@
 ^doc/book/en/devweb/js_api/
 ^doc/_build
 ^doc/js_api/
-data/pgdb/
-data.*/pgdb.*
diff -r a4fcee1e9789 -r 19fcce6dc6d1 .hgtags
--- a/.hgtags	Thu Mar 24 09:43:25 2016 +0100
+++ b/.hgtags	Thu Jun 16 14:19:20 2016 +0200
@@ -542,3 +542,12 @@
 b3cbbb7690b6e193570ffe4846615d372868a923 3.21.6
 b3cbbb7690b6e193570ffe4846615d372868a923 debian/3.21.6-1
 b3cbbb7690b6e193570ffe4846615d372868a923 centos/3.21.6-1
+de472896fc0a18d6b831e6fed0eeda5921ec522c 3.22.0
+de472896fc0a18d6b831e6fed0eeda5921ec522c debian/3.22.0-1
+de472896fc0a18d6b831e6fed0eeda5921ec522c centos/3.22.0-1
+d0d86803a804854be0a1b2d49079a94d1c193ee9 3.22.1
+d0d86803a804854be0a1b2d49079a94d1c193ee9 debian/3.22.1-1
+d0d86803a804854be0a1b2d49079a94d1c193ee9 centos/3.22.1-1
+1b93ff37755b0588081f6fcb93da0dde772a6adb 3.22.2
+1b93ff37755b0588081f6fcb93da0dde772a6adb debian/3.22.2-1
+1b93ff37755b0588081f6fcb93da0dde772a6adb centos/3.22.2-1
diff -r a4fcee1e9789 -r 19fcce6dc6d1 README
--- a/README	Thu Mar 24 09:43:25 2016 +0100
+++ b/README	Thu Jun 16 14:19:20 2016 +0200
@@ -14,7 +14,7 @@
 Install
 -------
 
-More details at http://docs.cubicweb.org/book/admin/setup
+More details at https://docs.cubicweb.org/book/admin/setup
 
 Getting started
 ---------------
@@ -26,12 +26,12 @@
   cubicweb-ctl start -D myblog
   sensible-browser http://localhost:8080/
 
-Details at http://docs.cubicweb.org/tutorials/base/blog-in-five-minutes
+Details at https://docs.cubicweb.org/tutorials/base/blog-in-five-minutes
 
 Documentation
 -------------
 
-Look in the doc/ subdirectory or read http://docs.cubicweb.org/
+Look in the doc/ subdirectory or read https://docs.cubicweb.org/
 
-It includes the Entypo pictograms by Daniel Bruce — www.entypo.com
+CubicWeb includes the Entypo pictograms by Daniel Bruce — www.entypo.com
diff -r a4fcee1e9789 -r 19fcce6dc6d1 __init__.py
--- a/__init__.py	Thu Mar 24 09:43:25 2016 +0100
+++ b/__init__.py	Thu Jun 16 14:19:20 2016 +0200
@@ -22,23 +22,26 @@
 # ignore the pygments UserWarnings
 import warnings
-import cPickle
 import zlib
 warnings.filterwarnings('ignore', category=UserWarning,
                         message='.*was already imported',
                         module='.*pygments')
 
-import __builtin__
-# '_' is available in builtins to mark internationalized string but should
-# not be used to do the actual translation
-if not hasattr(__builtin__, '_'):
-    __builtin__._ = unicode
+from six import PY2, binary_type, text_type
+from six.moves import builtins
 
 CW_SOFTWARE_ROOT = __path__[0]
 
-import sys, os, logging
-from StringIO import StringIO
+import sys
+import logging
+
+if PY2:
+    # http://bugs.python.org/issue10211
+    from StringIO import StringIO as BytesIO
+else:
+    from io import BytesIO
+
+from six.moves import cPickle as pickle
 
 from logilab.common.deprecation import deprecated
 from logilab.common.logging_ext import set_log_methods
@@ -56,6 +59,14 @@
 from cubicweb._exceptions import *
 from logilab.common.registry import ObjectNotFound, NoSelectableObject, RegistryNotFound
 
+
+# '_' is available to mark internationalized string but should not be used to +# do the actual translation +_ = text_type +if not hasattr(builtins, '_'): + builtins._ = deprecated("[3.22] Use 'from cubicweb import _'")(_) + + # convert eid to the right type, raise ValueError if it's not a valid eid @deprecated('[3.17] typed_eid() was removed. replace it with int() when needed.') def typed_eid(eid): @@ -66,17 +77,21 @@ #import threading #threading.settrace(log_thread) -class Binary(StringIO): - """customize StringIO to make sure we don't use unicode""" - def __init__(self, buf=''): - assert isinstance(buf, (str, buffer, bytearray)), \ - "Binary objects must use raw strings, not %s" % buf.__class__ - StringIO.__init__(self, buf) +class Binary(BytesIO): + """class to hold binary data. Use BytesIO to prevent use of unicode data""" + _allowed_types = (binary_type, bytearray, buffer if PY2 else memoryview) + + def __init__(self, buf=b''): + assert isinstance(buf, self._allowed_types), \ + "Binary objects must use bytes/buffer objects, not %s" % buf.__class__ + # don't call super, BytesIO may be an old-style class (on python < 2.7.4) + BytesIO.__init__(self, buf) def write(self, data): - assert isinstance(data, (str, buffer, bytearray)), \ - "Binary objects must use raw strings, not %s" % data.__class__ - StringIO.write(self, data) + assert isinstance(data, self._allowed_types), \ + "Binary objects must use bytes/buffer objects, not %s" % data.__class__ + # don't call super, BytesIO may be an old-style class (on python < 2.7.4) + BytesIO.write(self, data) def to_file(self, fobj): """write a binary to disk @@ -132,22 +147,22 @@ def zpickle(cls, obj): """ return a Binary containing a gzipped pickle of obj """ retval = cls() - retval.write(zlib.compress(cPickle.dumps(obj, protocol=2))) + retval.write(zlib.compress(pickle.dumps(obj, protocol=2))) return retval def unzpickle(self): """ decompress and loads the stream before returning it """ - return cPickle.loads(zlib.decompress(self.getvalue())) + return pickle.loads(zlib.decompress(self.getvalue())) def check_password(eschema, value): - return isinstance(value, (str, Binary)) + return isinstance(value, (binary_type, Binary)) BASE_CHECKERS['Password'] = check_password def str_or_binary(value): if isinstance(value, Binary): return value - return str(value) + return binary_type(value) BASE_CONVERTERS['Password'] = str_or_binary @@ -255,4 +270,3 @@ not be processed, a memory allocation error occurred during processing, etc. """ - diff -r a4fcee1e9789 -r 19fcce6dc6d1 __pkginfo__.py --- a/__pkginfo__.py Thu Mar 24 09:43:25 2016 +0100 +++ b/__pkginfo__.py Thu Jun 16 14:19:20 2016 +0200 @@ -22,13 +22,13 @@ modname = distname = "cubicweb" -numversion = (3, 21, 6) -version = '.'.join(str(num) for num in numversion) +numversion = (3, 22, 2) +version = '.'.join(str(num) for num in numversion) + '.dev0' description = "a repository of entities / relations for knowledge management" author = "Logilab" author_email = "contact@logilab.fr" -web = 'http://www.cubicweb.org' +web = 'https://www.cubicweb.org' license = 'LGPL' classifiers = [ @@ -39,17 +39,19 @@ ] __depends__ = { + 'six': '>= 1.4.0', 'logilab-common': '>= 0.63.1', 'logilab-mtconverter': '>= 0.8.0', - 'rql': '>= 0.31.2, < 0.34', - 'yams': '>= 0.40.0', + 'rql': '>= 0.34.0', + 'yams': '>= 0.42.0', #gettext # for xgettext, msgcat, etc... 
# web dependencies 'lxml': '', # XXX graphviz # server dependencies - 'logilab-database': '>= 1.13.0, < 1.15', + 'logilab-database': '>= 1.15.0', 'passlib': '', + 'pytz': '', 'Markdown': '' } @@ -61,7 +63,7 @@ 'vobject': '>= 0.6.0', # for ical view 'rdflib': None, # 'pyzmq': None, - 'Twisted': '', + 'Twisted': '< 16.0.0', #'Products.FCKeditor':'', #'SimpleTAL':'>= 4.1.6', } diff -r a4fcee1e9789 -r 19fcce6dc6d1 _exceptions.py --- a/_exceptions.py Thu Mar 24 09:43:25 2016 +0100 +++ b/_exceptions.py Thu Jun 16 14:19:20 2016 +0200 @@ -21,6 +21,8 @@ from warnings import warn +from six import PY3, text_type + from logilab.common.decorators import cachedproperty from yams import ValidationError @@ -30,23 +32,24 @@ class CubicWebException(Exception): """base class for cubicweb server exception""" msg = "" - def __str__(self): + def __unicode__(self): if self.msg: if self.args: return self.msg % tuple(self.args) else: return self.msg else: - return u' '.join(unicode(arg) for arg in self.args) + return u' '.join(text_type(arg) for arg in self.args) + __str__ = __unicode__ if PY3 else lambda self: self.__unicode__().encode('utf-8') class ConfigurationError(CubicWebException): """a misconfiguration error""" class InternalError(CubicWebException): - """base class for exceptions which should not occurs""" + """base class for exceptions which should not occur""" class SecurityError(CubicWebException): - """base class for cubicweb server security exception""" + """base class for cubicweb server security exceptions""" class RepositoryError(CubicWebException): """base class for repository exceptions""" @@ -114,19 +117,19 @@ """raised when a user tries to perform an action without sufficient credentials """ - msg = 'You are not allowed to perform this operation' - msg1 = 'You are not allowed to perform %s operation on %s' + msg = u'You are not allowed to perform this operation' + msg1 = u'You are not allowed to perform %s operation on %s' var = None - def __str__(self): + def __unicode__(self): try: if self.args and len(self.args) == 2: return self.msg1 % self.args if self.args: - return ' '.join(self.args) + return u' '.join(self.args) return self.msg except Exception as ex: - return str(ex) + return text_type(ex) class Forbidden(SecurityError): """raised when a user tries to perform a forbidden action @@ -185,7 +188,7 @@ commit time. :type txuuix: int - :param txuuid: Unique identifier of the partialy undone transaction + :param txuuid: Unique identifier of the partially undone transaction :type errors: list :param errors: List of errors occurred during undoing @@ -204,4 +207,3 @@ # pylint: disable=W0611 from logilab.common.clcommands import BadCommandUsage - diff -r a4fcee1e9789 -r 19fcce6dc6d1 _gcdebug.py --- a/_gcdebug.py Thu Mar 24 09:43:25 2016 +0100 +++ b/_gcdebug.py Thu Jun 16 14:19:20 2016 +0200 @@ -15,6 +15,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
+from __future__ import print_function import gc, types, weakref @@ -68,7 +69,7 @@ except KeyError: ocounters[key] = 1 if isinstance(obj, viewreferrersclasses): - print ' ', obj, referrers(obj, showobjs, maxlevel) + print(' ', obj, referrers(obj, showobjs, maxlevel)) garbage = [repr(obj) for obj in gc.garbage] return counters, ocounters, garbage diff -r a4fcee1e9789 -r 19fcce6dc6d1 crypto.py --- a/crypto.py Thu Mar 24 09:43:25 2016 +0100 +++ b/crypto.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,9 +18,10 @@ """Simple cryptographic routines, based on python-crypto.""" __docformat__ = "restructuredtext en" -from pickle import dumps, loads from base64 import b64encode, b64decode +from six.moves import cPickle as pickle + from Crypto.Cipher import Blowfish @@ -34,7 +35,7 @@ def encrypt(data, seed): - string = dumps(data) + string = pickle.dumps(data) string = string + '*' * (8 - len(string) % 8) string = b64encode(_cypherer(seed).encrypt(string)) return unicode(string) @@ -43,4 +44,4 @@ def decrypt(string, seed): # pickle ignores trailing characters so we do not need to strip them off string = _cypherer(seed).decrypt(b64decode(string)) - return loads(string) + return pickle.loads(string) diff -r a4fcee1e9789 -r 19fcce6dc6d1 cubicweb.spec --- a/cubicweb.spec Thu Mar 24 09:43:25 2016 +0100 +++ b/cubicweb.spec Thu Jun 16 14:19:20 2016 +0200 @@ -5,30 +5,33 @@ %define python python %define __python /usr/bin/python %endif +%{!?python_sitelib: %define python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print get_python_lib()")} Name: cubicweb -Version: 3.21.6 +Version: 3.22.2 Release: logilab.1%{?dist} Summary: CubicWeb is a semantic web application framework -Source0: http://download.logilab.org/pub/cubicweb/cubicweb-%{version}.tar.gz +Source0: https://pypi.python.org/packages/source/c/cubicweb/cubicweb-%{version}.tar.gz License: LGPLv2+ Group: Development/Languages/Python Vendor: Logilab -Url: http://www.cubicweb.org/project/cubicweb +Url: https://www.cubicweb.org/project/cubicweb BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot BuildArch: noarch Requires: %{python} +Requires: %{python}-six >= 1.4.0 Requires: %{python}-logilab-common >= 0.63.1 Requires: %{python}-logilab-mtconverter >= 0.8.0 -Requires: %{python}-rql >= 0.31.2 -Requires: %{python}-yams >= 0.40.0 -Requires: %{python}-logilab-database >= 1.13.0 +Requires: %{python}-rql >= 0.34.0 +Requires: %{python}-yams >= 0.42.0 +Requires: %{python}-logilab-database >= 1.15.0 Requires: %{python}-passlib Requires: %{python}-lxml -Requires: %{python}-twisted-web +Requires: %{python}-twisted-web < 16.0.0 Requires: %{python}-markdown +Requires: pytz # the schema view uses `dot'; at least on el5, png output requires graphviz-gd Requires: graphviz-gd Requires: gettext @@ -55,5 +58,6 @@ %files %defattr(-, root, root) %dir /var/log/cubicweb -/* - +%{_prefix}/share/cubicweb/* +%{python_sitelib}/* +%{_bindir}/* diff -r a4fcee1e9789 -r 19fcce6dc6d1 cwconfig.py --- a/cwconfig.py Thu Mar 24 09:43:25 2016 +0100 +++ b/cwconfig.py Thu Jun 16 14:19:20 2016 +0200 @@ -164,9 +164,9 @@ Directory where pid files will be written """ +from __future__ import print_function __docformat__ = "restructuredtext en" -_ = unicode import sys import os @@ -179,6 +179,8 @@ basename, isdir, dirname, splitext) from warnings import warn, filterwarnings +from six import text_type + from logilab.common.decorators import cached, classproperty from logilab.common.deprecation import deprecated from logilab.common.logging_ext import set_log_methods, 
init_log @@ -186,7 +188,7 @@ ConfigurationMixIn, merge_options) from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP, - ConfigurationError, Binary) + ConfigurationError, Binary, _) from cubicweb.toolsutils import create_dir CONFIGURATIONS = [] @@ -350,7 +352,7 @@ }), ('umask', {'type' : 'int', - 'default': 077, + 'default': 0o077, 'help': 'permission umask for files created by the server', 'group': 'main', 'level': 2, }), @@ -503,7 +505,7 @@ deps = {} else: deps = dict( (x[len('cubicweb-'):], v) - for x, v in gendeps.iteritems() + for x, v in gendeps.items() if x.startswith('cubicweb-')) for depcube in deps: try: @@ -600,7 +602,7 @@ cls.cls_adjust_sys_path() for ctlfile in ('web/webctl.py', 'etwist/twctl.py', 'server/serverctl.py', - 'devtools/devctl.py', 'goa/goactl.py'): + 'devtools/devctl.py',): if exists(join(CW_SOFTWARE_ROOT, ctlfile)): try: load_module_from_file(join(CW_SOFTWARE_ROOT, ctlfile)) @@ -650,7 +652,7 @@ self.adjust_sys_path() self.load_defaults() # will be properly initialized later by _gettext_init - self.translations = {'en': (unicode, lambda ctx, msgid: unicode(msgid) )} + self.translations = {'en': (text_type, lambda ctx, msgid: text_type(msgid) )} self._site_loaded = set() # don't register ReStructured Text directives by simple import, avoid pb # with eg sphinx. @@ -960,7 +962,7 @@ i = 1 while exists(path) and i < 100: # arbitrary limit to avoid infinite loop try: - file(path, 'a') + open(path, 'a') break except IOError: path = '%s-%s.log' % (basepath, i) @@ -994,6 +996,13 @@ rtdir = abspath(os.environ.get('CW_RUNTIME_DIR', default)) return join(rtdir, '%s-%s.pid' % (self.appid, self.name)) + # config -> repository + + def repository(self, vreg=None): + from cubicweb.server.repository import Repository + from cubicweb.server.utils import TasksManager + return Repository(self, TasksManager(), vreg=vreg) + # instance methods used to get instance specific resources ############# def __init__(self, appid, debugmode=False, creating=False): @@ -1001,7 +1010,7 @@ # set to true while creating an instance self.creating = creating super(CubicWebConfiguration, self).__init__(debugmode) - fake_gettext = (unicode, lambda ctx, msgid: unicode(msgid)) + fake_gettext = (text_type, lambda ctx, msgid: text_type(msgid)) for lang in self.available_languages(): self.translations[lang] = fake_gettext self._cubes = None @@ -1043,6 +1052,7 @@ if not isinstance(cubes, list): cubes = list(cubes) self._cubes = self.reorder_cubes(list(self._cubes) + cubes) + self.load_site_cubicweb([self.cube_dir(cube) for cube in cubes]) def main_config_file(self): """return instance's control configuration file""" @@ -1050,7 +1060,8 @@ def save(self): """write down current configuration""" - self.generate_config(open(self.main_config_file(), 'w')) + with open(self.main_config_file(), 'w') as fobj: + self.generate_config(fobj) def check_writeable_uid_directory(self, path): """check given directory path exists, belongs to the user running the @@ -1101,7 +1112,7 @@ version = self.cube_version(pkg) infos.append('%s-%s' % (pkg, version)) infos.append('cubicweb-%s' % str(self.cubicweb_version())) - return md5(';'.join(infos)).hexdigest() + return md5((';'.join(infos)).encode('ascii')).hexdigest() def load_configuration(self, **kw): """load instance's configuration files""" @@ -1156,7 +1167,7 @@ def _gettext_init(self): """set language for gettext""" - from cubicweb.gettext import translation + from cubicweb.cwgettext import translation path = join(self.apphome, 'i18n') for language in self.available_languages(): 
self.info("loading language %s", language) @@ -1181,13 +1192,8 @@ def set_sources_mode(self, sources): if not 'all' in sources: - print 'warning: ignoring specified sources, requires a repository '\ - 'configuration' - - def migration_handler(self): - """return a migration handler instance""" - from cubicweb.migration import MigrationHelper - return MigrationHelper(self, verbosity=self.verbosity) + print('warning: ignoring specified sources, requires a repository ' + 'configuration') def i18ncompile(self, langs=None): from cubicweb import i18n diff -r a4fcee1e9789 -r 19fcce6dc6d1 cwctl.py --- a/cwctl.py Thu Mar 24 09:43:25 2016 +0100 +++ b/cwctl.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,6 +18,7 @@ """the cubicweb-ctl tool, based on logilab.common.clcommands to provide a pluggable commands system. """ +from __future__ import print_function __docformat__ = "restructuredtext en" @@ -28,7 +29,6 @@ from warnings import warn, filterwarnings from os import remove, listdir, system, pathsep from os.path import exists, join, isfile, isdir, dirname, abspath -from urlparse import urlparse try: from os import kill, getpgid @@ -38,9 +38,12 @@ def getpgid(): """win32 getpgid implementation""" +from six.moves.urllib.parse import urlparse + from logilab.common.clcommands import CommandLine from logilab.common.shellutils import ASK from logilab.common.configuration import merge_options +from logilab.common.deprecation import deprecated from cubicweb import ConfigurationError, ExecutionError, BadCommandUsage from cubicweb.utils import support_args @@ -101,38 +104,19 @@ ) actionverb = None + @deprecated('[3.22] startorder is not used any more') def ordered_instances(self): - """return instances in the order in which they should be started, - considering $REGISTRY_DIR/startorder file if it exists (useful when - some instances depends on another as external source). - - Instance used by another one should appears first in the file (one - instance per line) + """return list of known instances """ regdir = cwcfg.instances_dir() - _allinstances = list_instances(regdir) - if isfile(join(regdir, 'startorder')): - allinstances = [] - for line in file(join(regdir, 'startorder')): - line = line.strip() - if line and not line.startswith('#'): - try: - _allinstances.remove(line) - allinstances.append(line) - except ValueError: - print ('ERROR: startorder file contains unexistant ' - 'instance %s' % line) - allinstances += _allinstances - else: - allinstances = _allinstances - return allinstances + return list_instances(regdir) def run(self, args): """run the _method on each argument (a list of instance identifiers) """ if not args: - args = self.ordered_instances() + args = list_instances(cwcfg.instances_dir()) try: askconfirm = not self.config.force except AttributeError: @@ -146,7 +130,7 @@ status = 0 for appid in args: if askconfirm: - print '*'*72 + print('*'*72) if not ASK.confirm('%s instance %r ?' % (self.name, appid)): continue try: @@ -184,13 +168,13 @@ forkcmd = None for appid in args: if askconfirm: - print '*'*72 + print('*'*72) if not ASK.confirm('%s instance %r ?' 
% (self.name, appid)): continue if forkcmd: status = system('%s %s' % (forkcmd, appid)) if status: - print '%s exited with status %s' % (forkcmd, status) + print('%s exited with status %s' % (forkcmd, status)) else: self.run_arg(appid) @@ -224,19 +208,19 @@ from cubicweb.migration import ConfigurationProblem if mode == 'all': - print 'CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode) - print + print('CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode)) + print() if mode in ('all', 'config', 'configurations'): - print 'Available configurations:' + print('Available configurations:') for config in CONFIGURATIONS: - print '*', config.name + print('*', config.name) for line in config.__doc__.splitlines(): line = line.strip() if not line: continue - print ' ', line - print + print(' ', line) + print() if mode in ('all', 'cubes'): cfgpb = ConfigurationProblem(cwcfg) @@ -244,11 +228,11 @@ cubesdir = pathsep.join(cwcfg.cubes_search_path()) namesize = max(len(x) for x in cwcfg.available_cubes()) except ConfigurationError as ex: - print 'No cubes available:', ex + print('No cubes available:', ex) except ValueError: - print 'No cubes available in %s' % cubesdir + print('No cubes available in %s' % cubesdir) else: - print 'Available cubes (%s):' % cubesdir + print('Available cubes (%s):' % cubesdir) for cube in cwcfg.available_cubes(): try: tinfo = cwcfg.cube_pkginfo(cube) @@ -257,59 +241,59 @@ except (ConfigurationError, AttributeError) as ex: tinfo = None tversion = '[missing cube information: %s]' % ex - print '* %s %s' % (cube.ljust(namesize), tversion) + print('* %s %s' % (cube.ljust(namesize), tversion)) if self.config.verbose: if tinfo: descr = getattr(tinfo, 'description', '') if not descr: descr = tinfo.__doc__ if descr: - print ' '+ ' \n'.join(descr.splitlines()) + print(' '+ ' \n'.join(descr.splitlines())) modes = detect_available_modes(cwcfg.cube_dir(cube)) - print ' available modes: %s' % ', '.join(modes) - print + print(' available modes: %s' % ', '.join(modes)) + print() if mode in ('all', 'instances'): try: regdir = cwcfg.instances_dir() except ConfigurationError as ex: - print 'No instance available:', ex - print + print('No instance available:', ex) + print() return instances = list_instances(regdir) if instances: - print 'Available instances (%s):' % regdir + print('Available instances (%s):' % regdir) for appid in instances: modes = cwcfg.possible_configurations(appid) if not modes: - print '* %s (BROKEN instance, no configuration found)' % appid + print('* %s (BROKEN instance, no configuration found)' % appid) continue - print '* %s (%s)' % (appid, ', '.join(modes)) + print('* %s (%s)' % (appid, ', '.join(modes))) try: config = cwcfg.config_for(appid, modes[0]) except Exception as exc: - print ' (BROKEN instance, %s)' % exc + print(' (BROKEN instance, %s)' % exc) continue else: - print 'No instance available in %s' % regdir - print + print('No instance available in %s' % regdir) + print() if mode == 'all': # configuration management problem solving cfgpb.solve() if cfgpb.warnings: - print 'Warnings:\n', '\n'.join('* '+txt for txt in cfgpb.warnings) + print('Warnings:\n', '\n'.join('* '+txt for txt in cfgpb.warnings)) if cfgpb.errors: - print 'Errors:' + print('Errors:') for op, cube, version, src in cfgpb.errors: if op == 'add': - print '* cube', cube, + print('* cube', cube, end=' ') if version: - print ' version', version, - print 'is not installed, but required by %s' % src + print(' version', version, end=' ') + print('is not installed, but required by 
%s' % src) else: - print '* cube %s version %s is installed, but version %s is required by %s' % ( - cube, cfgpb.cubes[cube], version, src) + print('* cube %s version %s is installed, but version %s is required by %s' % ( + cube, cfgpb.cubes[cube], version, src)) def check_options_consistency(config): if config.automatic and config.config_level > 0: @@ -380,20 +364,20 @@ templdirs = [cwcfg.cube_dir(cube) for cube in cubes] except ConfigurationError as ex: - print ex - print '\navailable cubes:', - print ', '.join(cwcfg.available_cubes()) + print(ex) + print('\navailable cubes:', end=' ') + print(', '.join(cwcfg.available_cubes())) return # create the registry directory for this instance - print '\n'+underline_title('Creating the instance %s' % appid) + print('\n'+underline_title('Creating the instance %s' % appid)) create_dir(config.apphome) # cubicweb-ctl configuration if not self.config.automatic: - print '\n'+underline_title('Configuring the instance (%s.conf)' - % configname) + print('\n'+underline_title('Configuring the instance (%s.conf)' + % configname)) config.input_config('main', self.config.config_level) # configuration'specific stuff - print + print() helper.bootstrap(cubes, self.config.automatic, self.config.config_level) # input for cubes specific options if not self.config.automatic: @@ -402,23 +386,23 @@ and odict.get('level') <= self.config.config_level) for section in sections: if section not in ('main', 'email', 'web'): - print '\n' + underline_title('%s options' % section) + print('\n' + underline_title('%s options' % section)) config.input_config(section, self.config.config_level) # write down configuration config.save() self._handle_win32(config, appid) - print '-> generated config %s' % config.main_config_file() + print('-> generated config %s' % config.main_config_file()) # handle i18n files structure # in the first cube given from cubicweb import i18n langs = [lang for lang, _ in i18n.available_catalogs(join(templdirs[0], 'i18n'))] errors = config.i18ncompile(langs) if errors: - print '\n'.join(errors) + print('\n'.join(errors)) if self.config.automatic \ or not ASK.confirm('error while compiling message catalogs, ' 'continue anyway ?'): - print 'creation not completed' + print('creation not completed') return # create the additional data directory for this instance if config.appdatahome != config.apphome: # true in dev mode @@ -427,9 +411,9 @@ if config['uid']: from logilab.common.shellutils import chown # this directory should be owned by the uid of the server process - print 'set %s as owner of the data directory' % config['uid'] + print('set %s as owner of the data directory' % config['uid']) chown(config.appdatahome, config['uid']) - print '\n-> creation done for %s\n' % repr(config.apphome)[1:-1] + print('\n-> creation done for %s\n' % repr(config.apphome)[1:-1]) if not self.config.no_db_create: helper.postcreate(self.config.automatic, self.config.config_level) @@ -487,7 +471,7 @@ if ex.errno != errno.ENOENT: raise confignames = ', '.join([config.name for config in configs]) - print '-> instance %s (%s) deleted.' % (appid, confignames) + print('-> instance %s (%s) deleted.' % (appid, confignames)) # instance commands ######################################################## @@ -551,7 +535,7 @@ the --force option." 
raise ExecutionError(msg % (appid, pidf)) if helper.start_server(config) == 1: - print 'instance %s started' % appid + print('instance %s started' % appid) def init_cmdline_log_threshold(config, loglevel): @@ -570,11 +554,6 @@ name = 'stop' actionverb = 'stopped' - def ordered_instances(self): - instances = super(StopInstanceCommand, self).ordered_instances() - instances.reverse() - return instances - def stop_instance(self, appid): """stop the instance's server""" config = cwcfg.config_for(appid) @@ -606,7 +585,7 @@ except OSError: # already removed by twistd pass - print 'instance %s stopped' % appid + print('instance %s stopped' % appid) class RestartInstanceCommand(StartInstanceCommand): @@ -619,29 +598,6 @@ name = 'restart' actionverb = 'restarted' - def run_args(self, args, askconfirm): - regdir = cwcfg.instances_dir() - if not isfile(join(regdir, 'startorder')) or len(args) <= 1: - # no specific startorder - super(RestartInstanceCommand, self).run_args(args, askconfirm) - return - print ('some specific start order is specified, will first stop all ' - 'instances then restart them.') - # get instances in startorder - for appid in args: - if askconfirm: - print '*'*72 - if not ASK.confirm('%s instance %r ?' % (self.name, appid)): - continue - StopInstanceCommand(self.logger).stop_instance(appid) - forkcmd = [w for w in sys.argv if not w in args] - forkcmd[1] = 'start' - forkcmd = ' '.join(forkcmd) - for appid in reversed(args): - status = system('%s %s' % (forkcmd, appid)) - if status: - sys.exit(status) - def restart_instance(self, appid): StopInstanceCommand(self.logger).stop_instance(appid) self.start_instance(appid) @@ -677,14 +633,14 @@ status = 0 for mode in cwcfg.possible_configurations(appid): config = cwcfg.config_for(appid, mode) - print '[%s-%s]' % (appid, mode), + print('[%s-%s]' % (appid, mode), end=' ') try: pidf = config['pid-file'] except KeyError: - print 'buggy instance, pid file not specified' + print('buggy instance, pid file not specified') continue if not exists(pidf): - print "doesn't seem to be running" + print("doesn't seem to be running") status = 1 continue pid = int(open(pidf).read().strip()) @@ -692,10 +648,10 @@ try: getpgid(pid) except OSError: - print "should be running with pid %s but the process can not be found" % pid + print("should be running with pid %s but the process can not be found" % pid) status = 1 continue - print "running with pid %s" % (pid) + print("running with pid %s" % (pid)) return status class UpgradeInstanceCommand(InstanceCommandFork): @@ -756,7 +712,7 @@ ) def upgrade_instance(self, appid): - print '\n' + underline_title('Upgrading the instance %s' % appid) + print('\n' + underline_title('Upgrading the instance %s' % appid)) from logilab.common.changelog import Version config = cwcfg.config_for(appid) instance_running = exists(config['pid-file']) @@ -767,11 +723,11 @@ set_sources_mode(self.config.ext_sources or ('migration',)) # get instance and installed versions for the server and the componants mih = config.migration_handler() - repo = mih.repo_connect() + repo = mih.repo vcconf = repo.get_versions() helper = self.config_helper(config, required=False) if self.config.force_cube_version: - for cube, version in self.config.force_cube_version.iteritems(): + for cube, version in self.config.force_cube_version.items(): vcconf[cube] = Version(version) toupgrade = [] for cube in config.cubes(): @@ -797,30 +753,30 @@ # run cubicweb/componants migration scripts if self.config.fs_only or toupgrade: for cube, fromversion, toversion in 
toupgrade: - print '-> migration needed from %s to %s for %s' % (fromversion, toversion, cube) + print('-> migration needed from %s to %s for %s' % (fromversion, toversion, cube)) with mih.cnx: with mih.cnx.security_enabled(False, False): mih.migrate(vcconf, reversed(toupgrade), self.config) else: - print '-> no data migration needed for instance %s.' % appid + print('-> no data migration needed for instance %s.' % appid) # rewrite main configuration file mih.rewrite_configuration() mih.shutdown() # handle i18n upgrade if not self.i18nupgrade(config): return - print + print() if helper: helper.postupgrade(repo) - print '-> instance migrated.' + print('-> instance migrated.') if instance_running and not (CWDEV or self.config.nostartstop): # restart instance through fork to get a proper environment, avoid # uicfg pb (and probably gettext catalogs, to check...) forkcmd = '%s start %s' % (sys.argv[0], appid) status = system(forkcmd) if status: - print '%s exited with status %s' % (forkcmd, status) - print + print('%s exited with status %s' % (forkcmd, status)) + print() def i18nupgrade(self, config): # handle i18n upgrade: @@ -832,10 +788,10 @@ langs = [lang for lang, _ in i18n.available_catalogs(join(templdir, 'i18n'))] errors = config.i18ncompile(langs) if errors: - print '\n'.join(errors) + print('\n'.join(errors)) if not ASK.confirm('Error while compiling message catalogs, ' 'continue anyway?'): - print '-> migration not completed.' + print('-> migration not completed.') return False return True @@ -856,10 +812,9 @@ config.quick_start = True if hasattr(config, 'set_sources_mode'): config.set_sources_mode(('migration',)) - repo = config.migration_handler().repo_connect() - vcconf = repo.get_versions() + vcconf = config.repository().get_versions() for key in sorted(vcconf): - print key+': %s.%s.%s' % vcconf[key] + print(key+': %s.%s.%s' % vcconf[key]) class ShellCommand(Command): """Run an interactive migration shell on an instance. This is a python shell @@ -940,9 +895,9 @@ repo = get_repository(appuri) cnx = connect(repo, login=login, password=pwd, mulcnx=False) except AuthenticationError as ex: - print ex + print(ex) except (KeyboardInterrupt, EOFError): - print + print() sys.exit(0) else: break @@ -1003,7 +958,7 @@ config.init_cubes(repo.get_cubes()) errors = config.i18ncompile() if errors: - print '\n'.join(errors) + print('\n'.join(errors)) class ListInstancesCommand(Command): @@ -1015,7 +970,7 @@ """run the command with its specific arguments""" regdir = cwcfg.instances_dir() for appid in sorted(listdir(regdir)): - print appid + print(appid) class ListCubesCommand(Command): @@ -1026,7 +981,7 @@ def run(self, args): """run the command with its specific arguments""" for cube in cwcfg.available_cubes(): - print cube + print(cube) class ConfigureInstanceCommand(InstanceCommand): """Configure instance. 
@@ -1048,7 +1003,7 @@ def configure_instance(self, appid): if self.config.param is not None: appcfg = cwcfg.config_for(appid) - for key, value in self.config.param.iteritems(): + for key, value in self.config.param.items(): try: appcfg.global_set_option(key, value) except KeyError: @@ -1059,8 +1014,12 @@ # WSGI ######### WSGI_CHOICES = {} -from cubicweb.wsgi import server as stdlib_server -WSGI_CHOICES['stdlib'] = stdlib_server +try: + from cubicweb.wsgi import server as stdlib_server +except ImportError: + pass +else: + WSGI_CHOICES['stdlib'] = stdlib_server try: from cubicweb.wsgi import wz except ImportError: @@ -1078,51 +1037,51 @@ def wsgichoices(): return tuple(WSGI_CHOICES) - -class WSGIStartHandler(InstanceCommand): - """Start an interactive wsgi server """ - name = 'wsgi' - actionverb = 'started' - arguments = '' +if WSGI_CHOICES: + class WSGIStartHandler(InstanceCommand): + """Start an interactive wsgi server """ + name = 'wsgi' + actionverb = 'started' + arguments = '' - @property - def options(self): - return ( - ("debug", - {'short': 'D', 'action': 'store_true', - 'default': False, - 'help': 'start server in debug mode.'}), - ('method', - {'short': 'm', - 'type': 'choice', - 'metavar': '', - 'default': 'stdlib', - 'choices': wsgichoices(), - 'help': 'wsgi utility/method'}), - ('loglevel', - {'short': 'l', - 'type': 'choice', - 'metavar': '', - 'default': None, - 'choices': ('debug', 'info', 'warning', 'error'), - 'help': 'debug if -D is set, error otherwise', - }), - ) + @property + def options(self): + return ( + ("debug", + {'short': 'D', 'action': 'store_true', + 'default': False, + 'help': 'start server in debug mode.'}), + ('method', + {'short': 'm', + 'type': 'choice', + 'metavar': '', + 'default': 'stdlib', + 'choices': wsgichoices(), + 'help': 'wsgi utility/method'}), + ('loglevel', + {'short': 'l', + 'type': 'choice', + 'metavar': '', + 'default': None, + 'choices': ('debug', 'info', 'warning', 'error'), + 'help': 'debug if -D is set, error otherwise', + }), + ) - def wsgi_instance(self, appid): - config = cwcfg.config_for(appid, debugmode=self['debug']) - init_cmdline_log_threshold(config, self['loglevel']) - assert config.name == 'all-in-one' - meth = self['method'] - server = WSGI_CHOICES[meth] - return server.run(config) + def wsgi_instance(self, appid): + config = cwcfg.config_for(appid, debugmode=self['debug']) + init_cmdline_log_threshold(config, self['loglevel']) + assert config.name == 'all-in-one' + meth = self['method'] + server = WSGI_CHOICES[meth] + return server.run(config) + CWCTL.register(WSGIStartHandler) for cmdcls in (ListCommand, CreateInstanceCommand, DeleteInstanceCommand, StartInstanceCommand, StopInstanceCommand, RestartInstanceCommand, - WSGIStartHandler, ReloadConfigurationCommand, StatusCommand, UpgradeInstanceCommand, ListVersionsInstanceCommand, @@ -1138,17 +1097,15 @@ def run(args): """command line tool""" import os - sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0) - sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 0) filterwarnings('default', category=DeprecationWarning) cwcfg.load_cwctl_plugins() try: CWCTL.run(args) except ConfigurationError as err: - print 'ERROR: ', err + print('ERROR: ', err) sys.exit(1) except ExecutionError as err: - print err + print(err) sys.exit(2) if __name__ == '__main__': diff -r a4fcee1e9789 -r 19fcce6dc6d1 cwgettext.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/cwgettext.py Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,118 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+
+import gettext
+import locale
+
+class cwGNUTranslations(gettext.GNUTranslations):
+    # The encoding of a msgctxt and a msgid in a .mo file is
+    # msgctxt + "\x04" + msgid (gettext version >= 0.15)
+    CONTEXT_ENCODING = "%s\x04%s"
+
+    def pgettext(self, context, message):
+        ctxt_msg_id = self.CONTEXT_ENCODING % (context, message)
+        missing = object()
+        tmsg = self._catalog.get(ctxt_msg_id, missing)
+        if tmsg is missing:
+            if self._fallback:
+                return self._fallback.pgettext(context, message)
+            return message
+        # Encode the Unicode tmsg back to an 8-bit string, if possible
+        if self._output_charset:
+            return tmsg.encode(self._output_charset)
+        elif self._charset:
+            return tmsg.encode(self._charset)
+        return tmsg
+
+    def lpgettext(self, context, message):
+        ctxt_msg_id = self.CONTEXT_ENCODING % (context, message)
+        missing = object()
+        tmsg = self._catalog.get(ctxt_msg_id, missing)
+        if tmsg is missing:
+            if self._fallback:
+                return self._fallback.lpgettext(context, message)
+            return message
+        if self._output_charset:
+            return tmsg.encode(self._output_charset)
+        return tmsg.encode(locale.getpreferredencoding())
+
+    def npgettext(self, context, msgid1, msgid2, n):
+        ctxt_msg_id = self.CONTEXT_ENCODING % (context, msgid1)
+        try:
+            tmsg = self._catalog[(ctxt_msg_id, self.plural(n))]
+            if self._output_charset:
+                return tmsg.encode(self._output_charset)
+            elif self._charset:
+                return tmsg.encode(self._charset)
+            return tmsg
+        except KeyError:
+            if self._fallback:
+                return self._fallback.npgettext(context, msgid1, msgid2, n)
+            if n == 1:
+                return msgid1
+            else:
+                return msgid2
+
+    def lnpgettext(self, context, msgid1, msgid2, n):
+        ctxt_msg_id = self.CONTEXT_ENCODING % (context, msgid1)
+        try:
+            tmsg = self._catalog[(ctxt_msg_id, self.plural(n))]
+            if self._output_charset:
+                return tmsg.encode(self._output_charset)
+            return tmsg.encode(locale.getpreferredencoding())
+        except KeyError:
+            if self._fallback:
+                return self._fallback.lnpgettext(context, msgid1, msgid2, n)
+            if n == 1:
+                return msgid1
+            else:
+                return msgid2
+
+    def upgettext(self, context, message):
+        ctxt_message_id = self.CONTEXT_ENCODING % (context, message)
+        missing = object()
+        tmsg = self._catalog.get(ctxt_message_id, missing)
+        if tmsg is missing:
+            # XXX logilab patch for compat w/ catalog generated by cw < 3.5
+            return self.ugettext(message)
+            if self._fallback:
+                return self._fallback.upgettext(context, message)
+            return unicode(message)
+        return tmsg
+
+    def unpgettext(self, context, msgid1, msgid2, n):
+        ctxt_message_id = self.CONTEXT_ENCODING % (context, msgid1)
+        try:
+            tmsg = self._catalog[(ctxt_message_id, self.plural(n))]
+        except KeyError:
+            if self._fallback:
+                return self._fallback.unpgettext(context, msgid1, msgid2, n)
+            if n == 1:
+                tmsg = unicode(msgid1)
+            else:
+                tmsg = unicode(msgid2)
+        return tmsg
+
+
+def translation(domain, localedir=None, languages=None,
+                class_=None, fallback=False, codeset=None):
+    if class_ is None:
+        class_ = cwGNUTranslations
+    return gettext.translation(domain, localedir=localedir,
+                               languages=languages, class_=class_,
+                               fallback=fallback, codeset=codeset)
diff -r a4fcee1e9789 -r 19fcce6dc6d1 cwvreg.py
--- a/cwvreg.py	Thu Mar 24 09:43:25 2016 +0100
+++ b/cwvreg.py	Thu Jun 16 14:19:20 2016 +0200
@@ -20,7 +20,7 @@
 """
 __docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
 
 import sys
 from os.path import join, dirname, realpath
@@ -28,6 +28,8 @@
 from datetime import datetime, date, time, timedelta
 from functools import reduce
 
+from six import text_type, binary_type
+
 from logilab.common.decorators import cached, clear_cache
 from logilab.common.deprecation import deprecated, class_deprecated
 from logilab.common.modutils import cleanup_sys_modules
@@ -221,9 +223,9 @@
         """
         obj = self.select(oid, req, rset=rset, **kwargs)
         res = obj.render(**kwargs)
-        if isinstance(res, unicode):
+        if isinstance(res, text_type):
             return res.encode(req.encoding)
-        assert isinstance(res, str)
+        assert isinstance(res, binary_type)
         return res
 
     def possible_views(self, req, rset=None, **kwargs):
@@ -382,7 +384,7 @@
         return [item for item in super(CWRegistryStore, self).items()
                 if not item[0] in ('propertydefs', 'propertyvalues')]
 
     def iteritems(self):
-        return (item for item in super(CWRegistryStore, self).iteritems()
+        return (item for item in super(CWRegistryStore, self).items()
                 if not item[0] in ('propertydefs', 'propertyvalues'))
 
     def values(self):
@@ -492,7 +494,7 @@
         """
         self.schema = schema
         for registry, regcontent in self.items():
-            for objects in regcontent.itervalues():
+            for objects in regcontent.values():
                 for obj in objects:
                     obj.schema = schema
@@ -543,7 +545,7 @@
                 self.unregister(obj)
         super(CWRegistryStore, self).initialization_completed()
         if 'uicfg' in self: # 'uicfg' is not loaded in a pure repository mode
-            for rtags in self['uicfg'].itervalues():
+            for rtags in self['uicfg'].values():
                 for rtag in rtags:
                     # don't check rtags if we don't want to cleanup_unused_appobjects
                     rtag.init(self.schema, check=self.config.cleanup_unused_appobjects)
@@ -576,7 +578,7 @@
         if withsitewide:
             return sorted(k for k in self['propertydefs']
                           if not k.startswith('sources.'))
-        return sorted(k for k, kd in self['propertydefs'].iteritems()
+        return sorted(k for k, kd in self['propertydefs'].items()
                       if not kd['sitewide'] and not k.startswith('sources.'))
 
     def register_property(self, key, type, help, default=None, vocabulary=None,
@@ -653,4 +655,3 @@
     'TZTime': time,
     'Interval': timedelta,
     })
-
diff -r a4fcee1e9789 -r 19fcce6dc6d1 dataimport/csv.py
--- a/dataimport/csv.py	Thu Mar 24 09:43:25 2016 +0100
+++ b/dataimport/csv.py	Thu Jun 16 14:19:20 2016 +0200
@@ -16,18 +16,20 @@
 # You should have received a copy of the GNU Lesser General Public License along
 # with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""Functions to help importing CSV data""" +from __future__ import absolute_import, print_function -from __future__ import absolute_import - +import codecs import csv as csvmod import warnings import os.path as osp +from six import PY2, PY3, string_types + from logilab.common import shellutils def count_lines(stream_or_filename): - if isinstance(stream_or_filename, basestring): + if isinstance(stream_or_filename, string_types): f = open(stream_or_filename) else: f = stream_or_filename @@ -48,10 +50,8 @@ if quote is not None: quotechar = quote warnings.warn("[3.20] 'quote' kwarg is deprecated, use 'quotechar' instead") - if isinstance(stream_or_path, basestring): - if not osp.exists(stream_or_path): - raise Exception("file doesn't exists: %s" % stream_or_path) - stream = open(stream_or_path) + if isinstance(stream_or_path, string_types): + stream = open(stream_or_path, 'rb') else: stream = stream_or_path rowcount = count_lines(stream) @@ -64,7 +64,7 @@ yield urow if withpb: pb.update() - print ' %s rows imported' % rowcount + print(' %s rows imported' % rowcount) def ucsvreader(stream, encoding='utf-8', delimiter=',', quotechar='"', @@ -77,6 +77,8 @@ separators) will be skipped. This is useful for Excel exports which may be full of such lines. """ + if PY3: + stream = codecs.getreader(encoding)(stream) if separator is not None: delimiter = separator warnings.warn("[3.20] 'separator' kwarg is deprecated, use 'delimiter' instead") @@ -86,28 +88,33 @@ it = iter(csvmod.reader(stream, delimiter=delimiter, quotechar=quotechar)) if not ignore_errors: if skipfirst: - it.next() + next(it) for row in it: - decoded = [item.decode(encoding) for item in row] + if PY2: + decoded = [item.decode(encoding) for item in row] + else: + decoded = row if not skip_empty or any(decoded): yield decoded else: if skipfirst: try: - row = it.next() + row = next(it) except csvmod.Error: pass # Safe version, that can cope with error in CSV file while True: try: - row = it.next() + row = next(it) # End of CSV, break except StopIteration: break # Error in CSV, ignore line and continue except csvmod.Error: continue - decoded = [item.decode(encoding) for item in row] + if PY2: + decoded = [item.decode(encoding) for item in row] + else: + decoded = row if not skip_empty or any(decoded): yield decoded - diff -r a4fcee1e9789 -r 19fcce6dc6d1 dataimport/deprecated.py --- a/dataimport/deprecated.py Thu Mar 24 09:43:25 2016 +0100 +++ b/dataimport/deprecated.py Thu Jun 16 14:19:20 2016 +0200 @@ -58,10 +58,13 @@ .. BUG file with one column are not parsable .. 
TODO rollback() invocation is not possible yet """ +from __future__ import print_function import sys import traceback -from StringIO import StringIO +from io import StringIO + +from six import add_metaclass from logilab.common import attrdict, shellutils from logilab.common.date import strptime @@ -78,7 +81,7 @@ >>> data = lazytable(ucsvreader(open(filename))) """ - header = reader.next() + header = next(reader) for row in reader: yield dict(zip(header, row)) @@ -103,7 +106,7 @@ @deprecated('[3.21] deprecated') def tell(msg): - print msg + print(msg) @deprecated('[3.21] deprecated') @@ -115,9 +118,9 @@ return answer == 'Y' +@add_metaclass(class_deprecated) class catch_error(object): """Helper for @contextmanager decorator.""" - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.21] deprecated' def __init__(self, ctl, key='unexpected error', msg=None): @@ -166,7 +169,9 @@ if res[dest] is None: break except ValueError as err: - raise ValueError('error with %r field: %s' % (src, err)), None, sys.exc_info()[-1] + exc = ValueError('error with %r field: %s' % (src, err)) + exc.__traceback__ = sys.exc_info()[-1] + raise exc return res @@ -254,6 +259,7 @@ if k is not None and len(v) > 1] +@add_metaclass(class_deprecated) class ObjectStore(object): """Store objects in memory for *faster* validation (development mode) @@ -264,7 +270,6 @@ >>> group = store.prepare_insert_entity('CWUser', name=u'unknown') >>> store.prepare_insert_relation(user, 'in_group', group) """ - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.21] use the new importer API' def __init__(self): @@ -289,7 +294,7 @@ """Given an entity type and eid, updates the corresponding fake entity with specified attributes and inlined relations. """ - assert eid in self.types[etype], 'Trying to update with wrong type {}'.format(etype) + assert eid in self.types[etype], 'Trying to update with wrong type %s' % etype data = self.eids[eid] data.update(kwargs) @@ -335,6 +340,7 @@ self.prepare_insert_relation(eid_from, rtype, eid_to, **kwargs) +@add_metaclass(class_deprecated) class CWImportController(object): """Controller of the data import process. 
@@ -343,7 +349,6 @@ >>> ctl.data = dict_of_data_tables >>> ctl.run() """ - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.21] use the new importer API' def __init__(self, store, askerror=0, catcherrors=None, tell=tell, @@ -421,7 +426,7 @@ self.tell(pformat(sorted(error[1]))) def _print_stats(self): - nberrors = sum(len(err) for err in self.errors.itervalues()) + nberrors = sum(len(err) for err in self.errors.values()) self.tell('\nImport statistics: %i entities, %i types, %i relations and %i errors' % (self.store.nb_inserted_entities, self.store.nb_inserted_types, @@ -456,5 +461,3 @@ return callfunc_every(self.store.commit, self.commitevery, self.get_data(datakey)) - - diff -r a4fcee1e9789 -r 19fcce6dc6d1 dataimport/importer.py --- a/dataimport/importer.py Thu Mar 24 09:43:25 2016 +0100 +++ b/dataimport/importer.py Thu Jun 16 14:19:20 2016 +0200 @@ -69,7 +69,7 @@ def use_extid_as_cwuri_filter(extentities): for extentity in extentities: if extentity.extid not in extid2eid: - extentity.values.setdefault('cwuri', set([unicode(extentity.extid)])) + extentity.values.setdefault('cwuri', set([extentity.extid.decode('utf-8')])) yield extentity return use_extid_as_cwuri_filter @@ -83,15 +83,15 @@ def __init__(self, cnx, source=None): self.cnx = cnx - self._rql_template = 'Any S,O WHERE S {} O' + self._rql_template = 'Any S,O WHERE S %s O' self._kwargs = {} if source is not None: - self._rql_template += ', S cw_source SO, O cw_source SO, SO eid %(s)s' + self._rql_template += ', S cw_source SO, O cw_source SO, SO eid %%(s)s' self._kwargs['s'] = source.eid def __getitem__(self, rtype): """Return a set of (subject, object) eids already related by `rtype`""" - rql = self._rql_template.format(rtype) + rql = self._rql_template % rtype return set(tuple(x) for x in self.cnx.execute(rql, self._kwargs)) @@ -286,6 +286,7 @@ """ schema = self.schema extid2eid = self.extid2eid + order_hint = list(self.etypes_order_hint) for ext_entity in ext_entities: # check data in the transitional representation and prepare it for # later insertion in the database @@ -295,12 +296,17 @@ queue.setdefault(ext_entity.etype, []).append(ext_entity) continue yield ext_entity + if not queue: + continue # check for some entities in the queue that may now be ready. 
             # We'll have to restart
             # search for ready entities until no one is generated
+            for etype in queue:
+                if etype not in order_hint:
+                    order_hint.append(etype)
             new = True
             while new:
                 new = False
-                for etype in self.etypes_order_hint:
+                for etype in order_hint:
                     if etype in queue:
                         new_queue = []
                         for ext_entity in queue[etype]:
@@ -344,8 +350,8 @@
             try:
                 subject_eid = extid2eid[subject_uri]
                 object_eid = extid2eid[object_uri]
-            except KeyError:
-                missing_relations.append((subject_uri, rtype, object_uri))
+            except KeyError as exc:
+                missing_relations.append((subject_uri, rtype, object_uri, exc))
                 continue
             if (subject_eid, object_eid) not in existing:
                 prepare_insert_relation(subject_eid, rtype, object_eid)
@@ -367,8 +373,9 @@
                 raise Exception('\n'.join(msgs))
         if missing_relations:
             msgs = ["can't create some relations, is there missing data?"]
-            for subject_uri, rtype, object_uri in missing_relations:
-                msgs.append("%s %s %s" % (subject_uri, rtype, object_uri))
+            for subject_uri, rtype, object_uri, exc in missing_relations:
+                msgs.append("Could not find %s when trying to insert (%s, %s, %s)"
+                            % (exc, subject_uri, rtype, object_uri))
             map(error, msgs)
         if self.raise_on_error:
             raise Exception('\n'.join(msgs))
diff -r a4fcee1e9789 -r 19fcce6dc6d1 dataimport/massive_store.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dataimport/massive_store.py	Thu Jun 16 14:19:20 2016 +0200
@@ -0,0 +1,780 @@
+# coding: utf-8
+# copyright 2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+
+import logging
+from datetime import datetime
+from collections import defaultdict
+from io import StringIO
+
+from six.moves import range
+
+from yams.constraints import SizeConstraint
+
+from psycopg2 import ProgrammingError
+
+from cubicweb.server.schema2sql import rschema_has_table
+from cubicweb.schema import PURE_VIRTUAL_RTYPES
+from cubicweb.dataimport import stores, pgstore
+from cubicweb.utils import make_uid
+from cubicweb.server.sqlutils import SQL_PREFIX
+
+
+class MassiveObjectStore(stores.RQLObjectStore):
+    """
+    Store for massive import of data, with delayed insertion of meta data.
+
+    WARNINGS:
+    - This store may only be used with PostgreSQL for now, as it relies
+      on the COPY FROM method, and on specific PostgreSQL tables to get all
+      the indexes.
+    - This store can only insert relations that are not inlined (i.e.,
+      which do *not* have inlined=True in their definition in the schema).
+
+    It should be used as follows:
+
+       store = MassiveObjectStore(cnx)
+       store.init_rtype_table('Person', 'lives_in', 'Location')
+       ...
+
+       store.prepare_insert_entity('Person', subj_iid_attribute=person_iid, ...)
+       store.prepare_insert_entity('Location', obj_iid_attribute=location_iid, ...)
+       ...
+
+       # subj_iid_attribute and obj_iid_attribute are argument names
+       # chosen by the user (e.g. "cwuri"). These names can be identical.
+       # person_iid and location_iid are unique IDs and depend on the data
+       # (e.g. URI).
+       store.flush()
+       store.relate_by_iid(person_iid, 'lives_in', location_iid)
+       # For example:
+       store.prepare_insert_entity('Person',
+                                   cwuri='http://dbpedia.org/toto',
+                                   name='Toto')
+       store.prepare_insert_entity('Location',
+                                   uri='http://geonames.org/11111',
+                                   name='Somewhere')
+       store.flush()
+       store.relate_by_iid('http://dbpedia.org/toto',
+                           'lives_in',
+                           'http://geonames.org/11111')
+       # Finally
+       store.convert_relations('Person', 'lives_in', 'Location',
+                               'subj_iid_attribute', 'obj_iid_attribute')
+       # For the previous example:
+       store.convert_relations('Person', 'lives_in', 'Location', 'cwuri', 'uri')
+       ...
+       store.commit()
+       store.finish()
+    """
+    # max size of the iid, used to create the iid_eid conversion table
+    iid_maxsize = 1024
+
+    def __init__(self, cnx,
+                 on_commit_callback=None, on_rollback_callback=None,
+                 slave_mode=False,
+                 source=None,
+                 eids_seq_range=10000):
+        """ Create a MassiveObjectStore, with the following attributes:
+
+        - cnx: CubicWeb cnx
+        - eids_seq_range: size of eid range reserved by the store for each batch
+        """
+        super(MassiveObjectStore, self).__init__(cnx)
+        self.logger = logging.getLogger('dataimport.massive_store')
+        self._cnx = cnx
+        self.sql = cnx.system_sql
+        self._data_uri_relations = defaultdict(list)
+        self.eids_seq_range = eids_seq_range
+
+        # etypes for which we have a uri_eid_%(etype)s table
+        self._init_uri_eid = set()
+        # etypes for which we have a uri_eid_%(e)s_idx index
+        self._uri_eid_inserted = set()
+        # set of rtypes for which we have a %(rtype)s_relation_iid_tmp table
+        self._uri_rtypes = set()
+        # set of etypes whose tables are created
+        self._entities = set()
+        # set of rtypes for which we have a %(rtype)s_relation_tmp table
+        self._rtypes = set()
+
+        self.slave_mode = slave_mode
+        self.default_values = get_default_values(cnx.vreg.schema)
+        pg_schema = cnx.repo.config.system_source_config.get('db-namespace') or 'public'
+        self._dbh = PGHelper(self._cnx, pg_schema)
+        self._data_entities = defaultdict(list)
+        self._data_relations = defaultdict(list)
+        self._now = datetime.utcnow()
+        self._default_cwuri = make_uid('_auto_generated')
+        self._count_cwuri = 0
+        self.on_commit_callback = on_commit_callback
+        self.on_rollback_callback = on_rollback_callback
+        # Do our meta tables already exist?
+        self._init_massive_metatables()
+        self.get_next_eid = lambda g=self._get_eid_gen(): next(g)
+        # recreate them when self.finish() is called
+
+        if not self.slave_mode:
+            self._drop_all_constraints()
+            self._drop_metatables_constraints()
+        if source is None:
+            source = cnx.repo.system_source
+        self.source = source
+        self._etype_eid_idx = dict(cnx.execute('Any XN,X WHERE X is CWEType, X name XN'))
+        cnx.read_security = False
+        cnx.write_security = False
+
+    ### INIT FUNCTIONS ########################################################
+
+    def _drop_all_constraints(self):
+        schema = self._cnx.vreg.schema
+        tables = ['cw_%s' % etype.type.lower()
+                  for etype in schema.entities() if not etype.final]
+        for rschema in schema.relations():
+            if rschema.inlined:
+                continue
+            elif rschema_has_table(rschema, skip_relations=PURE_VIRTUAL_RTYPES):
+                tables.append('%s_relation' % rschema.type.lower())
+        tables.append('entities')
+        for tablename in tables:
+            self._store_and_drop_constraints(tablename)
+
+    def _store_and_drop_constraints(self, tablename):
+        if not self._constraint_table_created:
+            # Create a table to save the constraints
+            # Allow reload even after crash
+            sql = "CREATE TABLE cwmassive_constraints (origtable text, query text, type varchar(256))"
+            self.sql(sql)
+            self._constraint_table_created = True
+        constraints = self._dbh.application_constraints(tablename)
+        for name, query in constraints.items():
+            sql = 'INSERT INTO cwmassive_constraints VALUES (%(e)s, %(c)s, %(t)s)'
+            self.sql(sql, {'e': tablename, 'c': query, 't': 'constraint'})
+            sql = 'ALTER TABLE %s DROP CONSTRAINT %s' % (tablename, name)
+            self.sql(sql)
+
+    def reapply_all_constraints(self):
+        if not self._dbh.table_exists('cwmassive_constraints'):
+            self.logger.info('The table cwmassive_constraints does not exist')
+            return
+        sql = 'SELECT query FROM cwmassive_constraints WHERE type = %(t)s'
+        crs = self.sql(sql, {'t': 'constraint'})
+        for query, in crs.fetchall():
+            self.sql(query)
+            self.sql('DELETE FROM cwmassive_constraints WHERE type = %(t)s '
+                     'AND query = %(q)s', {'t': 'constraint', 'q': query})
+
+    def init_rtype_table(self, etype_from, rtype, etype_to):
+        """ Build temporary table for standard rtype """
+        # Create an uri_eid table for each etype for a better
+        # control of which etype is concerned by a particular
+        # possibly multivalued relation.
+        for etype in (etype_from, etype_to):
+            if etype and etype not in self._init_uri_eid:
+                self._init_uri_eid_table(etype)
+        if rtype not in self._uri_rtypes:
+            # Create the temporary table
+            if not self._cnx.repo.schema.rschema(rtype).inlined:
+                try:
+                    sql = 'CREATE TABLE %(r)s_relation_iid_tmp (uri_from character ' \
+                          'varying(%(s)s), uri_to character varying(%(s)s))'
+                    self.sql(sql % {'r': rtype, 's': self.iid_maxsize})
+                except ProgrammingError:
+                    # XXX Already exists (probably due to multiple import)
+                    pass
+            else:
+                self.logger.warning("inlined relation %s: cannot insert it", rtype)
+            # Add it to the initialized set
+            self._uri_rtypes.add(rtype)
+
+    def _init_uri_eid_table(self, etype):
+        """ Build a temporary table for id/eid conversion
+        """
+        try:
+            sql = "CREATE TABLE uri_eid_%(e)s (uri character varying(%(size)s), eid integer)"
+            self.sql(sql % {'e': etype.lower(), 'size': self.iid_maxsize,})
+        except ProgrammingError:
+            # XXX Already exists (probably due to multiple import)
+            pass
+        # Add it to the initialized set
+        self._init_uri_eid.add(etype)
+
+    def _init_massive_metatables(self):
+        # Check if our tables are not already created (i.e. a restart)
+    def _init_massive_metatables(self):
+        # Check whether our tables were already created (i.e. this is a restart)
+        self._initialized_table_created = self._dbh.table_exists('cwmassive_initialized')
+        self._constraint_table_created = self._dbh.table_exists('cwmassive_constraints')
+        self._metadata_table_created = self._dbh.table_exists('cwmassive_metadata')
+
+    ### RELATE FUNCTION #######################################################
+
+    def relate_by_iid(self, iid_from, rtype, iid_to):
+        """Add a new relation based on the internal ids (iid)
+        of the entities (not their eids)"""
+        # Push data
+        if isinstance(iid_from, unicode):
+            iid_from = iid_from.encode('utf-8')
+        if isinstance(iid_to, unicode):
+            iid_to = iid_to.encode('utf-8')
+        self._data_uri_relations[rtype].append({'uri_from': iid_from, 'uri_to': iid_to})
+
+    ### FLUSH FUNCTIONS #######################################################
+
+    def flush_relations(self):
+        """ Flush the relations data """
+        for rtype, data in self._data_uri_relations.items():
+            if not data:
+                self.logger.info('No data for rtype %s', rtype)
+                continue
+            buf = StringIO('\n'.join(['%(uri_from)s\t%(uri_to)s' % d for d in data]))
+            if not buf:
+                self.logger.info('Empty Buffer for rtype %s', rtype)
+                continue
+            cursor = self._cnx.cnxset.cu
+            if not self._cnx.repo.schema.rschema(rtype).inlined:
+                cursor.copy_from(buf, '%s_relation_iid_tmp' % rtype.lower(),
+                                 null='NULL', columns=('uri_from', 'uri_to'))
+            else:
+                self.logger.warning("inlined relation %s: cannot insert it", rtype)
+            buf.close()
+            # Clear data cache
+            self._data_uri_relations[rtype] = []
+
+    def fill_uri_eid_table(self, etype, uri_label):
+        """ Fill the uri_eid table """
+        self.logger.info('Fill uri_eid for etype %s', etype)
+        sql = 'INSERT INTO uri_eid_%(e)s SELECT cw_%(l)s, cw_eid FROM cw_%(e)s'
+        self.sql(sql % {'l': uri_label, 'e': etype.lower()})
+        # Add indexes
+        self.sql('CREATE INDEX uri_eid_%(e)s_idx ON uri_eid_%(e)s (uri)' % {'e': etype.lower()})
+        # Set the etype as converted
+        self._uri_eid_inserted.add(etype)
+
+    def convert_relations(self, etype_from, rtype, etype_to,
+                          uri_label_from='cwuri', uri_label_to='cwuri'):
+        """ Flush the converted relations """
+        # Always flush relations to be sure
+        self.logger.info('Convert relations %s %s %s', etype_from, rtype, etype_to)
+        self.flush_relations()
+        if uri_label_from and etype_from not in self._uri_eid_inserted:
+            self.fill_uri_eid_table(etype_from, uri_label_from)
+        if uri_label_to and etype_to not in self._uri_eid_inserted:
+            self.fill_uri_eid_table(etype_to, uri_label_to)
+        if self._cnx.repo.schema.rschema(rtype).inlined:
+            self.logger.warning("Can't insert inlined relation %s", rtype)
+            return
+        if uri_label_from and uri_label_to:
+            sql = '''INSERT INTO %(r)s_relation (eid_from, eid_to) SELECT DISTINCT O1.eid, O2.eid
+                     FROM %(r)s_relation_iid_tmp AS T, uri_eid_%(ef)s as O1, uri_eid_%(et)s as O2
+                     WHERE O1.uri=T.uri_from AND O2.uri=T.uri_to AND NOT EXISTS (
+                         SELECT 1 FROM %(r)s_relation AS TT WHERE TT.eid_from=O1.eid AND TT.eid_to=O2.eid);
+                     '''
+        elif uri_label_to:
+            sql = '''INSERT INTO %(r)s_relation (eid_from, eid_to) SELECT DISTINCT
+                     CAST(T.uri_from AS INTEGER), O1.eid
+                     FROM %(r)s_relation_iid_tmp AS T, uri_eid_%(et)s as O1
+                     WHERE O1.uri=T.uri_to AND NOT EXISTS (
+                         SELECT 1 FROM %(r)s_relation AS TT WHERE
+                         TT.eid_from=CAST(T.uri_from AS INTEGER) AND TT.eid_to=O1.eid);
+                     '''
+        elif uri_label_from:
+            sql = '''INSERT INTO %(r)s_relation (eid_from, eid_to) SELECT DISTINCT
+                     O1.eid, CAST(T.uri_to AS INTEGER)
+                     FROM %(r)s_relation_iid_tmp AS T, uri_eid_%(ef)s as O1
+                     WHERE O1.uri=T.uri_from AND NOT EXISTS (
+                         SELECT 1 FROM
%(r)s_relation AS TT WHERE + TT.eid_from=O1.eid AND TT.eid_to=CAST(T.uri_to AS INTEGER)); + ''' + try: + self.sql(sql % {'r': rtype.lower(), + 'et': etype_to.lower() if etype_to else u'', + 'ef': etype_from.lower() if etype_from else u''}) + except Exception as ex: + self.logger.error("Can't insert relation %s: %s", rtype, ex) + + ### SQL UTILITIES ######################################################### + + def drop_and_store_indexes(self, tablename): + # Drop indexes and constraints + if not self._constraint_table_created: + # Create a table to save the constraints + # Allow reload even after crash + sql = "CREATE TABLE cwmassive_constraints (origtable text, query text, type varchar(256))" + self.sql(sql) + self._constraint_table_created = True + self._drop_table_indexes(tablename) + + def _drop_table_indexes(self, tablename): + """ Drop and store table constraints and indexes """ + indexes = self._dbh.application_indexes(tablename) + for name, query in indexes.items(): + sql = 'INSERT INTO cwmassive_constraints VALUES (%(e)s, %(c)s, %(t)s)' + self.sql(sql, {'e': tablename, 'c': query, 't': 'index'}) + sql = 'DROP INDEX %s' % name + self.sql(sql) + + def reapply_constraint_index(self, tablename): + if not self._dbh.table_exists('cwmassive_constraints'): + self.logger.info('The table cwmassive_constraints does not exist') + return + sql = 'SELECT query FROM cwmassive_constraints WHERE origtable = %(e)s' + crs = self.sql(sql, {'e': tablename}) + for query, in crs.fetchall(): + self.sql(query) + self.sql('DELETE FROM cwmassive_constraints WHERE origtable = %(e)s ' + 'AND query = %(q)s', {'e': tablename, 'q': query}) + + def _drop_metatables_constraints(self): + """ Drop all the constraints for the meta data""" + for tablename in ('created_by_relation', 'owned_by_relation', + 'is_instance_of_relation', 'is_relation', + 'entities'): + self.drop_and_store_indexes(tablename) + + def _create_metatables_constraints(self): + """ Create all the constraints for the meta data""" + for tablename in ('entities', + 'created_by_relation', 'owned_by_relation', + 'is_instance_of_relation', 'is_relation'): + # Indexes and constraints + self.reapply_constraint_index(tablename) + + def init_relation_table(self, rtype): + """ Get and remove all indexes for performance sake """ + # Create temporary table + if not self.slave_mode and rtype not in self._rtypes: + sql = "CREATE TABLE %s_relation_tmp (eid_from integer, eid_to integer)" % rtype.lower() + self.sql(sql) + # Drop indexes and constraints + tablename = '%s_relation' % rtype.lower() + self.drop_and_store_indexes(tablename) + # Push the etype in the initialized table for easier restart + self.init_create_initialized_table() + sql = 'INSERT INTO cwmassive_initialized VALUES (%(e)s, %(t)s)' + self.sql(sql, {'e': rtype, 't': 'rtype'}) + # Mark rtype as "initialized" for faster check + self._rtypes.add(rtype) + + def init_create_initialized_table(self): + """ Create the cwmassive initialized table + """ + if not self._initialized_table_created: + sql = "CREATE TABLE cwmassive_initialized (retype text, type varchar(128))" + self.sql(sql) + self._initialized_table_created = True + + def init_etype_table(self, etype): + """ Add eid sequence to a particular etype table and + remove all indexes for performance sake """ + if etype not in self._entities: + # Only for non-initialized etype and not slave mode store + if not self.slave_mode: + # Drop indexes and constraints + tablename = 'cw_%s' % etype.lower() + self.drop_and_store_indexes(tablename) + # Push the 
etype in the initialized table for easier restart
+                self.init_create_initialized_table()
+                sql = 'INSERT INTO cwmassive_initialized VALUES (%(e)s, %(t)s)'
+                self.sql(sql, {'e': etype, 't': 'etype'})
+            # Mark etype as "initialized" for faster check
+            self._entities.add(etype)
+
+    def restart_eid_sequence(self, start_eid):
+        self._cnx.system_sql(self._cnx.repo.system_source.dbhelper.sql_restart_numrange(
+            'entities_id_seq', initial_value=start_eid))
+        self._cnx.commit()
+
+    ### ENTITIES CREATION #####################################################
+
+    def _get_eid_gen(self):
+        """ Function getting the next eid. This is done by preselecting
+        a given number of eids from the 'entities_id_seq' sequence, and then
+        handing them out one by one """
+        while True:
+            last_eid = self._cnx.repo.system_source.create_eid(self._cnx, self.eids_seq_range)
+            for eid in range(last_eid - self.eids_seq_range + 1, last_eid + 1):
+                yield eid
+
+    def _apply_default_values(self, etype, kwargs):
+        """Apply the default values for a given etype, attribute and value."""
+        default_values = self.default_values[etype]
+        missing_keys = set(default_values) - set(kwargs)
+        kwargs.update((key, default_values[key]) for key in missing_keys)
+
+    # store api ###############################################################
+
+    def prepare_insert_entity(self, etype, **kwargs):
+        """Given an entity type, attributes and inlined relations, returns the inserted entity's
+        eid.
+        """
+        # Init the table if necessary
+        self.init_etype_table(etype)
+        # Add metadata if not given
+        if 'modification_date' not in kwargs:
+            kwargs['modification_date'] = self._now
+        if 'creation_date' not in kwargs:
+            kwargs['creation_date'] = self._now
+        if 'cwuri' not in kwargs:
+            kwargs['cwuri'] = self._default_cwuri + str(self._count_cwuri)
+            self._count_cwuri += 1
+        if 'eid' not in kwargs:
+            # If eid is not given, take the next value from the preallocated
+            # eid sequence
+            kwargs['eid'] = self.get_next_eid()
+        self._apply_default_values(etype, kwargs)
+        self._data_entities[etype].append(kwargs)
+        return kwargs.get('eid')
+
+    def prepare_insert_relation(self, eid_from, rtype, eid_to, **kwargs):
+        """Insert into the database a relation ``rtype`` between entities with eids ``eid_from``
+        and ``eid_to``.
+        """
+        # Init the table if necessary
+        self.init_relation_table(rtype)
+        self._data_relations[rtype].append({'eid_from': eid_from, 'eid_to': eid_to})
+
+    def flush(self):
+        """Flush the data"""
+        self.flush_entities()
+        self.flush_internal_relations()
+        self.flush_relations()
+
+    def commit(self):
+        """Commit the database transaction."""
+        self.on_commit()
+        super(MassiveObjectStore, self).commit()
+
+    def finish(self):
+        """Remove temporary tables and columns."""
+        if self.slave_mode:
+            raise RuntimeError('Store cleanup is not allowed in slave mode')
+        self.logger.info("Start cleaning")
+        # Cleanup relations tables
+        for etype in self._init_uri_eid:
+            self.sql('DROP TABLE uri_eid_%s' % etype.lower())
+        # Remove relations tables
+        for rtype in self._uri_rtypes:
+            if not self._cnx.repo.schema.rschema(rtype).inlined:
+                self.sql('DROP TABLE %(r)s_relation_iid_tmp' % {'r': rtype})
+            else:
+                self.logger.warning("inlined relation %s: no cleanup to be done for it",
+                                    rtype)
+        # Create meta constraints (entities, is_instance_of, ...)
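+        # (this replays, for the metadata tables, the index and constraint
+        # definitions saved into cwmassive_constraints when the store was
+        # initialized)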
+        self._create_metatables_constraints()
+        # Get all the initialized etypes/rtypes
+        if self._dbh.table_exists('cwmassive_initialized'):
+            crs = self.sql('SELECT retype, type FROM cwmassive_initialized')
+            for retype, _type in crs.fetchall():
+                self.logger.info('Cleanup for %s', retype)
+                if _type == 'etype':
+                    # Cleanup entities tables - Recreate indexes
+                    self._cleanup_entities(retype)
+                elif _type == 'rtype':
+                    # Cleanup relations tables
+                    self._cleanup_relations(retype)
+                self.sql('DELETE FROM cwmassive_initialized WHERE retype = %(e)s',
+                         {'e': retype})
+        self.reapply_all_constraints()
+        # Delete the metadata tables
+        for table_name in ('cwmassive_initialized', 'cwmassive_constraints', 'cwmassive_metadata'):
+            if self._dbh.table_exists(table_name):
+                self.sql('DROP TABLE %s' % table_name)
+        self.commit()
+
+    ### FLUSH #################################################################
+
+    def on_commit(self):
+        if self.on_commit_callback:
+            self.on_commit_callback()
+
+    def on_rollback(self, exc, etype, data):
+        if self.on_rollback_callback:
+            self.on_rollback_callback(exc, etype, data)
+            self._cnx.rollback()
+        else:
+            raise exc
+
+    def flush_internal_relations(self):
+        """ Flush the relations data """
+        for rtype, data in self._data_relations.items():
+            if not data:
+                # There is no data for this rtype for this flush round.
+                continue
+            buf = pgstore._create_copyfrom_buffer(data, ('eid_from', 'eid_to'))
+            if not buf:
+                # The buffer is empty. This is probably due to an error in
+                # _create_copyfrom_buffer
+                raise ValueError
+            cursor = self._cnx.cnxset.cu
+            # Push into the tmp table
+            cursor.copy_from(buf, '%s_relation_tmp' % rtype.lower(),
+                             null='NULL', columns=('eid_from', 'eid_to'))
+            # Clear data cache
+            self._data_relations[rtype] = []
+
+    def flush_entities(self):
+        """ Flush the entities data """
+        for etype, data in self._data_entities.items():
+            if not data:
+                # There is no data for this etype for this flush round.
+                continue
+            # XXX It may be interesting to directly infer the columns' names
+            # from the schema instead of using .keys()
+            # XXX For now, _create_copyfrom_buffer does a "row[column]" which
+            # can lead to a KeyError, so build dictionaries with all the keys.
+            columns = set()
+            for d in data:
+                columns.update(d.keys())
+            _data = []
+            _base_data = dict.fromkeys(columns)
+            for d in data:
+                _d = _base_data.copy()
+                _d.update(d)
+                _data.append(_d)
+            buf = pgstore._create_copyfrom_buffer(_data, columns)
+            if not buf:
+                # The buffer is empty. This is probably due to an error in
+                # _create_copyfrom_buffer
+                raise ValueError('Error in buffer creation for etype %s' % etype)
+            columns = ['cw_%s' % attr for attr in columns]
+            cursor = self._cnx.cnxset.cu
+            try:
+                cursor.copy_from(buf, 'cw_%s' % etype.lower(), null='NULL', columns=columns)
+            except Exception as exc:
+                self.on_rollback(exc, etype, data)
+            # Clear data cache
+            self._data_entities[etype] = []
+        if not self.slave_mode:
+            self.flush_meta_data()
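+    # A small standalone sketch (with made-up attribute names) of the
+    # column-union normalisation done in flush_entities() above:
+    #   data = [{'cw_name': u'Toto'},
+    #           {'cw_name': u'Titi', 'cw_age': 7}]
+    #   columns = set()
+    #   for d in data:
+    #       columns.update(d.keys())            # -> {'cw_age', 'cw_name'}
+    #   _base_data = dict.fromkeys(columns)     # missing attributes become None
+    #   rows = [dict(_base_data, **d) for d in data]
+    #   # rows == [{'cw_age': None, 'cw_name': u'Toto'},
+    #   #          {'cw_age': 7, 'cw_name': u'Titi'}]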
+ """ + if self.slave_mode: + raise RuntimeError('Flushing meta data is not allow in slave mode') + if not self._dbh.table_exists('cwmassive_initialized'): + self.logger.info('No information available for initialized etypes/rtypes') + return + if not self._metadata_table_created: + # Keep the correctly flush meta data in database + sql = "CREATE TABLE cwmassive_metadata (etype text)" + self.sql(sql) + self._metadata_table_created = True + crs = self.sql('SELECT etype FROM cwmassive_metadata') + already_flushed = set(e for e, in crs.fetchall()) + crs = self.sql('SELECT retype FROM cwmassive_initialized WHERE type = %(t)s', + {'t': 'etype'}) + all_etypes = set(e for e, in crs.fetchall()) + for etype in all_etypes: + if etype not in already_flushed: + # Deals with meta data + self.logger.info('Flushing meta data for %s' % etype) + self.insert_massive_meta_data(etype) + sql = 'INSERT INTO cwmassive_metadata VALUES (%(e)s)' + self.sql(sql, {'e': etype}) + + def _cleanup_entities(self, etype): + """ Cleanup etype table """ + # Create indexes and constraints + tablename = SQL_PREFIX + etype.lower() + self.reapply_constraint_index(tablename) + + def _cleanup_relations(self, rtype): + """ Cleanup rtype table """ + # Push into relation table while removing duplicate + sql = '''INSERT INTO %(r)s_relation (eid_from, eid_to) SELECT DISTINCT + T.eid_from, T.eid_to FROM %(r)s_relation_tmp AS T + WHERE NOT EXISTS (SELECT 1 FROM %(r)s_relation AS TT WHERE + TT.eid_from=T.eid_from AND TT.eid_to=T.eid_to);''' % {'r': rtype} + self.sql(sql) + # Drop temporary relation table + sql = ('DROP TABLE %(r)s_relation_tmp' % {'r': rtype.lower()}) + self.sql(sql) + # Create indexes and constraints + tablename = '%s_relation' % rtype.lower() + self.reapply_constraint_index(tablename) + + def insert_massive_meta_data(self, etype): + """ Massive insertion of meta data for a given etype, based on SQL statements. + """ + # Push data - Use coalesce to avoid NULL (and get 0), if there is no + # entities of this type in the entities table. + # Meta data relations + self.metagen_push_relation(etype, self._cnx.user.eid, 'created_by_relation') + self.metagen_push_relation(etype, self._cnx.user.eid, 'owned_by_relation') + self.metagen_push_relation(etype, self.source.eid, 'cw_source_relation') + self.metagen_push_relation(etype, self._etype_eid_idx[etype], 'is_relation') + self.metagen_push_relation(etype, self._etype_eid_idx[etype], 'is_instance_of_relation') + sql = ("INSERT INTO entities (eid, type, asource, extid) " + "SELECT cw_eid, '%s', 'system', NULL FROM cw_%s " + "WHERE NOT EXISTS (SELECT 1 FROM entities WHERE eid=cw_eid)" + % (etype, etype.lower())) + self.sql(sql) + + def metagen_push_relation(self, etype, eid_to, rtype): + sql = ("INSERT INTO %s (eid_from, eid_to) SELECT cw_eid, %s FROM cw_%s " + "WHERE NOT EXISTS (SELECT 1 FROM entities WHERE eid=cw_eid)" + % (rtype, eid_to, etype.lower())) + self.sql(sql) + + +### CONSTRAINTS MANAGEMENT FUNCTIONS ########################################## + +def get_size_constraints(schema): + """analyzes yams ``schema`` and returns the list of size constraints. + + The returned value is a dictionary mapping entity types to a + sub-dictionnaries mapping attribute names -> max size. 
+ """ + size_constraints = {} + # iterates on all entity types + for eschema in schema.entities(): + # for each entity type, iterates on attribute definitions + size_constraints[eschema.type] = eschema_constraints = {} + for rschema, aschema in eschema.attribute_definitions(): + # for each attribute, if a size constraint is found, + # append it to the size constraint list + maxsize = None + rdef = rschema.rdef(eschema, aschema) + for constraint in rdef.constraints: + if isinstance(constraint, SizeConstraint): + maxsize = constraint.max + eschema_constraints[rschema.type] = maxsize + return size_constraints + +def get_default_values(schema): + """analyzes yams ``schema`` and returns the list of default values. + + The returned value is a dictionary mapping entity types to a + sub-dictionnaries mapping attribute names -> default values. + """ + default_values = {} + # iterates on all entity types + for eschema in schema.entities(): + # for each entity type, iterates on attribute definitions + default_values[eschema.type] = eschema_constraints = {} + for rschema, _ in eschema.attribute_definitions(): + # for each attribute, if a size constraint is found, + # append it to the size constraint list + if eschema.default(rschema.type) is not None: + eschema_constraints[rschema.type] = eschema.default(rschema.type) + return default_values + + +class PGHelper(object): + def __init__(self, cnx, pg_schema='public'): + self.cnx = cnx + # Deals with pg schema, see #3216686 + self.pg_schema = pg_schema + + def application_indexes_constraints(self, tablename): + """ Get all the indexes/constraints for a given tablename """ + indexes = self.application_indexes(tablename) + constraints = self.application_constraints(tablename) + _indexes = {} + for name, query in indexes.items(): + # Remove pkey indexes (automatically created by constraints) + # Specific cases of primary key, see #3224079 + if name not in constraints: + _indexes[name] = query + return _indexes, constraints + + def table_exists(self, table_name): + sql = "SELECT * from information_schema.tables WHERE table_name=%(t)s AND table_schema=%(s)s" + crs = self.cnx.system_sql(sql, {'t': table_name, 's': self.pg_schema}) + res = crs.fetchall() + if res: + return True + return False + + # def check_if_primary_key_exists_for_table(self, table_name): + # sql = ("SELECT constraint_name FROM information_schema.table_constraints " + # "WHERE constraint_type = 'PRIMARY KEY' AND table_name=%(t)s AND table_schema=%(s)s") + # crs = self.cnx.system_sql(sql, {'t': table_name, 's': self.pg_schema}) + # res = crs.fetchall() + # if res: + # return True + # return False + + def index_query(self, name): + """Get the request to be used to recreate the index""" + return self.cnx.system_sql("SELECT pg_get_indexdef(c.oid) " + "from pg_catalog.pg_class c " + "LEFT JOIN pg_catalog.pg_namespace n " + "ON n.oid = c.relnamespace " + "WHERE c.relname = %(r)s AND n.nspname=%(n)s", + {'r': name, 'n': self.pg_schema}).fetchone()[0] + + def constraint_query(self, name): + """Get the request to be used to recreate the constraint""" + return self.cnx.system_sql("SELECT pg_get_constraintdef(c.oid) " + "from pg_catalog.pg_constraint c " + "LEFT JOIN pg_catalog.pg_namespace n " + "ON n.oid = c.connamespace " + "WHERE c.conname = %(r)s AND n.nspname=%(n)s", + {'r': name, 'n': self.pg_schema}).fetchone()[0] + + def index_list(self, tablename): + # This SQL query (cf http://www.postgresql.org/message-id/432F450F.4080700@squiz.net) + # aims at getting all the indexes for each table. 
+ sql = '''SELECT c.relname as "Name" + FROM pg_catalog.pg_class c + JOIN pg_catalog.pg_index i ON i.indexrelid = c.oid + JOIN pg_catalog.pg_class c2 ON i.indrelid = c2.oid + LEFT JOIN pg_catalog.pg_user u ON u.usesysid = c.relowner + LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace + WHERE c.relkind IN ('i','') + AND c2.relname = '%s' + AND i.indisprimary = FALSE + AND n.nspname NOT IN ('pg_catalog', 'pg_toast') + AND pg_catalog.pg_table_is_visible(c.oid);''' % tablename + return self.cnx.system_sql(sql).fetchall() + + def application_indexes(self, tablename): + """ Iterate over all the indexes """ + indexes_list = self.index_list(tablename) + indexes = {} + for name, in indexes_list: + indexes[name] = self.index_query(name) + return indexes + + def constraint_list(self, tablename): + sql = '''SELECT i.conname as "Name" + FROM pg_catalog.pg_class c + JOIN pg_catalog.pg_constraint i ON i.conrelid = c.oid + JOIN pg_catalog.pg_class c2 ON i.conrelid=c2.oid + LEFT JOIN pg_catalog.pg_user u ON u.usesysid = c.relowner + LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace + WHERE + c2.relname = '%s' + AND n.nspname NOT IN ('pg_catalog', 'pg_toast') + AND pg_catalog.pg_table_is_visible(c.oid) + ''' % tablename + return self.cnx.system_sql(sql).fetchall() + + def application_constraints(self, tablename): + """ Iterate over all the constraints """ + constraint_list = self.constraint_list(tablename) + constraints = {} + for name, in constraint_list: + query = self.constraint_query(name) + constraints[name] = 'ALTER TABLE %s ADD CONSTRAINT %s %s' % (tablename, name, query) + return constraints diff -r a4fcee1e9789 -r 19fcce6dc6d1 dataimport/pgstore.py --- a/dataimport/pgstore.py Thu Mar 24 09:43:25 2016 +0100 +++ b/dataimport/pgstore.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,18 +16,20 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
"""Postgres specific store""" +from __future__ import print_function import warnings -import cPickle import os.path as osp -from StringIO import StringIO +from io import StringIO from time import asctime from datetime import date, datetime, time from collections import defaultdict from base64 import b64encode +from six import string_types, integer_types, text_type, binary_type +from six.moves import cPickle as pickle, range + from cubicweb.utils import make_uid -from cubicweb.server.utils import eschema_eid from cubicweb.server.sqlutils import SQL_PREFIX from cubicweb.dataimport.stores import NoHookRQLObjectStore @@ -48,9 +50,9 @@ _execmany_thread_not_copy_from(cu, statement, data) else: if columns is None: - cu.copy_from(buf, table, null='NULL') + cu.copy_from(buf, table, null=u'NULL') else: - cu.copy_from(buf, table, null='NULL', columns=columns) + cu.copy_from(buf, table, null=u'NULL', columns=columns) def _execmany_thread(sql_connect, statements, dump_output_dir=None, support_copy_from=True, encoding='utf-8'): @@ -79,7 +81,7 @@ columns = list(data[0]) execmany_func(cu, statement, data, table, columns, encoding) except Exception: - print 'unable to copy data into table %s' % table + print('unable to copy data into table %s' % table) # Error in import statement, save data in dump_output_dir if dump_output_dir is not None: pdata = {'data': data, 'statement': statement, @@ -87,11 +89,10 @@ filename = make_uid() try: with open(osp.join(dump_output_dir, - '%s.pickle' % filename), 'w') as fobj: - fobj.write(cPickle.dumps(pdata)) + '%s.pickle' % filename), 'wb') as fobj: + pickle.dump(pdata, fobj) except IOError: - print 'ERROR while pickling in', dump_output_dir, filename+'.pickle' - pass + print('ERROR while pickling in', dump_output_dir, filename+'.pickle') cnx.rollback() raise finally: @@ -101,50 +102,44 @@ def _copyfrom_buffer_convert_None(value, **opts): '''Convert None value to "NULL"''' - return 'NULL' + return u'NULL' def _copyfrom_buffer_convert_number(value, **opts): '''Convert a number into its string representation''' - return str(value) + return text_type(value) def _copyfrom_buffer_convert_string(value, **opts): '''Convert string value. 
- - Recognized keywords: - :encoding: resulting string encoding (default: utf-8) ''' - encoding = opts.get('encoding','utf-8') - escape_chars = ((u'\\', ur'\\'), (u'\t', u'\\t'), (u'\r', u'\\r'), + escape_chars = ((u'\\', u'\\\\'), (u'\t', u'\\t'), (u'\r', u'\\r'), (u'\n', u'\\n')) for char, replace in escape_chars: value = value.replace(char, replace) - if isinstance(value, unicode): - value = value.encode(encoding) return value def _copyfrom_buffer_convert_date(value, **opts): '''Convert date into "YYYY-MM-DD"''' # Do not use strftime, as it yields issue with date < 1900 # (http://bugs.python.org/issue1777412) - return '%04d-%02d-%02d' % (value.year, value.month, value.day) + return u'%04d-%02d-%02d' % (value.year, value.month, value.day) def _copyfrom_buffer_convert_datetime(value, **opts): '''Convert date into "YYYY-MM-DD HH:MM:SS.UUUUUU"''' # Do not use strftime, as it yields issue with date < 1900 # (http://bugs.python.org/issue1777412) - return '%s %s' % (_copyfrom_buffer_convert_date(value, **opts), - _copyfrom_buffer_convert_time(value, **opts)) + return u'%s %s' % (_copyfrom_buffer_convert_date(value, **opts), + _copyfrom_buffer_convert_time(value, **opts)) def _copyfrom_buffer_convert_time(value, **opts): '''Convert time into "HH:MM:SS.UUUUUU"''' - return '%02d:%02d:%02d.%06d' % (value.hour, value.minute, - value.second, value.microsecond) + return u'%02d:%02d:%02d.%06d' % (value.hour, value.minute, + value.second, value.microsecond) # (types, converter) list. _COPYFROM_BUFFER_CONVERTERS = [ (type(None), _copyfrom_buffer_convert_None), - ((long, int, float), _copyfrom_buffer_convert_number), - (basestring, _copyfrom_buffer_convert_string), + (integer_types + (float,), _copyfrom_buffer_convert_number), + (string_types, _copyfrom_buffer_convert_string), (datetime, _copyfrom_buffer_convert_datetime), (date, _copyfrom_buffer_convert_date), (time, _copyfrom_buffer_convert_time), @@ -164,7 +159,7 @@ rows = [] if columns is None: if isinstance(data[0], (tuple, list)): - columns = range(len(data[0])) + columns = list(range(len(data[0]))) elif isinstance(data[0], dict): columns = data[0].keys() else: @@ -188,6 +183,7 @@ for types, converter in _COPYFROM_BUFFER_CONVERTERS: if isinstance(value, types): value = converter(value, **convert_opts) + assert isinstance(value, text_type) break else: raise ValueError("Unsupported value type %s" % type(value)) @@ -310,7 +306,7 @@ self._sql_eid_insertdicts = {} def flush(self): - print 'starting flush' + print('starting flush') _entities_sql = self._sql_entities _relations_sql = self._sql_relations _inlined_relations_sql = self._sql_inlined_relations @@ -321,7 +317,7 @@ # In that case, simply update the insert dict and remove # the need to make the # UPDATE statement - for statement, datalist in _inlined_relations_sql.iteritems(): + for statement, datalist in _inlined_relations_sql.items(): new_datalist = [] # for a given inlined relation, # browse each couple to be inserted @@ -342,10 +338,10 @@ new_datalist.append(data) _inlined_relations_sql[statement] = new_datalist _execmany_thread(self.system_source.get_connection, - self._sql_eids.items() - + _entities_sql.items() - + _relations_sql.items() - + _inlined_relations_sql.items(), + list(self._sql_eids.items()) + + list(_entities_sql.items()) + + list(_relations_sql.items()) + + list(_inlined_relations_sql.items()), dump_output_dir=self.dump_output_dir, support_copy_from=self.support_copy_from, encoding=self.dbencoding) @@ -422,26 +418,20 @@ """add type and source info for an eid into the system 
table""" # begin by inserting eid/type/source/extid into the entities table if extid is not None: - assert isinstance(extid, str) - extid = b64encode(extid) + assert isinstance(extid, binary_type) + extid = b64encode(extid).decode('ascii') attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': extid, 'asource': source.uri} self._handle_insert_entity_sql(cnx, self.sqlgen.insert('entities', attrs), attrs) # insert core relations: is, is_instance_of and cw_source - try: - self._handle_is_relation_sql(cnx, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)', - (entity.eid, eschema_eid(cnx, entity.e_schema))) - except IndexError: - # during schema serialization, skip - pass - else: - for eschema in entity.e_schema.ancestors() + [entity.e_schema]: - self._handle_is_relation_sql(cnx, - 'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)', - (entity.eid, eschema_eid(cnx, eschema))) - if 'CWSource' in self.schema and source.eid is not None: # else, cw < 3.10 - self._handle_is_relation_sql(cnx, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)', - (entity.eid, source.eid)) + self._handle_is_relation_sql(cnx, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)', + (entity.eid, entity.e_schema.eid)) + for eschema in entity.e_schema.ancestors() + [entity.e_schema]: + self._handle_is_relation_sql(cnx, + 'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)', + (entity.eid, eschema.eid)) + self._handle_is_relation_sql(cnx, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)', + (entity.eid, source.eid)) # now we can update the full text index if self.do_fti and self.need_fti_indexation(entity.cw_etype): self.index_entity(cnx, entity=entity) diff -r a4fcee1e9789 -r 19fcce6dc6d1 dataimport/stores.py --- a/dataimport/stores.py Thu Mar 24 09:43:25 2016 +0100 +++ b/dataimport/stores.py Thu Jun 16 14:19:20 2016 +0200 @@ -21,7 +21,7 @@ >>> user_eid = store.prepare_insert_entity('CWUser', login=u'johndoe') >>> group_eid = store.prepare_insert_entity('CWUser', name=u'unknown') - >>> store.relate(user_eid, 'in_group', group_eid) + >>> store.prepare_insert_relation(user_eid, 'in_group', group_eid) >>> store.flush() >>> store.commit() >>> store.finish() @@ -61,6 +61,8 @@ from datetime import datetime from copy import copy +from six import text_type + from logilab.common.deprecation import deprecated from logilab.common.decorators import cached @@ -101,7 +103,7 @@ and inlined relations. """ entity = self._cnx.entity_from_eid(eid) - assert entity.cw_etype == etype, 'Trying to update with wrong type {}'.format(etype) + assert entity.cw_etype == etype, 'Trying to update with wrong type %s' % etype # XXX some inlined relations may already exists entity.cw_set(**kwargs) @@ -120,6 +122,10 @@ """Commit the database transaction.""" return self._commit() + def finish(self): + """Nothing to do once import is terminated for this store.""" + pass + @property def session(self): warnings.warn('[3.19] deprecated property.', DeprecationWarning, stacklevel=2) @@ -168,7 +174,7 @@ """Given an entity type, attributes and inlined relations, returns the inserted entity's eid. 
""" - for k, v in kwargs.iteritems(): + for k, v in kwargs.items(): kwargs[k] = getattr(v, 'eid', v) entity, rels = self.metagen.base_etype_dicts(etype) # make a copy to keep cached entity pristine @@ -183,7 +189,7 @@ kwargs = dict() if inspect.getargspec(self.add_relation).keywords: kwargs['subjtype'] = entity.cw_etype - for rtype, targeteids in rels.iteritems(): + for rtype, targeteids in rels.items(): # targeteids may be a single eid or a list of eids inlined = self.rschema(rtype).inlined try: @@ -253,7 +259,7 @@ source = cnx.repo.system_source self.source = source self.create_eid = cnx.repo.system_source.create_eid - self.time = datetime.now() + self.time = datetime.utcnow() # attributes/relations shared by all entities of the same type self.etype_attrs = [] self.etype_rels = [] @@ -298,7 +304,7 @@ genfunc = self.generate(attr) if genfunc: entity.cw_edited.edited_attribute(attr, genfunc(entity)) - if isinstance(extid, unicode): + if isinstance(extid, text_type): extid = extid.encode('utf-8') return self.source, extid @@ -320,4 +326,3 @@ def gen_owned_by(self, entity): return self._cnx.user.eid - diff -r a4fcee1e9789 -r 19fcce6dc6d1 dataimport/test/test_csv.py --- a/dataimport/test/test_csv.py Thu Mar 24 09:43:25 2016 +0100 +++ b/dataimport/test/test_csv.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,7 +17,7 @@ # with CubicWeb. If not, see . """unittest for cubicweb.dataimport.csv""" -from StringIO import StringIO +from io import BytesIO from logilab.common.testlib import TestCase, unittest_main @@ -27,7 +27,7 @@ class UcsvreaderTC(TestCase): def test_empty_lines_skipped(self): - stream = StringIO('''a,b,c,d, + stream = BytesIO(b'''a,b,c,d, 1,2,3,4, ,,,, ,,,, @@ -45,7 +45,7 @@ list(csv.ucsvreader(stream, skip_empty=False))) def test_skip_first(self): - stream = StringIO('a,b,c,d,\n1,2,3,4,\n') + stream = BytesIO(b'a,b,c,d,\n1,2,3,4,\n') reader = csv.ucsvreader(stream, skipfirst=True, ignore_errors=True) self.assertEqual(list(reader), [[u'1', u'2', u'3', u'4', u'']]) diff -r a4fcee1e9789 -r 19fcce6dc6d1 dataimport/test/test_massive_store.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dataimport/test/test_massive_store.py Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,281 @@ +# -*- coding: utf-8 -*- +# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr -- mailto:contact@logilab.fr +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . 
+"""Massive store test case""" + +import itertools + +from cubicweb.dataimport import ucsvreader +from cubicweb.devtools import testlib, PostgresApptestConfiguration +from cubicweb.devtools import startpgcluster, stoppgcluster +from cubicweb.dataimport.massive_store import MassiveObjectStore, PGHelper + + +def setUpModule(): + startpgcluster(__file__) + + +def tearDownModule(*args): + stoppgcluster(__file__) + + +class MassImportSimpleTC(testlib.CubicWebTC): + configcls = PostgresApptestConfiguration + appid = 'data-massimport' + + def cast(self, _type, value): + try: + return _type(value) + except ValueError: + return None + + def push_geonames_data(self, dumpname, store): + # Push timezones + cnx = store._cnx + for code, gmt, dst, raw_offset in ucsvreader(open(self.datapath('timeZones.txt'), 'rb'), + delimiter='\t'): + cnx.create_entity('TimeZone', code=code, gmt=float(gmt), + dst=float(dst), raw_offset=float(raw_offset)) + timezone_code = dict(cnx.execute('Any C, X WHERE X is TimeZone, X code C')) + # Push data + for ind, infos in enumerate(ucsvreader(open(dumpname, 'rb'), + delimiter='\t', + ignore_errors=True)): + latitude = self.cast(float, infos[4]) + longitude = self.cast(float, infos[5]) + population = self.cast(int, infos[14]) + elevation = self.cast(int, infos[15]) + gtopo = self.cast(int, infos[16]) + feature_class = infos[6] + if len(infos[6]) != 1: + feature_class = None + entity = {'name': infos[1], + 'asciiname': infos[2], + 'alternatenames': infos[3], + 'latitude': latitude, 'longitude': longitude, + 'feature_class': feature_class, + 'alternate_country_code':infos[9], + 'admin_code_3': infos[12], + 'admin_code_4': infos[13], + 'population': population, 'elevation': elevation, + 'gtopo30': gtopo, 'timezone': timezone_code.get(infos[17]), + 'cwuri': u'http://sws.geonames.org/%s/' % int(infos[0]), + 'geonameid': int(infos[0]), + } + store.prepare_insert_entity('Location', **entity) + + def test_autoflush_metadata(self): + with self.admin_access.repo_cnx() as cnx: + crs = cnx.system_sql('SELECT * FROM entities WHERE type=%(t)s', + {'t': 'Location'}) + self.assertEqual(len(crs.fetchall()), 0) + store = MassiveObjectStore(cnx) + store.prepare_insert_entity('Location', name=u'toto') + store.flush() + store.commit() + store.finish() + cnx.commit() + with self.admin_access.repo_cnx() as cnx: + crs = cnx.system_sql('SELECT * FROM entities WHERE type=%(t)s', + {'t': 'Location'}) + self.assertEqual(len(crs.fetchall()), 1) + + def test_massimport_etype_metadata(self): + with self.admin_access.repo_cnx() as cnx: + store = MassiveObjectStore(cnx) + timezone_eid = store.prepare_insert_entity('TimeZone') + store.prepare_insert_entity('Location', timezone=timezone_eid) + store.flush() + store.commit() + eid, etname = cnx.execute('Any X, TN WHERE X timezone TZ, X is T, ' + 'T name TN')[0] + self.assertEqual(cnx.entity_from_eid(eid).cw_etype, etname) + + def test_drop_index(self): + with self.admin_access.repo_cnx() as cnx: + store = MassiveObjectStore(cnx) + cnx.commit() + with self.admin_access.repo_cnx() as cnx: + crs = cnx.system_sql('SELECT indexname FROM pg_indexes') + indexes = [r[0] for r in crs.fetchall()] + self.assertNotIn('entities_pkey', indexes) + self.assertNotIn('unique_entities_extid_idx', indexes) + self.assertNotIn('owned_by_relation_pkey', indexes) + self.assertNotIn('owned_by_relation_to_idx', indexes) + + def test_drop_index_recreation(self): + with self.admin_access.repo_cnx() as cnx: + store = MassiveObjectStore(cnx) + store.finish() + cnx.commit() + with 
self.admin_access.repo_cnx() as cnx: + crs = cnx.system_sql('SELECT indexname FROM pg_indexes') + indexes = [r[0] for r in crs.fetchall()] + self.assertIn('entities_pkey', indexes) + self.assertIn('unique_entities_extid_idx', indexes) + self.assertIn('owned_by_relation_p_key', indexes) + self.assertIn('owned_by_relation_to_idx', indexes) + + def test_eids_seq_range(self): + with self.admin_access.repo_cnx() as cnx: + store = MassiveObjectStore(cnx, eids_seq_range=1000) + store.restart_eid_sequence(50000) + store.prepare_insert_entity('Location', name=u'toto') + store.flush() + cnx.commit() + with self.admin_access.repo_cnx() as cnx: + crs = cnx.system_sql("SELECT * FROM entities_id_seq") + self.assertGreater(crs.fetchone()[0], 50000) + + def test_eid_entity(self): + with self.admin_access.repo_cnx() as cnx: + store = MassiveObjectStore(cnx, eids_seq_range=1000) + store.restart_eid_sequence(50000) + eid = store.prepare_insert_entity('Location', name=u'toto') + store.flush() + self.assertGreater(eid, 50000) + + def test_eid_entity_2(self): + with self.admin_access.repo_cnx() as cnx: + store = MassiveObjectStore(cnx) + store.restart_eid_sequence(50000) + eid = store.prepare_insert_entity('Location', name=u'toto', eid=10000) + store.flush() + self.assertEqual(eid, 10000) + + @staticmethod + def get_db_descr(cnx): + pg_schema = ( + cnx.repo.config.system_source_config.get('db-namespace') + or 'public') + pgh = PGHelper(cnx, pg_schema) + all_tables = cnx.system_sql(''' +SELECT table_name +FROM information_schema.tables +where table_schema = %(s)s''', {'s': pg_schema}).fetchall() + all_tables_descr = {} + for tablename, in all_tables: + all_tables_descr[tablename] = set(pgh.index_list(tablename)).union( + set(pgh.constraint_list(tablename))) + return all_tables_descr + + def test_identical_schema(self): + with self.admin_access.repo_cnx() as cnx: + init_descr = self.get_db_descr(cnx) + with self.admin_access.repo_cnx() as cnx: + store = MassiveObjectStore(cnx) + store.init_etype_table('CWUser') + store.finish() + with self.admin_access.repo_cnx() as cnx: + final_descr = self.get_db_descr(cnx) + self.assertEqual(init_descr, final_descr) + + def test_on_commit_callback(self): + counter = itertools.count() + with self.admin_access.repo_cnx() as cnx: + store = MassiveObjectStore(cnx, on_commit_callback=lambda:next(counter)) + store.prepare_insert_entity('Location', name=u'toto') + store.flush() + store.commit() + self.assertGreaterEqual(next(counter), 1) + + def test_on_rollback_callback(self): + counter = itertools.count() + with self.admin_access.repo_cnx() as cnx: + store = MassiveObjectStore(cnx, on_rollback_callback=lambda *_: next(counter)) + store.prepare_insert_entity('Location', nm='toto') + store.flush() + store.commit() + self.assertGreaterEqual(next(counter), 1) + + def test_slave_mode_indexes(self): + with self.admin_access.repo_cnx() as cnx: + slave_store = MassiveObjectStore(cnx, slave_mode=True) + with self.admin_access.repo_cnx() as cnx: + crs = cnx.system_sql('SELECT indexname FROM pg_indexes') + indexes = [r[0] for r in crs.fetchall()] + self.assertIn('entities_pkey', indexes) + self.assertIn('unique_entities_extid_idx', indexes) + self.assertIn('owned_by_relation_p_key', indexes) + self.assertIn('owned_by_relation_to_idx', indexes) + + def test_slave_mode_exception(self): + with self.admin_access.repo_cnx() as cnx: + master_store = MassiveObjectStore(cnx, slave_mode=False) + slave_store = MassiveObjectStore(cnx, slave_mode=True) + self.assertRaises(RuntimeError, 
slave_store.flush_meta_data) + self.assertRaises(RuntimeError, slave_store.finish) + + def test_simple_insert(self): + with self.admin_access.repo_cnx() as cnx: + store = MassiveObjectStore(cnx) + self.push_geonames_data(self.datapath('geonames.csv'), store) + store.flush() + store.commit() + store.finish() + with self.admin_access.repo_cnx() as cnx: + rset = cnx.execute('Any X WHERE X is Location') + self.assertEqual(len(rset), 4000) + rset = cnx.execute('Any X WHERE X is Location, X timezone T') + self.assertEqual(len(rset), 4000) + + def test_index_building(self): + with self.admin_access.repo_cnx() as cnx: + store = MassiveObjectStore(cnx) + self.push_geonames_data(self.datapath('geonames.csv'), store) + store.flush() + + # Check index + crs = cnx.system_sql('SELECT indexname FROM pg_indexes') + indexes = [r[0] for r in crs.fetchall()] + self.assertNotIn('entities_pkey', indexes) + self.assertNotIn('unique_entities_extid_idx', indexes) + self.assertNotIn('owned_by_relation_p_key', indexes) + self.assertNotIn('owned_by_relation_to_idx', indexes) + + # Cleanup -> index + store.finish() + + # Check index again + crs = cnx.system_sql('SELECT indexname FROM pg_indexes') + indexes = [r[0] for r in crs.fetchall()] + self.assertIn('entities_pkey', indexes) + self.assertIn('unique_entities_extid_idx', indexes) + self.assertIn('owned_by_relation_p_key', indexes) + self.assertIn('owned_by_relation_to_idx', indexes) + + def test_multiple_insert(self): + with self.admin_access.repo_cnx() as cnx: + store = MassiveObjectStore(cnx) + store.init_etype_table('TestLocation') + store.finish() + store = MassiveObjectStore(cnx) + store.init_etype_table('TestLocation') + store.finish() + + def test_multiple_insert_relation(self): + with self.admin_access.repo_cnx() as cnx: + store = MassiveObjectStore(cnx) + store.init_relation_table('used_language') + store.finish() + store = MassiveObjectStore(cnx) + store.init_relation_table('used_language') + store.finish() + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r a4fcee1e9789 -r 19fcce6dc6d1 dataimport/test/test_pgstore.py --- a/dataimport/test/test_pgstore.py Thu Mar 24 09:43:25 2016 +0100 +++ b/dataimport/test/test_pgstore.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,9 +20,11 @@ import datetime as DT +from six import PY2 from logilab.common.testlib import TestCase, unittest_main from cubicweb.dataimport import pgstore +from cubicweb.devtools import testlib class CreateCopyFromBufferTC(TestCase): @@ -31,24 +33,24 @@ def test_convert_none(self): cnvt = pgstore._copyfrom_buffer_convert_None - self.assertEqual('NULL', cnvt(None)) + self.assertEqual(u'NULL', cnvt(None)) def test_convert_number(self): cnvt = pgstore._copyfrom_buffer_convert_number - self.assertEqual('42', cnvt(42)) - self.assertEqual('42', cnvt(42L)) - self.assertEqual('42.42', cnvt(42.42)) + self.assertEqual(u'42', cnvt(42)) + if PY2: + self.assertEqual(u'42', cnvt(long(42))) + self.assertEqual(u'42.42', cnvt(42.42)) def test_convert_string(self): cnvt = pgstore._copyfrom_buffer_convert_string # simple - self.assertEqual('babar', cnvt('babar')) + self.assertEqual(u'babar', cnvt('babar')) # unicode - self.assertEqual('\xc3\xa9l\xc3\xa9phant', cnvt(u'éléphant')) - self.assertEqual('\xe9l\xe9phant', cnvt(u'éléphant', encoding='latin1')) + self.assertEqual(u'éléphant', cnvt(u'éléphant')) # escaping - self.assertEqual('babar\\tceleste\\n', cnvt('babar\tceleste\n')) - self.assertEqual(r'C:\\new\tC:\\test', cnvt('C:\\new\tC:\\test')) + 
self.assertEqual(u'babar\\tceleste\\n', cnvt(u'babar\tceleste\n')) + self.assertEqual(u'C:\\\\new\\tC:\\\\test', cnvt(u'C:\\new\tC:\\test')) def test_convert_date(self): cnvt = pgstore._copyfrom_buffer_convert_date @@ -64,18 +66,19 @@ # test buffer def test_create_copyfrom_buffer_tuple(self): - data = ((42, 42L, 42.42, u'éléphant', DT.date(666, 1, 13), DT.time(6, 6, 6), + l = long if PY2 else int + data = ((42, l(42), 42.42, u'éléphant', DT.date(666, 1, 13), DT.time(6, 6, 6), DT.datetime(666, 6, 13, 6, 6, 6)), - (6, 6L, 6.6, u'babar', DT.date(2014, 1, 14), DT.time(4, 2, 1), + (6, l(6), 6.6, u'babar', DT.date(2014, 1, 14), DT.time(4, 2, 1), DT.datetime(2014, 1, 1, 0, 0, 0))) results = pgstore._create_copyfrom_buffer(data) # all columns - expected = '''42\t42\t42.42\téléphant\t0666-01-13\t06:06:06.000000\t0666-06-13 06:06:06.000000 + expected = u'''42\t42\t42.42\téléphant\t0666-01-13\t06:06:06.000000\t0666-06-13 06:06:06.000000 6\t6\t6.6\tbabar\t2014-01-14\t04:02:01.000000\t2014-01-01 00:00:00.000000''' self.assertMultiLineEqual(expected, results.getvalue()) # selected columns results = pgstore._create_copyfrom_buffer(data, columns=(1, 3, 6)) - expected = '''42\téléphant\t0666-06-13 06:06:06.000000 + expected = u'''42\téléphant\t0666-06-13 06:06:06.000000 6\tbabar\t2014-01-01 00:00:00.000000''' self.assertMultiLineEqual(expected, results.getvalue()) @@ -85,8 +88,18 @@ dict(integer=6, double=6.6, text=u'babar', date=DT.datetime(2014, 1, 1, 0, 0, 0))) results = pgstore._create_copyfrom_buffer(data, ('integer', 'text')) - expected = '''42\téléphant\n6\tbabar''' - self.assertMultiLineEqual(expected, results.getvalue()) + expected = u'''42\téléphant\n6\tbabar''' + self.assertEqual(expected, results.getvalue()) + + +class SQLGenObjectStoreTC(testlib.CubicWebTC): + + def test_prepare_insert_entity(self): + with self.admin_access.repo_cnx() as cnx: + store = pgstore.SQLGenObjectStore(cnx) + eid = store.prepare_insert_entity('CWUser', login=u'toto', + upassword=u'pwd') + self.assertIsNotNone(eid) if __name__ == '__main__': diff -r a4fcee1e9789 -r 19fcce6dc6d1 dataimport/test/test_stores.py --- a/dataimport/test/test_stores.py Thu Mar 24 09:43:25 2016 +0100 +++ b/dataimport/test/test_stores.py Thu Jun 16 14:19:20 2016 +0200 @@ -75,7 +75,7 @@ metagen = stores.MetaGenerator(cnx) # hijack gen_modification_date to ensure we don't go through it metagen.gen_modification_date = None - md = DT.datetime.now() - DT.timedelta(days=1) + md = DT.datetime.utcnow() - DT.timedelta(days=1) entity, rels = metagen.base_etype_dicts('CWUser') entity.cw_edited.update(dict(modification_date=md)) with cnx.ensure_cnx_set: diff -r a4fcee1e9789 -r 19fcce6dc6d1 dataimport/test/unittest_importer.py --- a/dataimport/test/unittest_importer.py Thu Mar 24 09:43:25 2016 +0100 +++ b/dataimport/test/unittest_importer.py Thu Jun 16 14:19:20 2016 +0200 @@ -126,13 +126,26 @@ self.assertEqual(entity.nom, u'Richelieu') self.assertEqual(len(entity.connait), 0) + def test_import_order(self): + """Check import of ext entity in both order""" + with self.admin_access.repo_cnx() as cnx: + importer = self.importer(cnx) + richelieu = ExtEntity('Personne', 3, {'nom': set([u'Richelieu']), + 'enfant': set([4])}) + athos = ExtEntity('Personne', 4, {'nom': set([u'Athos'])}) + importer.import_entities([richelieu, athos]) + cnx.commit() + rset = cnx.execute('Any X WHERE X is Personne, X nom "Richelieu"') + entity = rset.get_entity(0, 0) + self.assertEqual(entity.enfant[0].nom, 'Athos') + def test_update(self): """Check update of ext entity""" with 
self.admin_access.repo_cnx() as cnx: importer = self.importer(cnx) # First import richelieu = ExtEntity('Personne', 11, - {'nom': {u'Richelieu Diacre'}}) + {'nom': set([u'Richelieu Diacre'])}) importer.import_entities([richelieu]) cnx.commit() rset = cnx.execute('Any X WHERE X is Personne') @@ -140,7 +153,7 @@ self.assertEqual(entity.nom, u'Richelieu Diacre') # Second import richelieu = ExtEntity('Personne', 11, - {'nom': {u'Richelieu Cardinal'}}) + {'nom': set([u'Richelieu Cardinal'])}) importer.import_entities([richelieu]) cnx.commit() rset = cnx.execute('Any X WHERE X is Personne') @@ -152,14 +165,14 @@ class UseExtidAsCwuriTC(TestCase): def test(self): - personne = ExtEntity('Personne', 1, {'nom': set([u'de la lune']), - 'prenom': set([u'Jean'])}) + personne = ExtEntity('Personne', b'1', {'nom': set([u'de la lune']), + 'prenom': set([u'Jean'])}) mapping = {} set_cwuri = use_extid_as_cwuri(mapping) list(set_cwuri((personne,))) self.assertIn('cwuri', personne.values) - self.assertEqual(personne.values['cwuri'], set(['1'])) - mapping[1] = 'whatever' + self.assertEqual(personne.values['cwuri'], set([u'1'])) + mapping[b'1'] = 'whatever' personne.values.pop('cwuri') list(set_cwuri((personne,))) self.assertNotIn('cwuri', personne.values) @@ -167,7 +180,7 @@ def extentities_from_csv(fpath): """Yield ExtEntity read from `fpath` CSV file.""" - with open(fpath) as f: + with open(fpath, 'rb') as f: for uri, name, knows in ucsvreader(f, skipfirst=True, skip_empty=False): yield ExtEntity('Personne', uri, {'nom': set([name]), 'connait': set([knows])}) diff -r a4fcee1e9789 -r 19fcce6dc6d1 debian/changelog --- a/debian/changelog Thu Mar 24 09:43:25 2016 +0100 +++ b/debian/changelog Thu Jun 16 14:19:20 2016 +0200 @@ -1,3 +1,21 @@ +cubicweb (3.22.2-1) unstable; urgency=medium + + * new upstream release + + -- Julien Cristau Tue, 23 Feb 2016 11:45:38 +0100 + +cubicweb (3.22.1-1) unstable; urgency=medium + + * new upstream release + + -- Julien Cristau Fri, 12 Feb 2016 10:38:56 +0100 + +cubicweb (3.22.0-1) unstable; urgency=medium + + * new upstream release + + -- Julien Cristau Mon, 04 Jan 2016 17:53:55 +0100 + cubicweb (3.21.6-1) unstable; urgency=medium * new upstream release diff -r a4fcee1e9789 -r 19fcce6dc6d1 debian/control --- a/debian/control Thu Mar 24 09:43:25 2016 +0100 +++ b/debian/control Thu Jun 16 14:19:20 2016 +0200 @@ -3,23 +3,23 @@ Priority: optional Maintainer: Logilab S.A. 
Uploaders: Sylvain Thenault , - Julien Jehannet , Adrien Di Mascio , - Aurélien Campéas , Nicolas Chauvat Build-Depends: debhelper (>= 7), python (>= 2.6), + python-six (>= 1.4.0), python-sphinx, python-logilab-common, python-unittest2 | python (>= 2.7), python-logilab-mtconverter, python-markdown, - python-rql, - python-yams (>= 0.40.0), + python-tz, + python-rql (>= 0.34.0), + python-yams (>= 0.42.0), python-lxml, Standards-Version: 3.9.1 -Homepage: http://www.cubicweb.org +Homepage: https://www.cubicweb.org X-Python-Version: >= 2.6 Package: cubicweb @@ -52,11 +52,12 @@ ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), - python-logilab-database (>= 1.13.0), + python-logilab-database (>= 1.15.0), cubicweb-postgresql-support | cubicweb-mysql-support | python-pysqlite2, - python-passlib + python-passlib, + python-tz, Recommends: cubicweb-documentation (= ${source:Version}), Suggests: @@ -107,7 +108,7 @@ ${python:Depends}, cubicweb-web (= ${source:Version}), cubicweb-ctl (= ${source:Version}), - python-twisted-web + python-twisted-web (<< 16.0.0), Recommends: cubicweb-documentation (= ${source:Version}) Description: twisted-based web interface for the CubicWeb framework @@ -155,11 +156,12 @@ ${python:Depends}, graphviz, gettext, + python-six (>= 1.4.0), python-logilab-mtconverter (>= 0.8.0), python-logilab-common (>= 0.63.1), python-markdown, - python-yams (>= 0.40.0), - python-rql (>= 0.31.2), + python-yams (>= 0.42.0), + python-rql (>= 0.34.0), python-lxml Recommends: python-simpletal (>= 4.0), diff -r a4fcee1e9789 -r 19fcce6dc6d1 debian/cubicweb-documentation.doc-base --- a/debian/cubicweb-documentation.doc-base Thu Mar 24 09:43:25 2016 +0100 +++ b/debian/cubicweb-documentation.doc-base Thu Jun 16 14:19:20 2016 +0200 @@ -5,5 +5,5 @@ Section: Apps/Programming Format: HTML -Index: /usr/share/doc/cubicweb-documentation/index.html -Files: /usr/share/doc/cubicweb-documentation/*.html +Index: /usr/share/doc/cubicweb-documentation/html/index.html +Files: /usr/share/doc/cubicweb-documentation/html/* diff -r a4fcee1e9789 -r 19fcce6dc6d1 debian/watch --- a/debian/watch Thu Mar 24 09:43:25 2016 +0100 +++ b/debian/watch Thu Jun 16 14:19:20 2016 +0200 @@ -1,3 +1,3 @@ version=3 opts=uversionmangle=s/(rc|a|b|c)/~$1/ \ -http://pypi.debian.net/cubicweb/cubicweb-(.+)\.(?:zip|tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz))) +https://pypi.debian.net/cubicweb/cubicweb-(.+)\.(?:zip|tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz))) diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/__init__.py --- a/devtools/__init__.py Thu Mar 24 09:43:25 2016 +0100 +++ b/devtools/__init__.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -16,6 +16,7 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
"""Test tools for cubicweb""" +from __future__ import print_function __docformat__ = "restructuredtext en" @@ -24,17 +25,19 @@ import errno import logging import shutil -import pickle import glob import subprocess import warnings import tempfile import getpass -from hashlib import sha1 # pylint: disable=E0611 +from hashlib import sha1 # pylint: disable=E0611 from datetime import timedelta -from os.path import (abspath, realpath, join, exists, split, isabs, isdir) +from os.path import abspath, join, exists, split, isabs, isdir from functools import partial +from six import text_type +from six.moves import cPickle as pickle + from logilab.common.date import strptime from logilab.common.decorators import cached, clear_cache @@ -92,7 +95,7 @@ DEFAULT_PSQL_SOURCES = DEFAULT_SOURCES.copy() DEFAULT_PSQL_SOURCES['system'] = DEFAULT_SOURCES['system'].copy() DEFAULT_PSQL_SOURCES['system']['db-driver'] = 'postgres' -DEFAULT_PSQL_SOURCES['system']['db-user'] = unicode(getpass.getuser()) +DEFAULT_PSQL_SOURCES['system']['db-user'] = text_type(getpass.getuser()) DEFAULT_PSQL_SOURCES['system']['db-password'] = None def turn_repo_off(repo): @@ -109,7 +112,7 @@ try: repo.close(sessionid) except BadConnectionId: #this is strange ? thread issue ? - print 'XXX unknown session', sessionid + print('XXX unknown session', sessionid) for cnxset in repo.cnxsets: cnxset.close(True) repo.system_source.shutdown() @@ -148,7 +151,7 @@ else: # cube test apphome = abspath('..') self._apphome = apphome - ServerConfiguration.__init__(self, appid) + super(TestServerConfiguration, self).__init__(appid) self.init_log(log_threshold, force=True) # need this, usually triggered by cubicweb-ctl self.load_cwctl_plugins() @@ -162,7 +165,7 @@ return None, None return self.anonymous_credential - def set_anonymous_allowed(self, allowed, anonuser='anon'): + def set_anonymous_allowed(self, allowed, anonuser=u'anon'): if allowed: self.anonymous_credential = (anonuser, anonuser) else: @@ -193,7 +196,7 @@ def sources_file(self): """define in subclasses self.sourcefile if necessary""" if self.sourcefile: - print 'Reading sources from', self.sourcefile + print('Reading sources from', self.sourcefile) sourcefile = self.sourcefile if not isabs(sourcefile): sourcefile = join(self.apphome, sourcefile) @@ -367,7 +370,8 @@ # XXX set a clearer error message ??? backup_coordinates, config_path = self.db_cache[self.db_cache_key(db_id)] # reload the config used to create the database. 
- config = pickle.loads(open(config_path, 'rb').read()) + with open(config_path, 'rb') as f: + config = pickle.load(f) # shutdown repo before changing database content if self._repo is not None: self._repo.turn_repo_off() @@ -399,10 +403,9 @@ def _new_repo(self, config): """Factory method to create a new Repository Instance""" - from cubicweb.repoapi import _get_inmemory_repo config._cubes = None - repo = _get_inmemory_repo(config) - config.repository = lambda x=None: repo + repo = config.repository() + config.repository = lambda vreg=None: repo # extending Repository class repo._has_started = False repo._needs_refresh = False @@ -415,7 +418,7 @@ from cubicweb.repoapi import connect repo = self.get_repo() sources = self.config.read_sources_file() - login = unicode(sources['admin']['login']) + login = text_type(sources['admin']['login']) password = sources['admin']['password'] or 'xxx' cnx = connect(repo, login, password=password) return cnx @@ -464,7 +467,7 @@ dbname, data = data.split('-', 1) db_id, filetype = data.split('.', 1) entries.setdefault((dbname, db_id), {})[filetype] = filepath - for (dbname, db_id), entry in entries.iteritems(): + for (dbname, db_id), entry in entries.items(): # apply necessary transformation from the driver value = self.process_cache_entry(directory, dbname, db_id, entry) assert 'config' in entry @@ -494,7 +497,7 @@ if test_db_id is DEFAULT_EMPTY_DB_ID: self.init_test_database() else: - print 'Building %s for database %s' % (test_db_id, self.dbname) + print('Building %s for database %s' % (test_db_id, self.dbname)) self.build_db_cache(DEFAULT_EMPTY_DB_ID) self.restore_database(DEFAULT_EMPTY_DB_ID) repo = self.get_repo(startup=True) @@ -537,13 +540,13 @@ def startpgcluster(pyfile): """Start a postgresql cluster next to pyfile""" - datadir = join(os.path.dirname(pyfile), 'data', + datadir = join(os.path.dirname(pyfile), 'data', 'database', 'pgdb-%s' % os.path.splitext(os.path.basename(pyfile))[0]) if not exists(datadir): try: subprocess.check_call(['initdb', '-D', datadir, '-E', 'utf-8', '--locale=C']) - except OSError, err: + except OSError as err: if err.errno == errno.ENOENT: raise OSError('"initdb" could not be found. ' 'You should add the postgresql bin folder to your PATH ' @@ -562,7 +565,11 @@ subprocess.check_call(['pg_ctl', 'start', '-w', '-D', datadir, '-o', options], env=env) - except OSError, err: + except OSError as err: + try: + os.rmdir(sockdir) + except OSError: + pass if err.errno == errno.ENOENT: raise OSError('"pg_ctl" could not be found. 
' 'You should add the postgresql bin folder to your PATH ' @@ -572,9 +579,13 @@ def stoppgcluster(pyfile): """Kill the postgresql cluster running next to pyfile""" - datadir = join(os.path.dirname(pyfile), 'data', + datadir = join(os.path.dirname(pyfile), 'data', 'database', 'pgdb-%s' % os.path.splitext(os.path.basename(pyfile))[0]) subprocess.call(['pg_ctl', 'stop', '-D', datadir, '-m', 'fast']) + try: + os.rmdir(DEFAULT_PSQL_SOURCES['system']['db-host']) + except OSError: + pass class PostgresTestDataBaseHandler(TestDataBaseHandler): @@ -678,7 +689,7 @@ @property def _config_id(self): - return sha1(self.config.apphome).hexdigest()[:10] + return sha1(self.config.apphome.encode('utf-8')).hexdigest()[:10] def _backup_name(self, db_id): # merge me with parent backup_name = '_'.join(('cache', self._config_id, self.dbname, db_id)) @@ -796,11 +807,6 @@ # traceback.print_stack(file=backup_stack_file) return backup_file - def _new_repo(self, config): - repo = super(SQLiteTestDataBaseHandler, self)._new_repo(config) - install_sqlite_patch(repo.querier) - return repo - def _restore_database(self, backup_coordinates, _config): # remove database file if it exists ? dbfile = self.absolute_dbfile() @@ -820,46 +826,6 @@ atexit.register(SQLiteTestDataBaseHandler._cleanup_all_tmpdb) -def install_sqlite_patch(querier): - """This patch hotfixes the following sqlite bug : - - http://www.sqlite.org/cvstrac/tktview?tn=1327,33 - (some dates are returned as strings rather thant date objects) - """ - if hasattr(querier.__class__, '_devtools_sqlite_patched'): - return # already monkey patched - def wrap_execute(base_execute): - def new_execute(*args, **kwargs): - rset = base_execute(*args, **kwargs) - if rset.description: - found_date = False - for row, rowdesc in zip(rset, rset.description): - for cellindex, (value, vtype) in enumerate(zip(row, rowdesc)): - if vtype in ('Date', 'Datetime') and type(value) is unicode: - found_date = True - value = value.rsplit('.', 1)[0] - try: - row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S') - except Exception: - row[cellindex] = strptime(value, '%Y-%m-%d') - if vtype == 'Time' and type(value) is unicode: - found_date = True - try: - row[cellindex] = strptime(value, '%H:%M:%S') - except Exception: - # DateTime used as Time? - row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S') - if vtype == 'Interval' and type(value) is int: - found_date = True - row[cellindex] = timedelta(0, value, 0) # XXX value is in number of seconds? - if not found_date: - break - return rset - return new_execute - querier.__class__.execute = wrap_execute(querier.__class__.execute) - querier.__class__._devtools_sqlite_patched = True - - - HANDLERS = {} def register_handler(handlerkls, overwrite=False): @@ -882,7 +848,7 @@ We only keep one repo in cache to prevent too many objects from staying alive (database handler holds a reference to a repository). As at the moment a new handler is created for each TestCase class and all test methods are executed - sequentialy whithin this class, there should not have more cache miss that + sequentially within this class, there should not be more cache misses than if we had a wider cache, as once a Handler stops being used it won't be used again.
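The _config_id hunk above is a common py3 porting pattern: hashlib only accepts bytes, so text must be encoded before hashing. A short sketch with a hypothetical path value:

    # hashlib.sha1() rejects unicode under Python 3, hence the .encode('utf-8')
    # in the _config_id hunk above; the path below is hypothetical.
    from hashlib import sha1

    apphome = u'/srv/test/apphome'
    config_id = sha1(apphome.encode('utf-8')).hexdigest()[:10]
    assert len(config_id) == 10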
""" @@ -947,5 +913,3 @@ handler = get_test_db_handler(config) handler.build_db_cache() return handler.get_repo_and_cnx() - - diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/devctl.py --- a/devtools/devctl.py Thu Mar 24 09:43:25 2016 +0100 +++ b/devtools/devctl.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,6 +18,7 @@ """additional cubicweb-ctl commands and command handlers for cubicweb and cubicweb's cubes development """ +from __future__ import print_function __docformat__ = "restructuredtext en" @@ -25,10 +26,12 @@ # possible (for cubicweb-ctl reactivity, necessary for instance for usable bash # completion). So import locally in command helpers. import sys -from datetime import datetime +from datetime import datetime, date from os import mkdir, chdir, path as osp from warnings import warn +from six.moves import input + from logilab.common import STD_BLACKLIST from cubicweb.__pkginfo__ import version as cubicwebversion @@ -41,6 +44,11 @@ from cubicweb.server.serverconfig import ServerConfiguration +STD_BLACKLIST = set(STD_BLACKLIST) +STD_BLACKLIST.add('.tox') +STD_BLACKLIST.add('test') + + class DevConfiguration(ServerConfiguration, WebConfiguration): """dummy config to get full library schema and appobjects for a cube or for cubicweb (without a home) @@ -83,7 +91,7 @@ def cleanup_sys_modules(config): # cleanup sys.modules, required when we're updating multiple cubes - for name, mod in sys.modules.items(): + for name, mod in list(sys.modules.items()): if mod is None: # duh ? logilab.common.os for instance del sys.modules[name] @@ -127,7 +135,7 @@ from cubicweb.i18n import add_msg from cubicweb.schema import NO_I18NCONTEXT, CONSTRAINTS w('# schema pot file, generated on %s\n' - % datetime.now().strftime('%Y-%m-%d %H:%M:%S')) + % datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')) w('# \n') w('# singular and plural forms for each entity type\n') w('\n') @@ -250,7 +258,7 @@ # bw compat, necessary until all translation of relation are # done properly... add_msg(w, '%s_object' % rtype) - for rdef in rschema.rdefs.itervalues(): + for rdef in rschema.rdefs.values(): if not rdef.description or rdef.description in done: continue if (librschema is None or @@ -267,7 +275,7 @@ for reg, objdict in vreg.items(): if reg in ('boxes', 'contentnavigation'): continue - for objects in objdict.itervalues(): + for objects in objdict.values(): for obj in objects: objid = '%s_%s' % (reg, obj.__regid__) if objid in done: @@ -314,21 +322,21 @@ from cubicweb.i18n import extract_from_tal, execute2 tempdir = tempfile.mkdtemp(prefix='cw-') cwi18ndir = WebConfiguration.i18n_lib_dir() - print '-> extract messages:', - print 'schema', + print('-> extract messages:', end=' ') + print('schema', end=' ') schemapot = osp.join(tempdir, 'schema.pot') potfiles = [schemapot] potfiles.append(schemapot) # explicit close necessary else the file may not be yet flushed when # we'll using it below - schemapotstream = file(schemapot, 'w') + schemapotstream = open(schemapot, 'w') generate_schema_pot(schemapotstream.write, cubedir=None) schemapotstream.close() - print 'TAL', + print('TAL', end=' ') tali18nfile = osp.join(tempdir, 'tali18n.py') extract_from_tal(find(osp.join(BASEDIR, 'web'), ('.py', '.pt')), tali18nfile) - print '-> generate .pot files.' 
+ print('-> generate .pot files.') pyfiles = get_module_files(BASEDIR) pyfiles += globfind(osp.join(BASEDIR, 'misc', 'migration'), '*.py') schemafiles = globfind(osp.join(BASEDIR, 'schemas'), '*.py') @@ -349,12 +357,12 @@ if osp.exists(potfile): potfiles.append(potfile) else: - print '-> WARNING: %s file was not generated' % potfile - print '-> merging %i .pot files' % len(potfiles) + print('-> WARNING: %s file was not generated' % potfile) + print('-> merging %i .pot files' % len(potfiles)) cubicwebpot = osp.join(tempdir, 'cubicweb.pot') cmd = ['msgcat', '-o', cubicwebpot] + potfiles execute2(cmd) - print '-> merging main pot file with existing translations.' + print('-> merging main pot file with existing translations.') chdir(cwi18ndir) toedit = [] for lang in CubicWebNoAppConfiguration.cw_languages(): @@ -368,10 +376,10 @@ # cleanup rm(tempdir) # instructions for the next steps - print '-> regenerated CubicWeb\'s .po catalogs.' - print '\nYou can now edit the following files:' - print '* ' + '\n* '.join(toedit) - print 'when you are done, run "cubicweb-ctl i18ncube yourcube".' + print('-> regenerated CubicWeb\'s .po catalogs.') + print('\nYou can now edit the following files:') + print('* ' + '\n* '.join(toedit)) + print('When you are done, run "cubicweb-ctl i18ncube yourcube".') class UpdateCubeCatalogCommand(Command): @@ -398,25 +406,25 @@ from subprocess import CalledProcessError for cubedir in cubes: if not osp.isdir(cubedir): - print '-> ignoring %s that is not a directory.' % cubedir + print('-> ignoring %s that is not a directory.' % cubedir) continue try: toedit = update_cube_catalogs(cubedir) except CalledProcessError as exc: - print '\n*** error while updating catalogs for cube', cubedir - print 'cmd:\n%s' % exc.cmd - print 'stdout:\n%s\nstderr:\n%s' % exc.data + print('\n*** error while updating catalogs for cube', cubedir) + print('cmd:\n%s' % exc.cmd) + print('stdout:\n%s\nstderr:\n%s' % exc.data) except Exception: import traceback traceback.print_exc() - print '*** error while updating catalogs for cube', cubedir + print('*** error while updating catalogs for cube', cubedir) return False else: # instructions for the next steps if toedit: - print '-> regenerated .po catalogs for cube %s.' % cubedir - print '\nYou can now edit the following files:' - print '* ' + '\n* '.join(toedit) + print('-> regenerated .po catalogs for cube %s.'
% cubedir) + print('\nYou can now edit the following files:') + print('* ' + '\n* '.join(toedit)) print ('When you are done, run "cubicweb-ctl i18ninstance ' '<yourinstance>" to see changes in your instances.') return True @@ -429,7 +437,7 @@ from cubicweb.i18n import extract_from_tal, execute2 cube = osp.basename(osp.normpath(cubedir)) tempdir = tempfile.mkdtemp() - print underline_title('Updating i18n catalogs for cube %s' % cube) + print(underline_title('Updating i18n catalogs for cube %s' % cube)) chdir(cubedir) if osp.exists(osp.join('i18n', 'entities.pot')): warn('entities.pot is deprecated, rename file to static-messages.pot (%s)' @@ -439,20 +447,20 @@ potfiles = [osp.join('i18n', 'static-messages.pot')] else: potfiles = [] - print '-> extracting messages:', - print 'schema', + print('-> extracting messages:', end=' ') + print('schema', end=' ') schemapot = osp.join(tempdir, 'schema.pot') potfiles.append(schemapot) # explicit close necessary else the file may not yet be flushed when # we use it below - schemapotstream = file(schemapot, 'w') + schemapotstream = open(schemapot, 'w') generate_schema_pot(schemapotstream.write, cubedir) schemapotstream.close() - print 'TAL', + print('TAL', end=' ') tali18nfile = osp.join(tempdir, 'tali18n.py') - ptfiles = find('.', ('.py', '.pt'), blacklist=STD_BLACKLIST+('test',)) + ptfiles = find('.', ('.py', '.pt'), blacklist=STD_BLACKLIST) extract_from_tal(ptfiles, tali18nfile) - print 'Javascript' + print('Javascript') jsfiles = [jsfile for jsfile in find('.', '.js') if osp.basename(jsfile).startswith('cub')] if jsfiles: @@ -463,9 +471,9 @@ # no pot file created if there are no strings to translate if osp.exists(tmppotfile): potfiles.append(tmppotfile) - print '-> creating cube-specific catalog' + print('-> creating cube-specific catalog') tmppotfile = osp.join(tempdir, 'generated.pot') - cubefiles = find('.', '.py', blacklist=STD_BLACKLIST+('test',)) + cubefiles = find('.', '.py', blacklist=STD_BLACKLIST) cubefiles.append(tali18nfile) cmd = ['xgettext', '--no-location', '--omit-header', '-k_', '-o', tmppotfile] cmd.extend(cubefiles) @@ -473,20 +481,20 @@ if osp.exists(tmppotfile): # doesn't exist if no translation string was found potfiles.append(tmppotfile) potfile = osp.join(tempdir, 'cube.pot') - print '-> merging %i .pot files' % len(potfiles) + print('-> merging %i .pot files' % len(potfiles)) cmd = ['msgcat', '-o', potfile] cmd.extend(potfiles) execute2(cmd) if not osp.exists(potfile): - print 'no message catalog for cube', cube, 'nothing to translate' + print('no message catalog for cube', cube, 'nothing to translate') # cleanup rm(tempdir) return () - print '-> merging main pot file with existing translations:', + print('-> merging main pot file with existing translations:', end=' ') chdir('i18n') toedit = [] for lang in CubicWebNoAppConfiguration.cw_languages(): - print lang, + print(lang, end=' ') cubepo = '%s.po' % lang if not osp.exists(cubepo): shutil.copy(potfile, cubepo) @@ -496,7 +504,7 @@ ensure_fs_mode(cubepo) shutil.move('%snew' % cubepo, cubepo) toedit.append(osp.abspath(cubepo)) - print + print() # cleanup rm(tempdir) return toedit @@ -620,7 +628,7 @@ " Please specify it using the --directory option") cubesdir = cubespath[0] if not osp.isdir(cubesdir): - print "-> creating cubes directory", cubesdir + print("-> creating cubes directory", cubesdir) try: mkdir(cubesdir) except OSError as err: @@ -632,7 +640,7 @@ skeldir = osp.join(BASEDIR, 'skeleton') default_name = 'cubicweb-%s' % cubename.lower().replace('_', '-') if verbose: - distname = 
raw_input('Debian name for your cube ? [%s]): ' + distname = input('Debian name for your cube? [%s]: ' % default_name).strip() if not distname: distname = default_name @@ -644,12 +652,13 @@ if not re.match('[a-z][-a-z0-9]*$', distname): raise BadCommandUsage( 'cube distname should be a valid debian package name') - longdesc = shortdesc = raw_input( + longdesc = shortdesc = input( 'Enter a short description for your cube: ') if verbose: - longdesc = raw_input( + longdesc = input( 'Enter a long description (leave empty to reuse the short one): ') - dependencies = {'cubicweb': '>= %s' % cubicwebversion} + dependencies = {'cubicweb': '>= %s' % cubicwebversion, + 'six': '>= 1.4.0',} if verbose: dependencies.update(self._ask_for_dependencies()) context = {'cubename' : cubename, @@ -658,7 +667,7 @@ 'longdesc' : longdesc or shortdesc, 'dependencies' : dependencies, 'version' : cubicwebversion, - 'year' : str(datetime.now().year), + 'year' : str(date.today().year), 'author': self['author'], 'author-email': self['author-email'], 'author-web-site': self['author-web-site'], @@ -681,7 +690,7 @@ if answer == 'y': depcubes.append(cube) if answer == 'type': - depcubes = splitstrip(raw_input('type dependencies: ')) + depcubes = splitstrip(input('type dependencies: ')) break elif answer == 'skip': break @@ -710,7 +719,7 @@ requests = {} for filepath in args: try: - stream = file(filepath) + stream = open(filepath) except OSError as ex: raise BadCommandUsage("can't open rql log file %s: %s" % (filepath, ex)) @@ -731,17 +740,17 @@ except Exception as exc: sys.stderr.write('Line %s: %s (%s)\n' % (lineno, exc, line)) stat = [] - for rql, times in requests.iteritems(): + for rql, times in requests.items(): stat.append( (sum(time[0] for time in times), sum(time[1] for time in times), len(times), rql) ) stat.sort() stat.reverse() total_time = sum(clocktime for clocktime, cputime, occ, rql in stat) * 0.01 - print 'Percentage;Cumulative Time (clock);Cumulative Time (CPU);Occurences;Query' + print('Percentage;Cumulative Time (clock);Cumulative Time (CPU);Occurrences;Query') for clocktime, cputime, occ, rql in stat: - print '%.2f;%.2f;%.2f;%s;%s' % (clocktime/total_time, clocktime, - cputime, occ, rql) + print('%.2f;%.2f;%.2f;%s;%s' % (clocktime/total_time, clocktime, + cputime, occ, rql)) class GenerateSchema(Command): diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/fake.py --- a/devtools/fake.py Thu Mar 24 09:43:25 2016 +0100 +++ b/devtools/fake.py Thu Jun 16 14:19:20 2016 +0200 @@ -22,6 +22,8 @@ from contextlib import contextmanager +from six import string_types + from logilab.database import get_db_helper from cubicweb.req import RequestSessionBase @@ -91,7 +93,7 @@ def set_request_header(self, header, value, raw=False): """set an incoming HTTP header (for test purpose only)""" - if isinstance(value, basestring): + if isinstance(value, string_types): value = [value] if raw: # adding encoded header is important, else page content @@ -110,7 +112,7 @@ def build_url_params(self, **kwargs): # overridden to get predictable results args = [] - for param, values in sorted(kwargs.iteritems()): + for param, values in sorted(kwargs.items()): if not isinstance(values, (list, tuple)): values = (values,) for value in values: diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/fill.py --- a/devtools/fill.py Thu Mar 24 09:43:25 2016 +0100 +++ b/devtools/fill.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,6 +17,7 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
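All the raw_input/input replacements above follow one pattern: import the py3 name from six.moves and call it unconditionally. In sketch form, with ask() as a hypothetical helper mirroring the prompt-with-default idiom used by the newcube command:

    # six.moves.input resolves to raw_input on py2 and input on py3.
    from six.moves import input

    def ask(prompt, default):
        # hypothetical helper, not part of the patch
        answer = input('%s [%s]: ' % (prompt, default)).strip()
        return answer or default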
"""This modules defines func / methods for creating test repositories""" +from __future__ import print_function __docformat__ = "restructuredtext en" @@ -25,6 +26,10 @@ from copy import deepcopy from datetime import datetime, date, time, timedelta from decimal import Decimal +import inspect + +from six import text_type, add_metaclass +from six.moves import range from logilab.common import attrdict from logilab.mtconverter import xml_escape @@ -37,6 +42,9 @@ from cubicweb.schema import RQLConstraint def custom_range(start, stop, step): + if start == stop: + yield start + return while start < stop: yield start start += step @@ -173,7 +181,7 @@ generate_tztime = generate_time # XXX implementation should add a timezone def generate_bytes(self, entity, attrname, index, format=None): - fakefile = Binary("%s%s" % (attrname, index)) + fakefile = Binary(("%s%s" % (attrname, index)).encode('ascii')) fakefile.filename = u"file_%s" % attrname return fakefile @@ -209,8 +217,10 @@ minvalue = maxvalue = None for cst in self.eschema.rdef(attrname).constraints: if isinstance(cst, IntervalBoundConstraint): - minvalue = self._actual_boundary(entity, attrname, cst.minvalue) - maxvalue = self._actual_boundary(entity, attrname, cst.maxvalue) + if cst.minvalue is not None: + minvalue = self._actual_boundary(entity, attrname, cst.minvalue) + if cst.maxvalue is not None: + maxvalue = self._actual_boundary(entity, attrname, cst.maxvalue) elif isinstance(cst, BoundaryConstraint): if cst.operator[0] == '<': maxvalue = self._actual_boundary(entity, attrname, cst.boundary) @@ -224,7 +234,7 @@ """ for cst in self.eschema.rdef(attrname).constraints: if isinstance(cst, StaticVocabularyConstraint): - return unicode(choice(cst.vocabulary())) + return text_type(choice(cst.vocabulary())) return None # XXX nothing to do here @@ -254,13 +264,15 @@ for attrname, attrvalue in classdict.items(): if callable(attrvalue): if attrname.startswith('generate_') and \ - attrvalue.func_code.co_argcount < 2: + len(inspect.getargspec(attrvalue).args) < 2: raise TypeError('generate_xxx must accept at least 1 argument') setattr(_ValueGenerator, attrname, attrvalue) return type.__new__(mcs, name, bases, classdict) + +@add_metaclass(autoextend) class ValueGenerator(_ValueGenerator): - __metaclass__ = autoextend + pass def _default_choice_func(etype, attrname): @@ -286,7 +298,7 @@ returns acceptable values for this attribute """ queries = [] - for index in xrange(entity_num): + for index in range(entity_num): restrictions = [] args = {} for attrname, value in make_entity(etype, schema, vreg, index, choice_func).items(): @@ -347,7 +359,7 @@ fmt = vreg.property_value('ui.float-format') value = fmt % value else: - value = unicode(value) + value = text_type(value) return entity @@ -363,7 +375,7 @@ rql += ', %s is %s' % (selectvar, objtype) rset = cnx.execute(rql) except Exception: - print "could restrict eid_list with given constraints (%r)" % constraints + print("could restrict eid_list with given constraints (%r)" % constraints) return [] return set(eid for eid, in rset.rows) @@ -508,8 +520,8 @@ break else: # FIXME: 20 should be read from config - subjeidsiter = [choice(tuple(subjeids)) for i in xrange(min(len(subjeids), 20))] - objeidsiter = [choice(tuple(objeids)) for i in xrange(min(len(objeids), 20))] + subjeidsiter = [choice(tuple(subjeids)) for i in range(min(len(subjeids), 20))] + objeidsiter = [choice(tuple(objeids)) for i in range(min(len(objeids), 20))] for subjeid, objeid in zip(subjeidsiter, objeidsiter): if subjeid != objeid and not 
(subjeid, objeid) in used: used.add( (subjeid, objeid) ) diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/htmlparser.py --- a/devtools/htmlparser.py Thu Mar 24 09:43:25 2016 +0100 +++ b/devtools/htmlparser.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,7 +20,7 @@ import re import sys from xml import sax -from cStringIO import StringIO +from io import BytesIO from lxml import etree @@ -33,7 +33,7 @@ ERR_COUNT = 0 -_REM_SCRIPT_RGX = re.compile(r"<script[^>]*>.*?</script>", re.U|re.M|re.I|re.S) +_REM_SCRIPT_RGX = re.compile(br"<script[^>]*>.*?</script>", re.M|re.I|re.S) def _remove_script_tags(data): """Remove the script (usually javascript) tags to help the lxml XMLParser / HTMLParser do their job. Without that, they choke on @@ -70,7 +70,7 @@ # # using that, we'll miss most actual validation error we want to # catch. For now, use dumb regexp - return _REM_SCRIPT_RGX.sub('', data) + return _REM_SCRIPT_RGX.sub(b'', data) class Validator(object): @@ -164,10 +164,10 @@ def _parse(self, data): inpsrc = sax.InputSource() - inpsrc.setByteStream(StringIO(data)) + inpsrc.setByteStream(BytesIO(data)) try: self._parser.parse(inpsrc) - except sax.SAXParseException, exc: + except sax.SAXParseException as exc: new_exc = AssertionError(u'invalid document: %s' % exc) new_exc.position = (exc._linenum, exc._colnum) raise new_exc @@ -209,7 +209,7 @@ def matching_nodes(self, tag, **attrs): for elt in self.etree.iterfind(self._iterstr(tag)): eltattrs = elt.attrib - for attr, value in attrs.iteritems(): + for attr, value in attrs.items(): try: if eltattrs[attr] != value: break diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/httptest.py --- a/devtools/httptest.py Thu Mar 24 09:43:25 2016 +0100 +++ b/devtools/httptest.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,17 +18,18 @@ """this module contains base classes and utilities for integration with running http server """ +from __future__ import print_function + __docformat__ = "restructuredtext en" import random import threading import socket -import httplib -from urlparse import urlparse -from twisted.internet import reactor, error +from six.moves import range, http_client +from six.moves.urllib.parse import urlparse -from cubicweb.etwist.server import run + from cubicweb.devtools.testlib import CubicWebTC from cubicweb.devtools import ApptestConfiguration @@ -61,34 +62,14 @@ raise RuntimeError('get_available_port([ports_range]) cannot find an available port') -class CubicWebServerConfig(ApptestConfiguration): - """basic configuration class for configuring test server - - Class attributes: - - * `ports_range`: list giving range of http ports to test (range(7000, 8000) - by default). The first port found as available in `ports_range` will be - used to launch the test web server. - +class CubicWebServerTC(CubicWebTC): + """Class for running a Twisted-based test web server. """ ports_range = range(7000, 8000) - def default_base_url(self): - port = self['port'] or get_available_port(self.ports_range) - self.global_set_option('port', port) # force rewrite here - return 'http://127.0.0.1:%d/' % self['port'] - - - -class CubicWebServerTC(CubicWebTC): - """Class for running test web server. See :class:`CubicWebServerConfig`.
- - Class attributes: - * anonymous_allowed: flag telling if anonymous browsing should be allowed - """ - configcls = CubicWebServerConfig - def start_server(self): + from twisted.internet import reactor + from cubicweb.etwist.server import run # use a semaphore to avoid starting test while the http server isn't # fully initialized semaphore = threading.Semaphore(0) @@ -110,12 +91,13 @@ # pre-init utils connection parseurl = urlparse(self.config['base-url']) assert parseurl.port == self.config['port'], (self.config['base-url'], self.config['port']) - self._web_test_cnx = httplib.HTTPConnection(parseurl.hostname, - parseurl.port) + self._web_test_cnx = http_client.HTTPConnection(parseurl.hostname, + parseurl.port) self._ident_cookie = None def stop_server(self, timeout=15): """Stop the webserver, waiting for the thread to return""" + from twisted.internet import reactor if self._web_test_cnx is None: self.web_logout() self._web_test_cnx.close() @@ -139,7 +121,7 @@ passwd = user response = self.web_get("login?__login=%s&__password=%s" % (user, passwd)) - assert response.status == httplib.SEE_OTHER, response.status + assert response.status == http_client.SEE_OTHER, response.status self._ident_cookie = response.getheader('Set-Cookie') assert self._ident_cookie return True @@ -151,7 +133,7 @@ self._ident_cookie = None def web_request(self, path='', method='GET', body=None, headers=None): - """Return an httplib.HTTPResponse object for the specified path + """Return an http_client.HTTPResponse object for the specified path Use stored credentials if available. """ @@ -171,12 +153,18 @@ def setUp(self): super(CubicWebServerTC, self).setUp() + port = self.config['port'] or get_available_port(self.ports_range) + self.config.global_set_option('port', port) # force rewrite here + self.config.global_set_option('base-url', 'http://127.0.0.1:%d/' % port) + # call load_configuration again to let the config reset its datadir_url + self.config.load_configuration() self.start_server() def tearDown(self): + from twisted.internet import error try: self.stop_server() except error.ReactorNotRunning as err: # Server could be launched manually - print err + print(err) super(CubicWebServerTC, self).tearDown() diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/instrument.py --- a/devtools/instrument.py Thu Mar 24 09:43:25 2016 +0100 +++ b/devtools/instrument.py Thu Jun 16 14:19:20 2016 +0200 @@ -14,6 +14,7 @@ # You should have received a copy of the GNU Lesser General Public License along # with this program. If not, see <http://www.gnu.org/licenses/>. """Instrumentation utilities""" +from __future__ import print_function import os @@ -45,10 +46,10 @@ return _COLORS[key] def warn(msg, *args): - print 'WARNING: %s' % (msg % args) + print('WARNING: %s' % (msg % args)) def info(msg): - print 'INFO: ' + msg + print('INFO: ' + msg) class PropagationAnalyzer(object): @@ -185,7 +186,7 @@ def add_colors_legend(self, graph): """Add a legend of used colors to the graph.""" - for package, color in sorted(_COLORS.iteritems()): + for package, color in sorted(_COLORS.items()): graph.add_node(package, color=color, fontcolor=color, shape='record') diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/qunit.py --- a/devtools/qunit.py Thu Mar 24 09:43:25 2016 +0100 +++ b/devtools/qunit.py Thu Jun 16 14:19:20 2016 +0200 @@ -15,53 +15,28 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
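The setUp hunk above now allocates the HTTP port at test time through get_available_port. A sketch of one plausible probing strategy; the helper name and the connection-refused heuristic are assumptions of this sketch, not necessarily what the real helper does:

    # Hypothetical re-statement of port probing; probe_free_port is not the
    # actual helper, and treating a refused connection as "free" is an assumption.
    import random
    import socket

    def probe_free_port(ports_range):
        ports = list(ports_range)
        random.shuffle(ports)
        for port in ports:
            try:
                sock = socket.create_connection(('localhost', port), timeout=1)
            except socket.error:
                return port  # nothing listening: consider the port available
            sock.close()
        raise RuntimeError('no available port in range')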
+from __future__ import absolute_import import os, os.path as osp -from tempfile import mkdtemp, NamedTemporaryFile, TemporaryFile -import tempfile -from Queue import Queue, Empty -from subprocess import Popen, check_call, CalledProcessError -from shutil import rmtree, copy as copyfile -from uuid import uuid4 +import errno +from tempfile import mkdtemp +from subprocess import Popen, PIPE, STDOUT + +from six.moves.queue import Queue, Empty # imported by default to simplify further import statements from logilab.common.testlib import unittest_main, with_tempdir, InnerTest, Tags -from logilab.common.shellutils import getlogin +import webtest.http import cubicweb from cubicweb.view import View from cubicweb.web.controller import Controller from cubicweb.web.views.staticcontrollers import StaticFileController, STATIC_CONTROLLERS -from cubicweb.devtools.httptest import CubicWebServerTC - - -class VerboseCalledProcessError(CalledProcessError): - - def __init__(self, returncode, command, stdout, stderr): - super(VerboseCalledProcessError, self).__init__(returncode, command) - self.stdout = stdout - self.stderr = stderr - - def __str__(self): - str = [ super(VerboseCalledProcessError, self).__str__()] - if self.stdout.strip(): - str.append('******************') - str.append('* process stdout *') - str.append('******************') - str.append(self.stdout) - if self.stderr.strip(): - str.append('******************') - str.append('* process stderr *') - str.append('******************') - str.append(self.stderr) - return '\n'.join(str) - +from cubicweb.devtools import webtest as cwwebtest class FirefoxHelper(object): - profile_name_mask = 'PYTEST_PROFILE_%(uid)s' - def __init__(self, url=None): self._process = None self._profile_dir = mkdtemp(prefix='cwtest-ffxprof-') @@ -70,6 +45,17 @@ self.firefox_cmd = [osp.join(osp.dirname(__file__), 'data', 'xvfb-run.sh'), '-a', '-s', '-noreset -screen 0 800x600x24'] + self.firefox_cmd + def test(self): + try: + proc = Popen(['firefox', '--help'], stdout=PIPE, stderr=STDOUT) + stdout, _ = proc.communicate() + return proc.returncode == 0, stdout + except OSError as exc: + if exc.errno == errno.ENOENT: + msg = '[%s] %s' % (errno.errorcode[exc.errno], exc.strerror) + return False, msg + raise + def start(self, url): self.stop() cmd = self.firefox_cmd + ['-silent', '--profile', self._profile_dir, @@ -88,60 +74,46 @@ self.stop() -class QUnitTestCase(CubicWebServerTC): +class QUnitTestCase(cwwebtest.CubicWebTestTC): - tags = CubicWebServerTC.tags | Tags(('qunit',)) + tags = cwwebtest.CubicWebTestTC.tags | Tags(('qunit',)) # testfile, (dep_a, dep_b) all_js_tests = () def setUp(self): - self.config.global_set_option('access-control-allow-origin', '*') super(QUnitTestCase, self).setUp() self.test_queue = Queue() class MyQUnitResultController(QUnitResultController): tc = self test_queue = self.test_queue self._qunit_controller = MyQUnitResultController - self.vreg.register(MyQUnitResultController) - self.vreg.register(QUnitView) - self.vreg.register(CWSoftwareRootStaticController) + self.webapp.app.appli.vreg.register(MyQUnitResultController) + self.webapp.app.appli.vreg.register(QUnitView) + self.webapp.app.appli.vreg.register(CWDevtoolsStaticController) + self.server = webtest.http.StopableWSGIServer.create(self.webapp.app) + self.config.global_set_option('base-url', self.server.application_url) def tearDown(self): + self.server.shutdown() + self.webapp.app.appli.vreg.unregister(self._qunit_controller) + self.webapp.app.appli.vreg.unregister(QUnitView) + 
self.webapp.app.appli.vreg.unregister(CWDevtoolsStaticController) super(QUnitTestCase, self).tearDown() - self.vreg.unregister(self._qunit_controller) - self.vreg.unregister(QUnitView) - self.vreg.unregister(CWSoftwareRootStaticController) - - def abspath(self, path): - """use self.__module__ to build absolute path if necessary""" - if not osp.isabs(path): - dirname = osp.dirname(__import__(self.__module__).__file__) - return osp.abspath(osp.join(dirname,path)) - return path def test_javascripts(self): for args in self.all_js_tests: - test_file = self.abspath(args[0]) + self.assertIn(len(args), (1, 2)) + test_file = args[0] if len(args) > 1: - depends = [self.abspath(dep) for dep in args[1]] + depends = args[1] else: depends = () - if len(args) > 2: - data = [self.abspath(data) for data in args[2]] - else: - data = () - for js_test in self._test_qunit(test_file, depends, data): + for js_test in self._test_qunit(test_file, depends): yield js_test @with_tempdir - def _test_qunit(self, test_file, depends=(), data_files=(), timeout=10): - assert osp.exists(test_file), test_file - for dep in depends: - assert osp.exists(dep), dep - for data in data_files: - assert osp.exists(data), data - + def _test_qunit(self, test_file, depends=(), timeout=10): QUnitView.test_file = test_file QUnitView.depends = depends @@ -149,6 +121,9 @@ self.test_queue.get(False) browser = FirefoxHelper() + isavailable, reason = browser.test() + if not isavailable: + self.fail('firefox not available or not working properly (%s)' % reason) browser.start(self.config['base-url'] + "?vid=qunit") test_count = 0 error = False @@ -188,6 +163,7 @@ def publish(self, rset=None): event = self._cw.form['event'] getattr(self, 'handle_%s' % event)() + return b'' def handle_module_start(self): self.__class__._current_module_name = self._cw.form.get('name', '') @@ -234,20 +210,15 @@ def call(self, **kwargs): w = self.w req = self._cw - data = { - 'jquery': req.data_url('jquery.js'), - 'web_test': req.build_url('cwsoftwareroot/devtools/data'), - } w(u''' - - - - ''' - % data) + + + + ''') w(u'') w(u'') w(u'') - prefix = len(cubicweb.CW_SOFTWARE_ROOT) + 1 for dep in self.depends: - dep = req.build_url('cwsoftwareroot/') + dep[prefix:] - w(u' ' % dep) + w(u' \n' % dep) w(u' ') - test_url = req.build_url('cwsoftwareroot/') + self.test_file[prefix:] - w(u' ' % test_url) + w(u' ' % self.test_file) w(u'''
@@ -309,16 +277,16 @@ ''') -class CWSoftwareRootStaticController(StaticFileController): - __regid__ = 'cwsoftwareroot' +class CWDevtoolsStaticController(StaticFileController): + __regid__ = 'devtools' def publish(self, rset=None): - staticdir = cubicweb.CW_SOFTWARE_ROOT + staticdir = osp.join(osp.dirname(__file__), 'data') relpath = self.relpath[len(self.__regid__) + 1:] return self.static_file(osp.join(staticdir, relpath)) -STATIC_CONTROLLERS.append(CWSoftwareRootStaticController) +STATIC_CONTROLLERS.append(CWDevtoolsStaticController) if __name__ == '__main__': diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/repotest.py --- a/devtools/repotest.py Thu Mar 24 09:43:25 2016 +0100 +++ b/devtools/repotest.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,6 +19,7 @@ This module contains functions to initialize a new repository. """ +from __future__ import print_function __docformat__ = "restructuredtext en" @@ -29,10 +30,9 @@ def tuplify(mylist): return [tuple(item) for item in mylist] -def snippet_cmp(a, b): - a = (a[0], [e.expression for e in a[1]]) - b = (b[0], [e.expression for e in b[1]]) - return cmp(a, b) +def snippet_key(a): + # a[0] may be a dict or a key/value tuple + return (sorted(dict(a[0]).items()), [e.expression for e in a[1]]) def test_plan(self, rql, expected, kwargs=None): with self.session.new_cnx() as cnx: @@ -57,7 +57,7 @@ 'expected %s queries, got %s' % (len(equeries), len(queries))) for i, (rql, sol) in enumerate(queries): self.assertEqual(rql, equeries[i][0]) - self.assertEqual(sorted(sol), sorted(equeries[i][1])) + self.assertEqual(sorted(sorted(x.items()) for x in sol), sorted(sorted(x.items()) for x in equeries[i][1])) idx = 2 else: idx = 1 @@ -66,7 +66,7 @@ self.assertEqual(len(step[-1]), len(expected[-1]), 'got %s child steps, expected %s' % (len(step[-1]), len(expected[-1]))) except AssertionError: - print 'error on step ', + print('error on step ', end=' ') pprint(step[:-1]) raise children = step[-1] @@ -115,7 +115,7 @@ schema_eids[x] = x.eid for x in schema.relations(): schema_eids[x] = x.eid - for rdef in x.rdefs.itervalues(): + for rdef in x.rdefs.values(): schema_eids[(rdef.subject, rdef.rtype, rdef.object)] = rdef.eid return schema_eids @@ -127,7 +127,7 @@ for x in schema.relations(): x.eid = schema_eids[x] schema._eid_index[x.eid] = x - for rdef in x.rdefs.itervalues(): + for rdef in x.rdefs.values(): rdef.eid = schema_eids[(rdef.subject, rdef.rtype, rdef.object)] schema._eid_index[rdef.eid] = rdef @@ -187,7 +187,7 @@ plan = self.qhelper.plan_factory(union, {}, FakeSession(self.repo)) plan.preprocess(union) for select in union.children: - select.solutions.sort() + select.solutions.sort(key=lambda x: list(x.items())) #print '********* ppsolutions', solutions return union @@ -197,7 +197,7 @@ def setUp(self): self.o = self.repo.querier - self.session = self.repo._sessions.values()[0] + self.session = next(iter(self.repo._sessions.values())) self.ueid = self.session.user.eid assert self.ueid != -1 self.repo._type_source_cache = {} # clear cache @@ -238,7 +238,7 @@ if simplify: rqlhelper.simplify(rqlst) for select in rqlst.children: - select.solutions.sort() + select.solutions.sort(key=lambda x: list(x.items())) return self.o.plan_factory(rqlst, kwargs, cnx) def _prepare(self, cnx, rql, kwargs=None): @@ -286,13 +286,13 @@ if rqlst.TYPE == 'select': self.repo.vreg.rqlhelper.annotate(rqlst) for select in rqlst.children: - select.solutions.sort() + select.solutions.sort(key=lambda x: list(x.items())) else: - rqlst.solutions.sort() + rqlst.solutions.sort(key=lambda x: 
list(x.items())) return self.o.plan_factory(rqlst, kwargs, cnx) -# monkey patch some methods to get predicatable results ####################### +# monkey patch some methods to get predictable results ####################### from cubicweb import rqlrewrite _orig_iter_relations = rqlrewrite.iter_relations @@ -300,16 +300,15 @@ _orig_build_variantes = rqlrewrite.RQLRewriter.build_variantes def _insert_snippets(self, snippets, varexistsmap=None): - _orig_insert_snippets(self, sorted(snippets, snippet_cmp), varexistsmap) + _orig_insert_snippets(self, sorted(snippets, key=snippet_key), varexistsmap) def _build_variantes(self, newsolutions): variantes = _orig_build_variantes(self, newsolutions) sortedvariantes = [] for variante in variantes: - orderedkeys = sorted((k[1], k[2], v) for k, v in variante.iteritems()) - variante = DumbOrderedDict(sorted(variante.iteritems(), - lambda a, b: cmp((a[0][1],a[0][2],a[1]), - (b[0][1],b[0][2],b[1])))) + orderedkeys = sorted((k[1], k[2], v) for k, v in variante.items()) + variante = DumbOrderedDict(sorted(variante.items(), + key=lambda a: (a[0][1], a[0][2], a[1]))) sortedvariantes.append( (orderedkeys, variante) ) return [v for ok, v in sorted(sortedvariantes)] @@ -318,7 +317,7 @@ def _check_permissions(*args, **kwargs): res, restricted = _orig_check_permissions(*args, **kwargs) - res = DumbOrderedDict(sorted(res.iteritems(), lambda a, b: cmp(a[1], b[1]))) + res = DumbOrderedDict(sorted(res.items(), key=lambda x: [y.items() for y in x[1]])) return res, restricted def _dummy_check_permissions(self, rqlst): diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/stresstester.py --- a/devtools/stresstester.py Thu Mar 24 09:43:25 2016 +0100 +++ b/devtools/stresstester.py Thu Jun 16 14:19:20 2016 +0200 @@ -41,6 +41,7 @@ Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2. 
http://www.logilab.fr/ -- mailto:contact@logilab.fr """ +from __future__ import print_function import os import sys @@ -84,7 +85,7 @@ def usage(status=0): """print usage string and exit""" - print __doc__ % basename(sys.argv[0]) + print(__doc__ % basename(sys.argv[0])) sys.exit(status) @@ -133,7 +134,7 @@ 'nb-times=', 'nb-threads=', 'profile', 'report-output=',]) except Exception as ex: - print ex + print(ex) usage(1) repeat = 100 threads = 1 @@ -155,7 +156,7 @@ elif opt in ('-P', '--profile'): prof_file = val elif opt in ('-o', '--report-output'): - report_output = file(val, 'w') + report_output = open(val, 'w') if len(args) != 2: usage(1) queries = [query for query in lines(args[1]) if not query.startswith('#')] @@ -166,7 +167,7 @@ from cubicweb.cwconfig import instance_configuration config = instance_configuration(args[0]) # get local access to the repository - print "Creating repo", prof_file + print("Creating repo", prof_file) repo = Repository(config, prof_file) cnxid = repo.connect(user, password=password) # connection to the CubicWeb repository diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/test/data/js_examples/dep_1.js --- a/devtools/test/data/js_examples/dep_1.js Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -a = 4; diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/test/data/js_examples/deps_2.js --- a/devtools/test/data/js_examples/deps_2.js Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -b = a +2; diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/test/data/js_examples/test_simple_failure.js --- a/devtools/test/data/js_examples/test_simple_failure.js Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,18 +0,0 @@ -$(document).ready(function() { - - QUnit.module("air"); - - QUnit.test("test 1", function (assert) { - assert.equal(2, 4); - }); - - QUnit.test("test 2", function (assert) { - assert.equal('', '45'); - assert.equal('1024', '32'); - }); - - QUnit.module("able"); - QUnit.test("test 3", function (assert) { - assert.deepEqual(1, 1); - }); -}); diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/test/data/js_examples/test_simple_success.js --- a/devtools/test/data/js_examples/test_simple_success.js Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,17 +0,0 @@ -$(document).ready(function() { - - QUnit.module("air"); - - QUnit.test("test 1", function (assert) { - assert.equal(2, 2); - }); - - QUnit.test("test 2", function (assert) { - assert.equal('45', '45'); - }); - - QUnit.module("able"); - QUnit.test("test 3", function (assert) { - assert.deepEqual(1, 1); - }); -}); diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/test/data/js_examples/test_with_dep.js --- a/devtools/test/data/js_examples/test_with_dep.js Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,9 +0,0 @@ -$(document).ready(function() { - - QUnit.module("air"); - - QUnit.test("test 1", function (assert) { - assert.equal(a, 4); - }); - -}); diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/test/data/js_examples/test_with_ordered_deps.js --- a/devtools/test/data/js_examples/test_with_ordered_deps.js Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,9 +0,0 @@ -$(document).ready(function() { - - QUnit.module("air"); - - QUnit.test("test 1", function (assert) { - assert.equal(b, 6); - }); - -}); diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/test/data/js_examples/utils.js --- a/devtools/test/data/js_examples/utils.js Thu Mar 24 
09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,29 +0,0 @@ -function datetuple(d) { - return [d.getFullYear(), d.getMonth()+1, d.getDate(), - d.getHours(), d.getMinutes()]; -} - -function pprint(obj) { - print('{'); - for(k in obj) { - print(' ' + k + ' = ' + obj[k]); - } - print('}'); -} - -function arrayrepr(array) { - return '[' + array.join(', ') + ']'; -} - -function assertArrayEquals(array1, array2) { - if (array1.length != array2.length) { - throw new crosscheck.AssertionFailure(array1.join(', ') + ' != ' + array2.join(', ')); - } - for (var i=0; i. +"""unit tests for cubicweb-ctl commands from devtools""" + +import os.path as osp +import sys +import tempfile +import shutil +from subprocess import Popen, PIPE, STDOUT +from unittest import TestCase + + +class CubicWebCtlTC(TestCase): + """test case for devtools commands""" + + def test_newcube(self): + cwctl = osp.abspath(osp.join(osp.dirname(__file__), '../../bin/cubicweb-ctl')) + + tmpdir = tempfile.mkdtemp(prefix="temp-cwctl-newcube") + try: + cmd = [sys.executable, cwctl, 'newcube', + '--directory', tmpdir, 'foo'] + proc = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=STDOUT) + stdout, _ = proc.communicate(b'short_desc\n') + finally: + shutil.rmtree(tmpdir, ignore_errors=True) + self.assertEqual(proc.returncode, 0, msg=stdout) + + +if __name__ == '__main__': + from unittest import main + main() diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/test/unittest_httptest.py --- a/devtools/test/unittest_httptest.py Thu Mar 24 09:43:25 2016 +0100 +++ b/devtools/test/unittest_httptest.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,7 +17,7 @@ # with CubicWeb. If not, see <http://www.gnu.org/licenses/>. """unittest for cubicweb.devtools.httptest module""" -import httplib +from six.moves import http_client from logilab.common.testlib import Tags from cubicweb.devtools.httptest import CubicWebServerTC @@ -28,12 +28,12 @@ def test_response(self): try: response = self.web_get() - except httplib.NotConnected as ex: + except http_client.NotConnected as ex: self.fail("Can't connect to test server: %s" % ex) def test_response_anon(self): response = self.web_get() - self.assertEqual(response.status, httplib.OK) + self.assertEqual(response.status, http_client.OK) def test_base_url(self): if self.config['base-url'] not in self.web_get().read(): @@ -47,20 +47,20 @@ def test_response_denied(self): response = self.web_get() - self.assertEqual(response.status, httplib.FORBIDDEN) + self.assertEqual(response.status, http_client.FORBIDDEN) def test_login(self): response = self.web_get() - if response.status != httplib.FORBIDDEN: + if response.status != http_client.FORBIDDEN: self.skipTest('Already authenticated, "test_response_denied" must have failed') # login self.web_login(self.admlogin, self.admpassword) response = self.web_get() - self.assertEqual(response.status, httplib.OK, response.body) + self.assertEqual(response.status, http_client.OK, response.body) # logout self.web_logout() response = self.web_get() - self.assertEqual(response.status, httplib.FORBIDDEN, response.body) + self.assertEqual(response.status, http_client.FORBIDDEN, response.body) diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/test/unittest_i18n.py --- a/devtools/test/unittest_i18n.py Thu Mar 24 09:43:25 2016 +0100 +++ b/devtools/test/unittest_i18n.py Thu Jun 16 14:19:20 2016 +0200 @@ -32,21 +32,22 @@ """load a po file and return a set of encountered (msgid, msgctx)""" msgs = set() msgid = msgctxt = None - for line in open(fname): - if line.strip() in ('', '#'): - continue - if
line.startswith('msgstr'): - assert not (msgid, msgctxt) in msgs - msgs.add( (msgid, msgctxt) ) - msgid = msgctxt = None - elif line.startswith('msgid'): - msgid = line.split(' ', 1)[1][1:-1] - elif line.startswith('msgctx'): - msgctxt = line.split(' ', 1)[1][1: -1] - elif msgid is not None: - msgid += line[1:-1] - elif msgctxt is not None: - msgctxt += line[1:-1] + with open(fname) as fobj: + for line in fobj: + if line.strip() in ('', '#'): + continue + if line.startswith('msgstr'): + assert not (msgid, msgctxt) in msgs + msgs.add( (msgid, msgctxt) ) + msgid = msgctxt = None + elif line.startswith('msgid'): + msgid = line.split(' ', 1)[1][1:-1] + elif line.startswith('msgctx'): + msgctxt = line.split(' ', 1)[1][1: -1] + elif msgid is not None: + msgid += line[1:-1] + elif msgctxt is not None: + msgctxt += line[1:-1] return msgs diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/test/unittest_qunit.py --- a/devtools/test/unittest_qunit.py Thu Mar 24 09:43:25 2016 +0100 +++ b/devtools/test/unittest_qunit.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,15 +1,10 @@ -from logilab.common.testlib import unittest_main -from cubicweb.devtools.qunit import QUnitTestCase - -from os import path as osp - -JSTESTDIR = osp.abspath(osp.join(osp.dirname(__file__), 'data', 'js_examples')) +from cubicweb.devtools import qunit def js(name): - return osp.join(JSTESTDIR, name) + return '/static/js_examples/' + name -class QUnitTestCaseTC(QUnitTestCase): +class QUnitTestCaseTC(qunit.QUnitTestCase): all_js_tests = ( (js('test_simple_success.js'),), @@ -28,4 +23,5 @@ if __name__ == '__main__': - unittest_main() + from unittest import main + main() diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/test/unittest_testlib.py --- a/devtools/test/unittest_testlib.py Thu Mar 24 09:43:25 2016 +0100 +++ b/devtools/test/unittest_testlib.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,9 +17,10 @@ # with CubicWeb. If not, see . 
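The load_po rewrite above wraps file iteration in a with-block so the handle is closed deterministically (py3 would otherwise emit a ResourceWarning). The same pattern, reduced to a hypothetical helper:

    def count_msgids(fname):
        # illustrative helper, not part of the test suite
        count = 0
        with open(fname) as fobj:  # closed on exit, even on error
            for line in fobj:
                if line.startswith('msgid'):
                    count += 1
        return count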
"""unittests for cw.devtools.testlib module""" -from cStringIO import StringIO +from io import BytesIO, StringIO +from unittest import TextTestRunner -from unittest import TextTestRunner +from six import PY2 from logilab.common.testlib import TestSuite, TestCase, unittest_main from logilab.common.registry import yes @@ -33,7 +34,7 @@ class entity: cw_etype = 'Entity' eid = 0 - sio = StringIO('hop\n') + sio = BytesIO(b'hop\n') form = CubicWebTC.fake_form('import', {'file': ('filename.txt', sio), 'encoding': u'utf-8', @@ -51,7 +52,7 @@ class WebTestTC(TestCase): def setUp(self): - output = StringIO() + output = BytesIO() if PY2 else StringIO() self.runner = TextTestRunner(stream=output) def test_error_raised(self): diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/test/unittest_webtest.py --- a/devtools/test/unittest_webtest.py Thu Mar 24 09:43:25 2016 +0100 +++ b/devtools/test/unittest_webtest.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,4 +1,4 @@ -import httplib +from six.moves import http_client from logilab.common.testlib import Tags from cubicweb.devtools.webtest import CubicWebTestTC @@ -21,19 +21,19 @@ def test_reponse_denied(self): res = self.webapp.get('/', expect_errors=True) - self.assertEqual(httplib.FORBIDDEN, res.status_int) + self.assertEqual(http_client.FORBIDDEN, res.status_int) def test_login(self): res = self.webapp.get('/', expect_errors=True) - self.assertEqual(httplib.FORBIDDEN, res.status_int) + self.assertEqual(http_client.FORBIDDEN, res.status_int) self.login(self.admlogin, self.admpassword) res = self.webapp.get('/') - self.assertEqual(httplib.OK, res.status_int) + self.assertEqual(http_client.OK, res.status_int) self.logout() res = self.webapp.get('/', expect_errors=True) - self.assertEqual(httplib.FORBIDDEN, res.status_int) + self.assertEqual(http_client.FORBIDDEN, res.status_int) if __name__ == '__main__': diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/testlib.py --- a/devtools/testlib.py Thu Mar 24 09:43:25 2016 +0100 +++ b/devtools/testlib.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,19 +16,19 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
"""this module contains base classes and utilities for cubicweb tests""" -__docformat__ = "restructuredtext en" +from __future__ import print_function import sys import re -import urlparse from os.path import dirname, join, abspath -from urllib import unquote from math import log from contextlib import contextmanager -from warnings import warn -from types import NoneType from itertools import chain +from six import text_type, string_types +from six.moves import range +from six.moves.urllib.parse import urlparse, parse_qs, unquote as urlunquote + import yams.schema from logilab.common.testlib import TestCase, InnerTest, Tags @@ -40,7 +40,7 @@ from logilab.common.shellutils import getlogin from cubicweb import (ValidationError, NoSelectableObject, AuthenticationError, - ProgrammingError, BadConnectionId) + BadConnectionId) from cubicweb import cwconfig, devtools, web, server, repoapi from cubicweb.utils import json from cubicweb.sobjects import notification @@ -49,7 +49,7 @@ from cubicweb.server.session import Session from cubicweb.devtools import SYSTEM_ENTITIES, SYSTEM_RELATIONS, VIEW_VALIDATORS from cubicweb.devtools import fake, htmlparser, DEFAULT_EMPTY_DB_ID -from cubicweb.utils import json + # low-level utilities ########################################################## @@ -60,10 +60,11 @@ def do_view(self, arg): import webbrowser data = self._getval(arg) - with file('/tmp/toto.html', 'w') as toto: + with open('/tmp/toto.html', 'w') as toto: toto.write(data) webbrowser.open('file:///tmp/toto.html') + def line_context_filter(line_no, center, before=3, after=None): """return true if line are in context @@ -73,6 +74,7 @@ after = before return center - before <= line_no <= center + after + def unprotected_entities(schema, strict=False): """returned a set of each non final entity type, excluding "system" entities (eg CWGroup, CWUser...) @@ -83,9 +85,11 @@ protected_entities = yams.schema.BASE_TYPES.union(SYSTEM_ENTITIES) return set(schema.entities()) - protected_entities + class JsonValidator(object): def parse_string(self, data): - return json.loads(data) + return json.loads(data.decode('ascii')) + @contextmanager def real_error_handling(app): @@ -109,10 +113,12 @@ # restore app.error_handler = fake_error_handler + # email handling, to test emails sent by an application ######################## MAILBOX = [] + class Email(object): """you'll get instances of Email into MAILBOX during tests that trigger some notification. 
@@ -143,13 +149,17 @@ return '' % (','.join(self.recipients), self.message.get('Subject')) + # the trick to get email into MAILBOX instead of actually sent: monkey patch # cwconfig.SMTP object class MockSMTP: + def __init__(self, server, port): pass + def close(self): pass + def sendmail(self, fromaddr, recipients, msg): MAILBOX.append(Email(fromaddr, recipients, msg)) @@ -220,8 +230,6 @@ req = self.requestcls(self._repo.vreg, url=url, headers=headers, method=method, form=kwargs) with self._session.new_cnx() as cnx: - if 'ecache' in cnx.transaction_data: - del cnx.transaction_data['ecache'] req.set_cnx(cnx) yield req @@ -243,7 +251,6 @@ cnx.commit() - # base class for cubicweb tests requiring a full cw environments ############### class CubicWebTC(TestCase): @@ -261,6 +268,7 @@ * `admlogin`, login of the admin user * `admpassword`, password of the admin user * `shell`, create and use shell environment + * `anonymous_allowed`: flag telling if anonymous browsing should be allowed """ appid = 'data' configcls = devtools.ApptestConfiguration @@ -283,7 +291,7 @@ """provide a new RepoAccess object for a given user The access is automatically closed at the end of the test.""" - login = unicode(login) + login = text_type(login) access = RepoAccess(self.repo, login, self.requestcls) self._open_access.add(access) return access @@ -293,14 +301,13 @@ try: self._open_access.pop().close() except BadConnectionId: - continue # already closed + continue # already closed @property def session(self): """return admin session""" return self._admin_session - #XXX this doesn't need to a be classmethod anymore def _init_repo(self): """init the repository and connection to it. """ @@ -310,11 +317,10 @@ db_handler.restore_database(self.test_db_id) self.repo = db_handler.get_repo(startup=True) # get an admin session (without actual login) - login = unicode(db_handler.config.default_admin_config['login']) + login = text_type(db_handler.config.default_admin_config['login']) self.admin_access = self.new_access(login) self._admin_session = self.admin_access._session - # config management ######################################################## @classproperty @@ -323,8 +329,11 @@ Configuration is cached on the test class. """ + if cls is CubicWebTC: + # Prevent direct use of CubicWebTC directly to avoid database + # caching issues + return None try: - assert not cls is CubicWebTC, "Don't use CubicWebTC directly to prevent database caching issue" return cls.__dict__['_config'] except KeyError: home = abspath(join(dirname(sys.modules[cls.__module__].__file__), cls.appid)) @@ -332,7 +341,7 @@ config.mode = 'test' return config - @classmethod # XXX could be turned into a regular method + @classmethod # XXX could be turned into a regular method def init_config(cls, config): """configuration initialization hooks. @@ -345,16 +354,16 @@ been properly bootstrapped. """ admincfg = config.default_admin_config - cls.admlogin = unicode(admincfg['login']) + cls.admlogin = text_type(admincfg['login']) cls.admpassword = admincfg['password'] # uncomment the line below if you want rql queries to be logged - #config.global_set_option('query-log-file', - # '/tmp/test_rql_log.' + `os.getpid()`) + # config.global_set_option('query-log-file', + # '/tmp/test_rql_log.' 
+ `os.getpid()`) config.global_set_option('log-file', None) # set default-dest-addrs to a dumb email address to avoid mailbox or # mail queue pollution config.global_set_option('default-dest-addrs', ['whatever']) - send_to = '%s@logilab.fr' % getlogin() + send_to = '%s@logilab.fr' % getlogin() config.global_set_option('sender-addr', send_to) config.global_set_option('default-dest-addrs', send_to) config.global_set_option('sender-name', 'cubicweb-test') @@ -364,15 +373,13 @@ # web resources try: config.global_set_option('embed-allowed', re.compile('.*')) - except Exception: # not in server only configuration + except Exception: # not in server only configuration pass - config.set_anonymous_allowed(cls.anonymous_allowed) @property def vreg(self): return self.repo.vreg - # global resources accessors ############################################### @property @@ -404,8 +411,9 @@ self.__class__._repo_init_failed = ex raise self.addCleanup(self._close_access) + self.config.set_anonymous_allowed(self.anonymous_allowed) self.setup_database() - MAILBOX[:] = [] # reset mailbox + MAILBOX[:] = [] # reset mailbox def tearDown(self): # XXX hack until logilab.common.testlib is fixed @@ -421,8 +429,10 @@ # monkey patch send mail operation so emails are sent synchronously _old_mail_postcommit_event = SendMailOp.postcommit_event SendMailOp.postcommit_event = SendMailOp.sendmails + def reverse_SendMailOp_monkey_patch(): SendMailOp.postcommit_event = _old_mail_postcommit_event + self.addCleanup(reverse_SendMailOp_monkey_patch) def setup_database(self): @@ -446,31 +456,30 @@ else: return req.user - @iclassmethod # XXX turn into a class method + @iclassmethod # XXX turn into a class method def create_user(self, req, login=None, groups=('users',), password=None, email=None, commit=True, **kwargs): """create and return a new user entity""" if password is None: password = login if login is not None: - login = unicode(login) + login = text_type(login) user = req.create_entity('CWUser', login=login, upassword=password, **kwargs) req.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)' % ','.join(repr(str(g)) for g in groups), {'x': user.eid}) if email is not None: - req.create_entity('EmailAddress', address=unicode(email), + req.create_entity('EmailAddress', address=text_type(email), reverse_primary_email=user) user.cw_clear_relation_cache('in_group', 'subject') if commit: try: - req.commit() # req is a session + req.commit() # req is a session except AttributeError: req.cnx.commit() return user - # other utilities ######################################################### @contextmanager @@ -518,10 +527,10 @@ similar to `orig_permissions.update(partial_perms)`. 
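A hypothetical use of the permission-override context manager documented above; the CWUser entity type and the group name are illustrative:

    class PermissionsTC(CubicWebTC):
        """hypothetical example, not part of the patch"""
        def test_users_hidden(self):
            with self.temporary_permissions(CWUser={'read': ('managers',)}):
                pass  # code here runs with the restricted read permission
            # original permissions are restored on exit, even if the block raises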
""" torestore = [] - for erschema, etypeperms in chain(perm_overrides, perm_kwoverrides.iteritems()): - if isinstance(erschema, basestring): + for erschema, etypeperms in chain(perm_overrides, perm_kwoverrides.items()): + if isinstance(erschema, string_types): erschema = self.schema[erschema] - for action, actionperms in etypeperms.iteritems(): + for action, actionperms in etypeperms.items(): origperms = erschema.permissions[action] erschema.set_action_permissions(action, actionperms) torestore.append([erschema, action, origperms]) @@ -549,7 +558,6 @@ self.assertListEqual(sorted(tr.name for tr in transitions), sorted(expected)) - # views and actions registries inspection ################################## def pviews(self, req, rset): @@ -585,6 +593,7 @@ @property def items(self): return self + class fake_box(object): def action_link(self, action, **kwargs): return (action.title, action.url()) @@ -611,7 +620,7 @@ try: view = viewsvreg._select_best(views, req, rset=rset) if view is None: - raise NoSelectableObject((req,), {'rset':rset}, views) + raise NoSelectableObject((req,), {'rset': rset}, views) if view.linkable(): yield view else: @@ -642,7 +651,6 @@ else: not_selected(self.vreg, view) - # web ui testing utilities ################################################# @property @@ -650,8 +658,10 @@ def app(self): """return a cubicweb publisher""" publisher = application.CubicWebPublisher(self.repo, self.config) + def raise_error_handler(*args, **kwargs): raise + publisher.error_handler = raise_error_handler return publisher @@ -703,7 +713,7 @@ for fields that are not tied to the given entity """ assert field_dict or entity_field_dicts, \ - 'field_dict and entity_field_dicts arguments must not be both unspecified' + 'field_dict and entity_field_dicts arguments must not be both unspecified' if field_dict is None: field_dict = {} form = {'__form_id': formid} @@ -711,9 +721,11 @@ for field, value in field_dict.items(): fields.append(field) form[field] = value + def _add_entity_field(entity, field, value): entity_fields.append(field) form[eid_param(field, entity.eid)] = value + for entity, field_dict in entity_field_dicts: if '__maineid' not in form: form['__maineid'] = entity.eid @@ -736,9 +748,9 @@ """ req = self.request(url=url) if isinstance(url, unicode): - url = url.encode(req.encoding) # req.setup_params() expects encoded strings - querystring = urlparse.urlparse(url)[-2] - params = urlparse.parse_qs(querystring) + url = url.encode(req.encoding) # req.setup_params() expects encoded strings + querystring = urlparse(url)[-2] + params = parse_qs(querystring) req.setup_params(params) return req @@ -750,9 +762,9 @@ """ with self.admin_access.web_request(url=url) as req: if isinstance(url, unicode): - url = url.encode(req.encoding) # req.setup_params() expects encoded strings - querystring = urlparse.urlparse(url)[-2] - params = urlparse.parse_qs(querystring) + url = url.encode(req.encoding) # req.setup_params() expects encoded strings + querystring = urlparse(url)[-2] + params = parse_qs(querystring) req.setup_params(params) yield req @@ -791,9 +803,9 @@ path = location params = {} else: - cleanup = lambda p: (p[0], unquote(p[1])) + cleanup = lambda p: (p[0], urlunquote(p[1])) params = dict(cleanup(p.split('=', 1)) for p in params.split('&') if p) - if path.startswith(req.base_url()): # may be relative + if path.startswith(req.base_url()): # may be relative path = path[len(req.base_url()):] return path, params @@ -812,8 +824,8 @@ """call the publish method of the application publisher, 
        expecting to get a Redirect exception
        """
-        result = self.app_handle_request(req, path)
-        self.assertTrue(300 <= req.status_out <400, req.status_out)
+        self.app_handle_request(req, path)
+        self.assertTrue(300 <= req.status_out < 400, req.status_out)
         location = req.get_response_header('location')
         return self._parse_location(req, location)

@@ -822,7 +834,6 @@
     def expect_redirect_publish(self, *args, **kwargs):
         return self.expect_redirect_handle_request(*args, **kwargs)

-
     def set_auth_mode(self, authmode, anonuser=None):
         self.set_option('auth-mode', authmode)
         self.set_option('anonymous-user', anonuser)
@@ -871,8 +882,8 @@
         #
         # do not set html validators here, we need HTMLValidator for html
         # snippets
-        #'text/html': DTDValidator,
-        #'application/xhtml+xml': DTDValidator,
+        # 'text/html': DTDValidator,
+        # 'application/xhtml+xml': DTDValidator,
         'application/xml': htmlparser.XMLValidator,
         'text/xml': htmlparser.XMLValidator,
         'application/json': JsonValidator,
@@ -884,8 +895,7 @@
         }
     # maps vid : validator name (override content_type_validators)
     vid_validators = dict((vid, htmlparser.VALMAP[valkey])
-                          for vid, valkey in VIEW_VALIDATORS.iteritems())
-
+                          for vid, valkey in VIEW_VALIDATORS.items())

     def view(self, vid, rset=None, req=None, template='main-template',
              **kwargs):
@@ -907,12 +917,15 @@
         view = viewsreg.select(vid, req, rset=rset, **kwargs)
         # set explicit test description
         if rset is not None:
+            # coerce to "bytes" on py2 because the description will be sent to
+            # sys.stdout/stderr which takes "bytes" on py2 and "unicode" on py3
+            rql = str(rset.printable_rql())
             self.set_description("testing vid=%s defined in %s with (%s)" % (
-                vid, view.__module__, rset.printable_rql()))
+                vid, view.__module__, rql))
         else:
             self.set_description("testing vid=%s defined in %s without rset" % (
                 vid, view.__module__))
-        if template is None: # raw view testing, no template
+        if template is None:  # raw view testing, no template
             viewfunc = view.render
         else:
             kwargs['view'] = view
@@ -920,7 +933,6 @@
                                                  rset=rset, **kwargs)
         return self._test_view(viewfunc, view, template, kwargs)

-
     def _test_view(self, viewfunc, view, template='main-template', kwargs={}):
         """this method does the actual call to the view

@@ -940,7 +952,9 @@
                 msg = '[%s in %s] %s' % (klass, view.__regid__, exc)
             except Exception:
                 msg = '[%s in %s] undisplayable exception' % (klass, view.__regid__)
-            raise AssertionError, msg, tcbk
+            exc = AssertionError(msg)
+            exc.__traceback__ = tcbk
+            raise exc
         return self._check_html(output, view, template)

     def get_validator(self, view=None, content_type=None, output=None):
@@ -953,11 +967,11 @@
         if content_type is None:
             content_type = 'text/html'
         if content_type in ('text/html', 'application/xhtml+xml') and output:
-            if output.startswith('<!DOCTYPE html>'):
+            if output.startswith(b'<!DOCTYPE html>'):
                 # only check XML well-formness since HTMLValidator isn't html5
                 # compatible and won't like various other extensions
                 default_validator = htmlparser.XMLSyntaxValidator
-            elif output.startswith('<?xml'):
+            elif output.startswith(b'<?xml'):
                 default_validator = htmlparser.DTDValidator
             else:
                 default_validator = htmlparser.HTMLValidator
         else:
             default_validator = self.content_type_validators.get(content_type, None)
         if default_validator is None:
             return
         return default_validator()

     def _check_html(self, output, view, template='main-template'):
         """raises an exception if the HTML is invalid"""
         output = output.strip()
         validator = self.get_validator(view, output=output)
         if validator is None:
             return output
         if isinstance(validator, htmlparser.DTDValidator):
             # XXX remove <canvas> used in progress widget, unknown in html dtd
             output = re.sub('<canvas.*?></canvas>', '', output)
-        return self.assertWellFormed(validator, output.strip(), context= view.__regid__)
+        return self.assertWellFormed(validator, output.strip(), context=view.__regid__)

     def assertWellFormed(self, validator, content, context=None):
         try:
@@ -998,7 +1015,7 @@
             str_exc = str(exc)
         except Exception:
             str_exc = 'undisplayable exception'
-        msg += str_exc
+        msg += str_exc.encode(sys.getdefaultencoding(), 'replace')
         if content is not None:
             position = getattr(exc, "position", (0,))[0]
             if position:
@@ 
-1015,7 +1032,9 @@ for idx, line in enumerate(content) if line_context_filter(idx+1, position)) msg += u'\nfor content:\n%s' % content - raise AssertionError, msg, tcbk + exc = AssertionError(msg) + exc.__traceback__ = tcbk + raise exc def assertDocTestFile(self, testfile): # doctest returns tuple (failure_count, test_count) @@ -1053,6 +1072,7 @@ # XXX cleanup unprotected_entities & all mess + def how_many_dict(schema, cnx, how_many, skip): """given a schema, compute how many entities by type we need to be able to satisfy relations cardinality. @@ -1081,7 +1101,7 @@ # reverse subj and obj in the above explanation relmap.setdefault((rschema, obj), []).append(str(subj)) unprotected = unprotected_entities(schema) - for etype in skip: # XXX (syt) duh? explain or kill + for etype in skip: # XXX (syt) duh? explain or kill unprotected.add(etype) howmanydict = {} # step 1, compute a base number of each entity types: number of already @@ -1096,7 +1116,7 @@ # new num for etype = max(current num, sum(num for possible target etypes)) # # XXX we should first check there is no cycle then propagate changes - for (rschema, etype), targets in relmap.iteritems(): + for (rschema, etype), targets in relmap.items(): relfactor = sum(howmanydict[e] for e in targets) howmanydict[str(etype)] = max(relfactor, howmanydict[etype]) return howmanydict @@ -1127,7 +1147,6 @@ def post_populate(self, cnx): pass - @nocoverage def auto_populate(self, how_many): """this method populates the database with `how_many` entities @@ -1166,8 +1185,8 @@ cnx.execute(rql, args) except ValidationError as ex: # failed to satisfy some constraint - print 'error in automatic db population', ex - cnx.commit_state = None # reset uncommitable flag + print('error in automatic db population', ex) + cnx.commit_state = None # reset uncommitable flag self.post_populate(cnx) def iter_individual_rsets(self, etypes=None, limit=None): @@ -1179,7 +1198,7 @@ else: rql = 'Any X WHERE X is %s' % etype rset = req.execute(rql) - for row in xrange(len(rset)): + for row in range(len(rset)): if limit and row > limit: break # XXX iirk @@ -1202,7 +1221,8 @@ # test a mixed query (DISTINCT/GROUP to avoid getting duplicate # X which make muledit view failing for instance (html validation fails # because of some duplicate "id" attributes) - yield req.execute('DISTINCT Any X, MAX(Y) GROUPBY X WHERE X is %s, Y is %s' % (etype1, etype2)) + yield req.execute('DISTINCT Any X, MAX(Y) GROUPBY X WHERE X is %s, Y is %s' % + (etype1, etype2)) # test some application-specific queries if defined for rql in self.application_rql: yield req.execute(rql) @@ -1225,7 +1245,8 @@ # resultset's syntax tree rset = backup_rset for action in self.list_actions_for(rset): - yield InnerTest(self._testname(rset, action.__regid__, 'action'), self._test_action, action) + yield InnerTest(self._testname(rset, action.__regid__, 'action'), + self._test_action, action) for box in self.list_boxes_for(rset): w = [].append yield InnerTest(self._testname(rset, box.__regid__, 'box'), box.render, w) @@ -1243,28 +1264,28 @@ tags = AutoPopulateTest.tags | Tags('web', 'generated') def setUp(self): - assert not self.__class__ is AutomaticWebTest, 'Please subclass AutomaticWebTest to prevent database caching issue' + if self.__class__ is AutomaticWebTest: + # Prevent direct use of AutomaticWebTest to avoid database caching + # issues. 
+ return super(AutomaticWebTest, self).setUp() # access to self.app for proper initialization of the authentication # machinery (else some views may fail) self.app - ## one each def test_one_each_config(self): self.auto_populate(1) for rset in self.iter_automatic_rsets(limit=1): for testargs in self._test_everything_for(rset): yield testargs - ## ten each def test_ten_each_config(self): self.auto_populate(10) for rset in self.iter_automatic_rsets(limit=10): for testargs in self._test_everything_for(rset): yield testargs - ## startup views def test_startup_views(self): for vid in self.list_startup_views(): with self.admin_access.web_request() as req: @@ -1284,7 +1305,7 @@ # # XXX broken # from cubicweb.devtools.apptest import TestEnvironment # env = testclass._env = TestEnvironment('data', configcls=testclass.configcls) -# for reg in env.vreg.itervalues(): +# for reg in env.vreg.values(): # reg._selected = {} # try: # orig_select_best = reg.__class__.__orig_select_best @@ -1304,10 +1325,10 @@ # def print_untested_objects(testclass, skipregs=('hooks', 'etypes')): -# for regname, reg in testclass._env.vreg.iteritems(): +# for regname, reg in testclass._env.vreg.items(): # if regname in skipregs: # continue -# for appobjects in reg.itervalues(): +# for appobjects in reg.values(): # for appobject in appobjects: # if not reg._selected.get(appobject): # print 'not tested', regname, appobject diff -r a4fcee1e9789 -r 19fcce6dc6d1 devtools/webtest.py --- a/devtools/webtest.py Thu Mar 24 09:43:25 2016 +0100 +++ b/devtools/webtest.py Thu Jun 16 14:19:20 2016 +0200 @@ -7,13 +7,11 @@ class CubicWebTestTC(CubicWebTC): - @classmethod - def init_config(cls, config): - super(CubicWebTestTC, cls).init_config(config) - config.global_set_option('base-url', 'http://localhost.local/') - def setUp(self): super(CubicWebTestTC, self).setUp() + self.config.global_set_option('base-url', 'http://localhost.local/') + # call load_configuration again to let the config reset its datadir_url + self.config.load_configuration() webapp = handler.CubicWebWSGIApplication(self.config) self.webapp = webtest.TestApp(webapp) diff -r a4fcee1e9789 -r 19fcce6dc6d1 doc/book/devrepo/fti.rst --- a/doc/book/devrepo/fti.rst Thu Mar 24 09:43:25 2016 +0100 +++ b/doc/book/devrepo/fti.rst Thu Jun 16 14:19:20 2016 +0200 @@ -94,37 +94,10 @@ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ``db-rebuild-fti`` will call the -:meth:`~cubicweb.entities.AnyEntity.cw_fti_index_rql_queries` class +:meth:`~cubicweb.entities.AnyEntity.cw_fti_index_rql_limit` class method on your entity type. -.. automethod:: cubicweb.entities.AnyEntity.cw_fti_index_rql_queries - -Now, suppose you've got a _huge_ table to index, you probably don't want to -get all entities at once. So here's a simple customized example that will -process block of 10000 entities: - -.. 
sourcecode:: python - - - class MyEntityClass(AnyEntity): - __regid__ = 'MyEntityClass' - - @classmethod - def cw_fti_index_rql_queries(cls, req): - # get the default RQL method and insert LIMIT / OFFSET instructions - base_rql = super(SearchIndex, cls).cw_fti_index_rql_queries(req)[0] - selected, restrictions = base_rql.split(' WHERE ') - rql_template = '%s ORDERBY X LIMIT %%(limit)s OFFSET %%(offset)s WHERE %s' % ( - selected, restrictions) - # count how many entities you'll have to index - count = req.execute('Any COUNT(X) WHERE X is MyEntityClass')[0][0] - # iterate by blocks of 10000 entities - chunksize = 10000 - for offset in xrange(0, count, chunksize): - print 'SENDING', rql_template % {'limit': chunksize, 'offset': offset} - yield rql_template % {'limit': chunksize, 'offset': offset} - -Since you have access to ``req``, you can more or less fetch whatever you want. +.. automethod:: cubicweb.entities.AnyEntity.cw_fti_index_rql_limit Customizing :meth:`~cubicweb.entities.adapters.IFTIndexableAdapter.get_words` diff -r a4fcee1e9789 -r 19fcce6dc6d1 doc/book/devrepo/testing.rst --- a/doc/book/devrepo/testing.rst Thu Mar 24 09:43:25 2016 +0100 +++ b/doc/book/devrepo/testing.rst Thu Jun 16 14:19:20 2016 +0200 @@ -324,9 +324,9 @@ def test_blog_rss(self): with self.admin_access.web_request() as req: - rset = req.execute('Any B ORDERBY D DESC WHERE B is BlogEntry, ' - 'B created_by U, U login "logilab", B creation_date D') - self.view('rss', rset, req=req) + rset = req.execute('Any B ORDERBY D DESC WHERE B is BlogEntry, ' + 'B created_by U, U login "logilab", B creation_date D') + self.view('rss', rset, req=req) Testing with other cubes diff -r a4fcee1e9789 -r 19fcce6dc6d1 doc/book/devweb/views/table.rst --- a/doc/book/devweb/views/table.rst Thu Mar 24 09:43:25 2016 +0100 +++ b/doc/book/devweb/views/table.rst Thu Jun 16 14:19:20 2016 +0200 @@ -96,8 +96,8 @@ 'resource': MainEntityColRenderer(), 'workpackage': EntityTableColRenderer( header='Workpackage', - renderfunc=worpackage_cell, - sortfunc=worpackage_sortvalue,), + renderfunc=workpackage_cell, + sortfunc=workpackage_sortvalue,), 'in_state': EntityTableColRenderer( renderfunc=lambda w,x: w(x.cw_adapt_to('IWorkflowable').printable_state), sortfunc=lambda x: x.cw_adapt_to('IWorkflowable').printable_state), diff -r a4fcee1e9789 -r 19fcce6dc6d1 doc/changes/3.21.rst --- a/doc/changes/3.21.rst Thu Mar 24 09:43:25 2016 +0100 +++ b/doc/changes/3.21.rst Thu Jun 16 14:19:20 2016 +0200 @@ -1,5 +1,5 @@ -3.21 -==== +3.21 (10 July 2015) +=================== New features ------------ diff -r a4fcee1e9789 -r 19fcce6dc6d1 doc/changes/3.22.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/changes/3.22.rst Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,94 @@ +3.22 (4 January 2016) +===================== + +New features +------------ + +* a huge amount of changes were done towards python 3.x support (as yet + incomplete). This introduces a new dependency on six, to handle + python2/python3 compatibility. + +* new cubicweb.dataimport.massive_store module, a postgresql-specific store + using the COPY statement to accelerate massive data imports. This + functionality was previously part of cubicweb-dataio (there are some API + differences with that previous version, however). + +* cubes custom sql scripts are executed before creating tables. This allows + them to create new types or extensions. + +* the ``ejsonexport`` view can be specialized using the new ``ISerializable`` + entity adapter. 
By default, it will return an entity's (non-Bytes and + non-Password) attributes plus the special ``cw_etype`` and ``cw_source`` + keys. + +* cubes that define custom final types are now handled by the ``add_cube`` + migration command. + +* synchronization of external sources can be triggered from the web interface + by suitably privileged users with a new ``cw.source-sync`` action. + +User-visible changes +-------------------- + +* the ldapfeed source now depends on the `ldap3` module instead of + `python-ldap`. + +* replies don't get an ``Expires`` header by default. However when they do, + they also get a coherent ``Cache-Control``. + +* data files are regenerated at each request, they are no longer cached by + ``cubicweb.web.PropertySheet``. Requests for data files missing the instance + hash are handled with a redirection instead of a direct reply, to allow + correct cache-related reply headers. + +API changes +----------- + +* ``config.repository()`` creates a new Repository object each time, instead of + returning a cached object. WARNING: this may cause unexpected issues if + several repositories end up being used. + +* migration scripts, as well as other scripts executed by ``cubicweb-ctl + shell``, are loaded with the print_function flag enabled (for backwards + compatibility, if that fails they are re-loaded without that flag) + +* the ``cw_fti_index_rql_queries`` method on entity classes is replaced by + ``cw_fti_index_rql_limit``, a generator which yields ``ResultSet`` objects + containing entities to be indexed. By default, entities are returned 1000 at + a time. + +* ``IDownloadableAdapter`` API is clarified: ``download_url``, + ``download_content_type`` and ``download_file_name`` return unicode objects, + ``download_data`` returns bytes. + +* the ``Repository.extid2eid()`` entry point for external sources is deprecated. + Imports should use one of the stores from the ``cubicweb.dataimport`` package + instead. + +* the ``cubicweb.repoapi.get_repository()`` function's ``uri`` argument should + no longer be used. + +* the generic datafeed xml parser is deprecated in favor of the "store" API + introduced in cubicweb 3.21. + +* the session manager lives in the ``sessions`` registry instead of ``components``. + +* ``TZDatetime`` attributes are returned as timezone-aware python datetime + objects. WARNING: this will break client applications that compare or use + arithmetic involving timezone-naive datetime objects. + +* creation_date and modification_date attributes for all entities are now + timezone-aware (``TZDatetime``) instead of localtime (``Datetime``). More + generally, the ``Datetime`` type should be considered as deprecated. + +Deprecated code drops +--------------------- + +* the ``cubicweb.server.hooksmanager`` module was removed + +* the ``Repository.pinfo()`` method was removed + +* the ``cubicweb.utils.SizeConstrainedList`` class was removed + +* the 'startorder' file in configuration directory is no longer honored + diff -r a4fcee1e9789 -r 19fcce6dc6d1 doc/changes/changelog.rst --- a/doc/changes/changelog.rst Thu Mar 24 09:43:25 2016 +0100 +++ b/doc/changes/changelog.rst Thu Jun 16 14:19:20 2016 +0200 @@ -2,6 +2,7 @@ Changelog history =================== +.. include:: 3.22.rst .. include:: 3.21.rst .. include:: 3.20.rst .. include:: 3.19.rst diff -r a4fcee1e9789 -r 19fcce6dc6d1 doc/changes/index.rst --- a/doc/changes/index.rst Thu Mar 24 09:43:25 2016 +0100 +++ b/doc/changes/index.rst Thu Jun 16 14:19:20 2016 +0200 @@ -4,6 +4,7 @@ .. 
toctree:: :maxdepth: 1 + 3.22 3.21 3.20 3.19 diff -r a4fcee1e9789 -r 19fcce6dc6d1 doc/dev/features_list.rst --- a/doc/dev/features_list.rst Thu Mar 24 09:43:25 2016 +0100 +++ b/doc/dev/features_list.rst Thu Jun 16 14:19:20 2016 +0200 @@ -2,21 +2,21 @@ CubicWeb features ================= -This page tries to resume features found in the bare cubicweb framework, +This page summarizes features found in the bare cubicweb framework and indicates how mature and documented they are. :code maturity (CM): - 0: experimental, not ready at all for production, may be killed - - 1: draft / unsatisfying, api may change in a near future, much probably in long - term + - 1: draft / unsatisfying, API may change in a near future, and will + certainly change in the long term - - 2: good enough, api sounds good but will probably evolve a bit with more + - 2: good enough, API sounds good but will probably evolve a bit with more hindsight - - 3: mature, backward incompatible changes unexpected (may still evolve though, - of course) + - 3: mature, backward incompatible changes unexpected (may still evolve + though, of course) :documentation level (DL): @@ -25,7 +25,7 @@ - 1: poor documentation - - 2: some valuable documentation but some parts keep uncovered + - 2: some valuable documentation but incomplete coverage - 3: good / complete documentation @@ -33,191 +33,306 @@ Instance configuration and maintainance ======================================= -+====================================================================+====+====+ -| FEATURE | CM | DL | -+====================================================================+====+====+ -| setup - installation | 2 | 3 | -| setup - environment variables | 3 | 2 | -| setup - running modes | 2 | 2 | -| setup - administration tasks | 2 | 2 | -| setup - configuration file | 2 | 1 | -+--------------------------------------------------------------------+----+----+ -| configuration - user / groups handling | 3 | 1 | -| configuration - site configuration | 3 | 1 | -| configuration - distributed configuration | 2 | 1 | -+--------------------------------------------------------------------+----+----+ -| multi-sources - capabilities | NA | 0 | -| multi-sources - configuration | 2 | 0 | -| multi-sources - ldap integration | 2 | 1 | -+--------------------------------------------------------------------+----+----+ -| usage - custom ReST markup | 2 | 0 | -| usage - personal preferences | 2 | 1 | -+--------------------------------------------------------------------+----+----+ +.. 
table:: + +-------------------------------------------------------------------+----+----+ + | FEATURE | CM | DL | + +===============+===================================================+====+====+ + | setup | installation | 2 | 3 | + | +---------------------------------------------------+----+----+ + | | environment variables | 3 | 2 | + | +---------------------------------------------------+----+----+ + | | running modes | 2 | 2 | + | +---------------------------------------------------+----+----+ + | | administration tasks | 2 | 2 | + | +---------------------------------------------------+----+----+ + | | configuration file | 2 | 1 | + +---------------+---------------------------------------------------+----+----+ + | configuration | user / groups handling | 3 | 1 | + | +---------------------------------------------------+----+----+ + | | site configuration | 3 | 1 | + | +---------------------------------------------------+----+----+ + | | distributed configuration | 2 | 1 | + +---------------+---------------------------------------------------+----+----+ + | multi-sources | capabilities | NA | 0 | + | +---------------------------------------------------+----+----+ + | | configuration | 2 | 0 | + | +---------------------------------------------------+----+----+ + | | ldap integration | 2 | 1 | + +---------------+---------------------------------------------------+----+----+ + | usage | custom ReST markup | 2 | 0 | + | +---------------------------------------------------+----+----+ + | | personal preferences | 2 | 1 | + +---------------+---------------------------------------------------+----+----+ Core development ================ -+====================================================================+====+====+ -| FEATURE | CM | DL | -+====================================================================+====+====+ -| base - concepts | NA | 3 | -| base - security model | NA | 2 | -| base - database initialization | 2 | 1 | -+--------------------------------------------------------------------+----+----+ -| rql - base | 2 | 2 | -| rql - write | 2 | 2 | -| rql - function | 2 | 0 | -| rql - outer joins | 2 | 1 | -| rql - aggregates | 2 | 1 | -| rql - subqueries | 2 | 0 | -+--------------------------------------------------------------------+----+----+ -| schema - base | 2 | 3 | -| schema - constraints | 3 | 2 | -| schema - security | 2 | 2 | -| schema - inheritance | 1 | 1 | -| schema - customization | 1 | 1 | -| schema - introspection | 2 | 1 | -+--------------------------------------------------------------------+----+----+ -| vregistry - appobject | 2 | 2 | -| vregistry - registration | 2 | 2 | -| vregistry - selection | 3 | 2 | -| vregistry - core selectors | 3 | 3 | -| vregistry - custom selectors | 2 | 1 | -| vregistry - debugging selection | 2 | 1 | -+--------------------------------------------------------------------+----+----+ -| entities - interfaces | 2 | ? | -| entities - customization (`dc_`, ...) | 2 | ? 
| -| entities - app logic | 2 | 2 | -| entities - orm configuration | 2 | 1 | -| entities - pluggable mixins | 1 | 0 | -| entities - workflow | 3 | 2 | -+--------------------------------------------------------------------+----+----+ -| dbapi - connection | 3 | 1 | -| dbapi - data management | 1 | 1 | -| dbapi - result set | 3 | 1 | -| dbapi - transaction, undo | 2 | 0 | -+--------------------------------------------------------------------+----+----+ -| cube - layout | 2 | 3 | -| cube - new cube | 2 | 2 | -+--------------------------------------------------------------------+----+----+ -| migration - context | 2 | 1 | -| migration - commands | 2 | 2 | -+--------------------------------------------------------------------+----+----+ -| testlib - CubicWebTC | 2 | 1 | -| testlib - automatic tests | 2 | 2 | -+--------------------------------------------------------------------+----+----+ -| i18n - mark string | 3 | 2 | -| i18n - customize strings from other cubes / cubicweb | 3 | 1 | -| i18n - update catalog | 3 | 2 | -+--------------------------------------------------------------------+----+----+ -| more - reloading tips | NA | 0 | -| more - site_cubicweb | 2 | ? | -| more - adding options in configuration file | 3 | 0 | -| more - adding options in site configuration / preferences | 3 | ? | -| more - optimizing / profiling | 2 | 1 | -| more - c-c plugins | 3 | 0 | -| more - crypto services | 0 | 0 | -| more - massive import | 2 | 0 | -| more - mime type based conversion | 2 | 0 | -| more - CWCache | 1 | 0 | -+--------------------------------------------------------------------+----+----+ +.. table:: + + +--------------------------------------------------------------------+----+----+ + | FEATURE | CM | DL | + +===========+========================================================+====+====+ + | base | concepts | NA | 3 | + | +--------------------------------------------------------+----+----+ + | | security model | NA | 2 | + | +--------------------------------------------------------+----+----+ + | | database initialization | 2 | 1 | + +-----------+--------------------------------------------------------+----+----+ + | rql | base | 2 | 2 | + | +--------------------------------------------------------+----+----+ + | | write | 2 | 2 | + | +--------------------------------------------------------+----+----+ + | | function | 2 | 0 | + | +--------------------------------------------------------+----+----+ + | | outer joins | 2 | 1 | + | +--------------------------------------------------------+----+----+ + | | aggregates | 2 | 1 | + | +--------------------------------------------------------+----+----+ + | | subqueries | 2 | 0 | + +-----------+--------------------------------------------------------+----+----+ + | schema | base | 2 | 3 | + | +--------------------------------------------------------+----+----+ + | | constraints | 3 | 2 | + | +--------------------------------------------------------+----+----+ + | | security | 2 | 2 | + | +--------------------------------------------------------+----+----+ + | | inheritance | 1 | 1 | + | +--------------------------------------------------------+----+----+ + | | customization | 1 | 1 | + | +--------------------------------------------------------+----+----+ + | | introspection | 2 | 1 | + +-----------+--------------------------------------------------------+----+----+ + | vregistry | appobject | 2 | 2 | + | +--------------------------------------------------------+----+----+ + | | registration | 2 | 2 | + | 
+--------------------------------------------------------+----+----+ + | | selection | 3 | 2 | + | +--------------------------------------------------------+----+----+ + | | core selectors | 3 | 3 | + | +--------------------------------------------------------+----+----+ + | | custom selectors | 2 | 1 | + | +--------------------------------------------------------+----+----+ + | | debugging selection | 2 | 1 | + +-----------+--------------------------------------------------------+----+----+ + | entities | interfaces | 2 | ? | + | +--------------------------------------------------------+----+----+ + | | customization (`dc_`, ...) | 2 | ? | + | +--------------------------------------------------------+----+----+ + | | app logic | 2 | 2 | + | +--------------------------------------------------------+----+----+ + | | orm configuration | 2 | 1 | + | +--------------------------------------------------------+----+----+ + | | pluggable mixins | 1 | 0 | + | +--------------------------------------------------------+----+----+ + | | workflow | 3 | 2 | + +-----------+--------------------------------------------------------+----+----+ + | dbapi | connection | 3 | 1 | + | +--------------------------------------------------------+----+----+ + | | data management | 1 | 1 | + | +--------------------------------------------------------+----+----+ + | | result set | 3 | 1 | + | +--------------------------------------------------------+----+----+ + | | transaction, undo | 2 | 0 | + +-----------+--------------------------------------------------------+----+----+ + | cube | layout | 2 | 3 | + | +--------------------------------------------------------+----+----+ + | | new cube | 2 | 2 | + +-----------+--------------------------------------------------------+----+----+ + | migration | context | 2 | 1 | + | +--------------------------------------------------------+----+----+ + | | commands | 2 | 2 | + +-----------+--------------------------------------------------------+----+----+ + | testlib | CubicWebTC | 2 | 1 | + | +--------------------------------------------------------+----+----+ + | | automatic tests | 2 | 2 | + +-----------+--------------------------------------------------------+----+----+ + | i18n | mark string | 3 | 2 | + | +--------------------------------------------------------+----+----+ + | | customize strings from other cubes / cubicweb | 3 | 1 | + | +--------------------------------------------------------+----+----+ + | | update catalog | 3 | 2 | + +-----------+--------------------------------------------------------+----+----+ + | more | reloading tips | NA | 0 | + | +--------------------------------------------------------+----+----+ + | | site_cubicweb | 2 | ? | + | +--------------------------------------------------------+----+----+ + | | adding options in configuration file | 3 | 0 | + | +--------------------------------------------------------+----+----+ + | | adding options in site configuration / preferences | 3 | ? 
| + | +--------------------------------------------------------+----+----+ + | | optimizing / profiling | 2 | 1 | + | +--------------------------------------------------------+----+----+ + | | c-c plugins | 3 | 0 | + | +--------------------------------------------------------+----+----+ + | | crypto services | 0 | 0 | + | +--------------------------------------------------------+----+----+ + | | massive import | 2 | 0 | + | +--------------------------------------------------------+----+----+ + | | mime type based conversion | 2 | 0 | + | +--------------------------------------------------------+----+----+ + | | CWCache | 1 | 0 | + +-----------+--------------------------------------------------------+----+----+ Web UI development ================== -+====================================================================+====+====+ -| FEATURE | CM | DL | -+====================================================================+====+====+ -| base - web request | 2 | 2 | -| base - exceptions | 2 | 0 | -| base - session, authentication | 1 | 0 | -| base - http caching | 2 | 1 | -| base - external resources | 2 | 2 | -| base - static files | 2 | ? | -| base - data sharing | 2 | 2 | -| base - graphical chart customization | 1 | 1 | -+--------------------------------------------------------------------+----+----+ -| publishing - cycle | 2 | 2 | -| publishing - error handling | 2 | 1 | -| publishing - transactions | NA | ? | -+--------------------------------------------------------------------+----+----+ -| controller - base | 2 | 2 | -| controller - view | 2 | 1 | -| controller - edit | 2 | 1 | -| controller - json | 2 | 1 | -+--------------------------------------------------------------------+----+----+ -| views - base | 2 | 2 | -| views - templates | 2 | 2 | -| views - boxes | 2 | 1 | -| views - components | 2 | 1 | -| views - primary | 2 | 1 | -| views - tabs | 2 | 1 | -| views - xml | 2 | 0 | -| views - text | 2 | 1 | -| views - table | 2 | 1 | -| views - plot | 2 | 0 | -| views - navigation | 2 | 0 | -| views - calendar, timeline | 2 | 0 | -| views - index | 2 | 2 | -| views - breadcrumbs | 2 | 1 | -| views - actions | 2 | 1 | -| views - debugging | 2 | 1 | -+--------------------------------------------------------------------+----+----+ -| form - base | 2 | 1 | -| form - fields | 2 | 1 | -| form - widgets | 2 | 1 | -| form - captcha | 2 | 0 | -| form - renderers | 2 | 0 | -| form - validation error handling | 2 | 0 | -| form - autoform | 2 | 2 | -| form - reledit | 2 | 0 | -+--------------------------------------------------------------------+----+----+ -| facets - base | 2 | ? | -| facets - configuration | 2 | 1 | -| facets - custom facets | 2 | 0 | -+--------------------------------------------------------------------+----+----+ -| css - base | 1 | 1 | -| css - customization | 1 | 1 | -+--------------------------------------------------------------------+----+----+ -| js - base | 1 | 1 | -| js - jquery | 1 | 1 | -| js - base functions | 1 | 0 | -| js - ajax | 1 | 0 | -| js - widgets | 1 | 1 | -+--------------------------------------------------------------------+----+----+ -| other - page template | 0 | 0 | -| other - inline doc (wdoc) | 2 | 0 | -| other - magic search | 2 | 0 | -| other - url mapping | 1 | 1 | -| other - apache style url rewrite | 1 | 1 | -| other - sparql | 1 | 0 | -| other - bookmarks | 2 | 1 | -+--------------------------------------------------------------------+----+----+ +.. 
table:: + + +--------------------------------------------------------------------+----+----+ + | FEATURE | CM | DL | + +============+=======================================================+====+====+ + | base | web request | 2 | 2 | + | +-------------------------------------------------------+----+----+ + | | exceptions | 2 | 0 | + | +-------------------------------------------------------+----+----+ + | | session, authentication | 1 | 0 | + | +-------------------------------------------------------+----+----+ + | | http caching | 2 | 1 | + | +-------------------------------------------------------+----+----+ + | | external resources | 2 | 2 | + | +-------------------------------------------------------+----+----+ + | | static files | 2 | ? | + | +-------------------------------------------------------+----+----+ + | | data sharing | 2 | 2 | + | +-------------------------------------------------------+----+----+ + | | graphical chart customization | 1 | 1 | + +------------+-------------------------------------------------------+----+----+ + | publishing | cycle | 2 | 2 | + | +-------------------------------------------------------+----+----+ + | | error handling | 2 | 1 | + | +-------------------------------------------------------+----+----+ + | | transactions | NA | ? | + +------------+-------------------------------------------------------+----+----+ + | controller | base | 2 | 2 | + | +-------------------------------------------------------+----+----+ + | | view | 2 | 1 | + | +-------------------------------------------------------+----+----+ + | | edit | 2 | 1 | + | +-------------------------------------------------------+----+----+ + | | json | 2 | 1 | + +------------+-------------------------------------------------------+----+----+ + | views | base | 2 | 2 | + | +-------------------------------------------------------+----+----+ + | | templates | 2 | 2 | + | +-------------------------------------------------------+----+----+ + | | boxes | 2 | 1 | + | +-------------------------------------------------------+----+----+ + | | components | 2 | 1 | + | +-------------------------------------------------------+----+----+ + | | primary | 2 | 1 | + | +-------------------------------------------------------+----+----+ + | | tabs | 2 | 1 | + | +-------------------------------------------------------+----+----+ + | | xml | 2 | 0 | + | +-------------------------------------------------------+----+----+ + | | text | 2 | 1 | + | +-------------------------------------------------------+----+----+ + | | table | 2 | 1 | + | +-------------------------------------------------------+----+----+ + | | plot | 2 | 0 | + | +-------------------------------------------------------+----+----+ + | | navigation | 2 | 0 | + | +-------------------------------------------------------+----+----+ + | | calendar, timeline | 2 | 0 | + | +-------------------------------------------------------+----+----+ + | | index | 2 | 2 | + | +-------------------------------------------------------+----+----+ + | | breadcrumbs | 2 | 1 | + | +-------------------------------------------------------+----+----+ + | | actions | 2 | 1 | + | +-------------------------------------------------------+----+----+ + | | debugging | 2 | 1 | + +------------+-------------------------------------------------------+----+----+ + | form | base | 2 | 1 | + | +-------------------------------------------------------+----+----+ + | | fields | 2 | 1 | + | +-------------------------------------------------------+----+----+ + | | widgets | 2 | 1 | + | 
+-------------------------------------------------------+----+----+ + | | captcha | 2 | 0 | + | +-------------------------------------------------------+----+----+ + | | renderers | 2 | 0 | + | +-------------------------------------------------------+----+----+ + | | validation error handling | 2 | 0 | + | +-------------------------------------------------------+----+----+ + | | autoform | 2 | 2 | + | +-------------------------------------------------------+----+----+ + | | reledit | 2 | 0 | + +------------+-------------------------------------------------------+----+----+ + | facets | base | 2 | ? | + | +-------------------------------------------------------+----+----+ + | | configuration | 2 | 1 | + | +-------------------------------------------------------+----+----+ + | | custom facets | 2 | 0 | + +------------+-------------------------------------------------------+----+----+ + | css | base | 1 | 1 | + | +-------------------------------------------------------+----+----+ + | | customization | 1 | 1 | + +------------+-------------------------------------------------------+----+----+ + | js | base | 1 | 1 | + | +-------------------------------------------------------+----+----+ + | | jquery | 1 | 1 | + | +-------------------------------------------------------+----+----+ + | | base functions | 1 | 0 | + | +-------------------------------------------------------+----+----+ + | | widgets | 1 | 1 | + | +-------------------------------------------------------+----+----+ + | | ajax | 1 | 0 | + | +-------------------------------------------------------+----+----+ + | | widgets | 1 | 1 | + +------------+-------------------------------------------------------+----+----+ + | other | page template | 0 | 0 | + | +-------------------------------------------------------+----+----+ + | | inline doc (wdoc) | 2 | 0 | + | +-------------------------------------------------------+----+----+ + | | magic search | 2 | 0 | + | +-------------------------------------------------------+----+----+ + | | url mapping | 1 | 1 | + | +-------------------------------------------------------+----+----+ + | | apache style url rewrite | 1 | 1 | + | +-------------------------------------------------------+----+----+ + | | sparql | 1 | 0 | + | +-------------------------------------------------------+----+----+ + | | bookmarks | 2 | 1 | + +------------+-------------------------------------------------------+----+----+ Repository development ====================== -+====================================================================+====+====+ -| FEATURE | CM | DL | -+====================================================================+====+====+ -| base - session | 2 | 2 | -| base - more security control | 2 | 0 | -| base - debugging | 2 | 0 | -+--------------------------------------------------------------------+----+----+ -| hooks - development | 2 | 2 | -| hooks - abstract hooks | 2 | 0 | -| hooks - core hooks | 2 | 0 | -| hooks - control | 2 | 0 | -| hooks - operation | 2 | 2 | -+--------------------------------------------------------------------+----+----+ -| notification - sending email | 2 | ? | -| notification - base views | 1 | ? | -| notification - supervisions | 1 | 0 | -+--------------------------------------------------------------------+----+----+ -| source - storages | 2 | 0 | -| source - authentication plugins | 2 | 0 | -| source - custom sources | 2 | 0 | -+--------------------------------------------------------------------+----+----+ +.. 
table:: + + +--------------------------------------------------------------------+----+----+ + | FEATURE | CM | DL | + +==============+=====================================================+====+====+ + | base | session | 2 | 2 | + | +-----------------------------------------------------+----+----+ + | | more security control | 2 | 0 | + | +-----------------------------------------------------+----+----+ + | | debugging | 2 | 0 | + +--------------+-----------------------------------------------------+----+----+ + | hooks | development | 2 | 2 | + | +-----------------------------------------------------+----+----+ + | | abstract hooks | 2 | 0 | + | +-----------------------------------------------------+----+----+ + | | core hooks | 2 | 0 | + | +-----------------------------------------------------+----+----+ + | | control | 2 | 0 | + | +-----------------------------------------------------+----+----+ + | | operation | 2 | 2 | + +--------------+-----------------------------------------------------+----+----+ + | notification | sending email | 2 | ? | + | +-----------------------------------------------------+----+----+ + | | base views | 1 | ? | + | +-----------------------------------------------------+----+----+ + | | supervisions | 1 | 0 | + +--------------+-----------------------------------------------------+----+----+ + | source | storages | 2 | 0 | + | +-----------------------------------------------------+----+----+ + | | authentication plugins | 2 | 0 | + | +-----------------------------------------------------+----+----+ + | | custom sources | 2 | 0 | + +--------------+-----------------------------------------------------+----+----+ + diff -r a4fcee1e9789 -r 19fcce6dc6d1 doc/tools/mode_plan.py --- a/doc/tools/mode_plan.py Thu Mar 24 09:43:25 2016 +0100 +++ b/doc/tools/mode_plan.py Thu Jun 16 14:19:20 2016 +0200 @@ -23,17 +23,19 @@ rename A010-joe.en.txt to A030-joe.en.txt accept [y/N]? """ +from __future__ import print_function + def ren(a,b): names = glob.glob('%s*'%a) for name in names : - print 'rename %s to %s' % (name, name.replace(a,b)) + print('rename %s to %s' % (name, name.replace(a,b))) if raw_input('accept [y/N]?').lower() =='y': for name in names: os.system('hg mv %s %s' % (name, name.replace(a,b))) -def ls(): print '\n'.join(sorted(os.listdir('.'))) +def ls(): print('\n'.join(sorted(os.listdir('.')))) def move(): filenames = [] @@ -47,4 +49,4 @@ for num, name in filenames: if num >= start: - print 'hg mv %s %2i%s' %(name,num+1,name[2:]) + print('hg mv %s %2i%s' %(name,num+1,name[2:])) diff -r a4fcee1e9789 -r 19fcce6dc6d1 doc/tools/pyjsrest.py --- a/doc/tools/pyjsrest.py Thu Mar 24 09:43:25 2016 +0100 +++ b/doc/tools/pyjsrest.py Thu Jun 16 14:19:20 2016 +0200 @@ -152,7 +152,6 @@ 'cubicweb.fckcwconfig.js', 'cubicweb.fckcwconfig-full.js', - 'cubicweb.goa.js', 'cubicweb.compat.js', ]) diff -r a4fcee1e9789 -r 19fcce6dc6d1 doc/tutorials/base/customizing-the-application.rst --- a/doc/tutorials/base/customizing-the-application.rst Thu Mar 24 09:43:25 2016 +0100 +++ b/doc/tutorials/base/customizing-the-application.rst Thu Jun 16 14:19:20 2016 +0200 @@ -422,7 +422,7 @@ entity = self.cw_rset.get_entity(row, col) self.w(u'
<h1>Welcome to the "%s" community</h1>' % entity.printable_value('name'))
         if entity.display_cw_logo():
-            self.w(u'<img src="http://..."/>')
+            self.w(u'<img src="https://..."/>')
         if entity.description:
             self.w(u'<p>%s</p>' % entity.printable_value('description'))
@@ -522,7 +522,7 @@
     def render_entity_attributes(self, entity):
         if entity.display_cw_logo():
-            self.w(u'<img src="http://..."/>')
+            self.w(u'<img src="https://..."/>')
         if entity.description:
             self.w(u'<p>%s</p>' % entity.printable_value('description'))
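Rendering customizations like the one above can be checked with the `view()` helper of `CubicWebTC`, following the same pattern as the testing.rst hunk earlier in this changeset; a minimal sketch (the `Community` entity type is an assumption taken from the tutorial):

.. sourcecode:: python

    from cubicweb.devtools.testlib import CubicWebTC

    class CommunityPrimaryViewTC(CubicWebTC):

        def test_primary_view(self):
            with self.admin_access.web_request() as req:
                req.create_entity('Community', name=u'test')
                rset = req.execute('Any C WHERE C is Community')
                # renders the view and validates the generated markup
                self.view('primary', rset, req=req)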
diff -r a4fcee1e9789 -r 19fcce6dc6d1 doc/tutorials/base/discovering-the-ui.rst
--- a/doc/tutorials/base/discovering-the-ui.rst	Thu Mar 24 09:43:25 2016 +0100
+++ b/doc/tutorials/base/discovering-the-ui.rst	Thu Jun 16 14:19:20 2016 +0200
@@ -101,16 +101,16 @@
 
 You can achieve the same thing by following the same path as we did for the blog
 creation, e.g. by clicking on the `[+]` at the left of the 'Blog entry' link on
-the index page. The diffidence being that since there is no context information,
+the index page. The difference being that since there is no context information,
 the 'blog entry of' selector won't be preset to the blog.
 
 If you click on the 'modify' link of the action box, you are back to
 the form to edit the entity you just created, except that the form now
 has another section with a combo-box entitled 'add relation'. It
-provisos a generic way to edit relations which don't appears in the
+provides a generic way to edit relations which don't appear in the
 above form. Choose the relation you want to add and a second combo box
-appears where you can pick existing entities. If there are too many
+appears where you can pick existing entities. If there are too many
 of them, you will be offered to navigate to the target entity, that is
 go away from the form and go back to it later, once you've selected the entity
 you want to link with.
diff -r a4fcee1e9789 -r 19fcce6dc6d1 doc/tutorials/dataimport/diseasome_import.py
--- a/doc/tutorials/dataimport/diseasome_import.py	Thu Mar 24 09:43:25 2016 +0100
+++ b/doc/tutorials/dataimport/diseasome_import.py	Thu Jun 16 14:19:20 2016 +0200
@@ -95,7 +95,7 @@
         # Perform a first commit, of the entities
         store.flush()
     kwargs = {}
-    for uri, relations in all_relations.iteritems():
+    for uri, relations in all_relations.items():
         from_eid = uri_to_eid.get(uri)
         # ``subjtype`` should be initialized if ``SQLGenObjectStore`` is used
         # and there are inlined relations in the schema.
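The `store.flush()` call above uses the dataimport "store" API; the new postgresql-specific store announced in the 3.22 changelog follows the same pattern. A rough sketch (`cnx` is an open repository connection; entity type and relation names are taken from the diseasome tutorial schema and partly assumed):

.. sourcecode:: python

    from cubicweb.dataimport.massive_store import MassiveObjectStore

    store = MassiveObjectStore(cnx)
    # create entities first, then relations; flush by batches
    disease = store.prepare_insert_entity('Disease', name=u'ALS')
    drug = store.prepare_insert_entity('Drug', name=u'riluzole')
    store.prepare_insert_relation(disease, 'possible_drugs', drug)
    store.flush()
    store.commit()
    store.finish()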
@@ -108,7 +108,7 @@ kwargs['subjtype'] = uri_to_etype.get(uri) if not from_eid: continue - for rtype, rels in relations.iteritems(): + for rtype, rels in relations.items(): if rtype in ('classes', 'possible_drugs', 'omim', 'omim_page', 'chromosomal_location', 'same_as', 'gene_id', 'hgnc_id', 'hgnc_page'): diff -r a4fcee1e9789 -r 19fcce6dc6d1 doc/tutorials/dataimport/diseasome_parser.py --- a/doc/tutorials/dataimport/diseasome_parser.py Thu Mar 24 09:43:25 2016 +0100 +++ b/doc/tutorials/dataimport/diseasome_parser.py Thu Jun 16 14:19:20 2016 +0200 @@ -97,4 +97,4 @@ entities[subj]['relations'].setdefault(MAPPING_RELS[rel], set()) entities[subj]['relations'][MAPPING_RELS[rel]].add(unicode(obj)) return ((ent.get('attributes'), ent.get('relations')) - for ent in entities.itervalues()) + for ent in entities.values()) diff -r a4fcee1e9789 -r 19fcce6dc6d1 entities/__init__.py --- a/entities/__init__.py Thu Mar 24 09:43:25 2016 +0100 +++ b/entities/__init__.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,8 +19,12 @@ __docformat__ = "restructuredtext en" +from warnings import warn + +from six import text_type, string_types from logilab.common.decorators import classproperty +from logilab.common.deprecation import deprecated from cubicweb import Unauthorized from cubicweb.entity import Entity @@ -35,7 +39,7 @@ @classproperty def cw_etype(cls): """entity type as a unicode string""" - return unicode(cls.__regid__) + return text_type(cls.__regid__) @classmethod def cw_create_url(cls, req, **kwargs): @@ -43,6 +47,7 @@ return req.build_url('add/%s' % cls.__regid__, **kwargs) @classmethod + @deprecated('[3.22] use cw_fti_index_rql_limit instead') def cw_fti_index_rql_queries(cls, req): """return the list of rql queries to fetch entities to FT-index @@ -60,6 +65,37 @@ return ['Any %s WHERE %s' % (', '.join(selected), ', '.join(restrictions))] + @classmethod + def cw_fti_index_rql_limit(cls, req, limit=1000): + """generate rsets of entities to FT-index + + By default, each successive result set is limited to 1000 entities + """ + if cls.cw_fti_index_rql_queries.__func__ != AnyEntity.cw_fti_index_rql_queries.__func__: + warn("[3.22] cw_fti_index_rql_queries is replaced by cw_fti_index_rql_limit", + DeprecationWarning) + for rql in cls.cw_fti_index_rql_queries(req): + yield req.execute(rql) + return + restrictions = ['X is %s' % cls.__regid__] + selected = ['X'] + start = 0 + for attrschema in sorted(cls.e_schema.indexable_attributes()): + varname = attrschema.type.upper() + restrictions.append('X %s %s' % (attrschema, varname)) + selected.append(varname) + while True: + q_restrictions = restrictions + ['X eid > %s' % start] + rset = req.execute('Any %s ORDERBY X LIMIT %s WHERE %s' % + (', '.join(selected), + limit, + ', '.join(q_restrictions))) + if rset: + start = rset[-1][0] + yield rset + else: + break + # meta data api ########################################################### def dc_title(self): @@ -134,7 +170,7 @@ return self.dc_title().lower() value = self.cw_attr_value(rtype) # do not restrict to `unicode` because Bytes will return a `str` value - if isinstance(value, basestring): + if isinstance(value, string_types): return self.printable_value(rtype, format='text/plain').lower() return value diff -r a4fcee1e9789 -r 19fcce6dc6d1 entities/adapters.py --- a/entities/adapters.py Thu Mar 24 09:43:25 2016 +0100 +++ b/entities/adapters.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,4 +1,4 @@ -# copyright 2010-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2010-2015 LOGILAB S.A. 
(Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -18,19 +18,15 @@ """some basic entity adapter implementations, for interfaces used in the framework itself. """ - -__docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from itertools import chain -from warnings import warn from hashlib import md5 from logilab.mtconverter import TransformError from logilab.common.decorators import cached -from cubicweb import ValidationError, view, ViolatedConstraint -from cubicweb.schema import CONSTRAINTS +from cubicweb import ValidationError, view, ViolatedConstraint, UniqueTogetherError from cubicweb.predicates import is_instance, relation_possible, match_exception @@ -63,8 +59,8 @@ NOTE: the dictionary keys should match the list returned by the `allowed_massmail_keys` method. """ - return dict( (attr, getattr(self.entity, attr)) - for attr in self.allowed_massmail_keys() ) + return dict((attr, getattr(self.entity, attr)) + for attr in self.allowed_massmail_keys()) class INotifiableAdapter(view.EntityAdapter): @@ -156,40 +152,46 @@ if role == 'subject': for entity_ in getattr(entity, rschema.type): merge_weight_dict(words, entity_.cw_adapt_to('IFTIndexable').get_words()) - else: # if role == 'object': + else: # if role == 'object': for entity_ in getattr(entity, 'reverse_%s' % rschema.type): merge_weight_dict(words, entity_.cw_adapt_to('IFTIndexable').get_words()) return words + def merge_weight_dict(maindict, newdict): - for weight, words in newdict.iteritems(): + for weight, words in newdict.items(): maindict.setdefault(weight, []).extend(words) + class IDownloadableAdapter(view.EntityAdapter): """interface for downloadable entities""" __regid__ = 'IDownloadable' __abstract__ = True - def download_url(self, **kwargs): # XXX not really part of this interface - """return a URL to download entity's content""" + def download_url(self, **kwargs): # XXX not really part of this interface + """return a URL to download entity's content + + It should be a unicode object containing url-encoded ASCII. + """ raise NotImplementedError def download_content_type(self): - """return MIME type of the downloadable content""" + """return MIME type (unicode) of the downloadable content""" raise NotImplementedError def download_encoding(self): - """return encoding of the downloadable content""" + """return encoding (unicode) of the downloadable content""" raise NotImplementedError def download_file_name(self): - """return file name of the downloadable content""" + """return file name (unicode) of the downloadable content""" raise NotImplementedError def download_data(self): - """return actual data of the downloadable content""" + """return actual data (bytes) of the downloadable content""" raise NotImplementedError + # XXX should propose to use two different relations for children/parent class ITreeAdapter(view.EntityAdapter): """This adapter provides a tree interface. @@ -337,7 +339,7 @@ try: # check we are not jumping to another tree if (adapter.tree_relation != self.tree_relation or - adapter.child_role != self.child_role): + adapter.child_role != self.child_role): break entity = adapter.parent() adapter = entity.cw_adapt_to('ITree') @@ -347,9 +349,35 @@ return path +class ISerializableAdapter(view.EntityAdapter): + """Adapter to serialize an entity to a bare python structure that may be + directly serialized to e.g. JSON. 
+ """ + + __regid__ = 'ISerializable' + __select__ = is_instance('Any') + + def serialize(self): + entity = self.entity + entity.complete() + data = { + 'cw_etype': entity.cw_etype, + 'cw_source': entity.cw_metainformation()['source']['uri'], + 'eid': entity.eid, + } + for rschema, __ in entity.e_schema.attribute_definitions(): + attr = rschema.type + try: + value = entity.cw_attr_cache[attr] + except KeyError: + # Bytes + continue + data[attr] = value + return data + + # error handling adapters ###################################################### -from cubicweb import UniqueTogetherError class IUserFriendlyError(view.EntityAdapter): __regid__ = 'IUserFriendlyError' @@ -380,13 +408,14 @@ __select__ = match_exception(ViolatedConstraint) def raise_user_exception(self): - _ = self._cw._ cstrname = self.exc.cstrname eschema = self.entity.e_schema for rschema, attrschema in eschema.attribute_definitions(): rdef = rschema.rdef(eschema, attrschema) for constraint in rdef.constraints: - if cstrname == 'cstr' + md5(eschema.type + rschema.type + constraint.type() + (constraint.serialize() or '')).hexdigest(): + if cstrname == 'cstr' + md5( + (eschema.type + rschema.type + constraint.type() + + (constraint.serialize() or '')).encode('ascii')).hexdigest(): break else: continue @@ -394,5 +423,9 @@ else: assert 0 key = rschema.type + '-subject' - msg, args = constraint.failed_message(key, self.entity.cw_edited[rschema.type]) + # use .get since a constraint may be associated to an attribute that isn't edited (e.g. + # constraint between two attributes). This should be the purpose of an api rework at some + # point, we currently rely on the fact that such constraint will provide a dedicated user + # message not relying on the `value` argument + msg, args = constraint.failed_message(key, self.entity.cw_edited.get(rschema.type)) raise ValidationError(self.entity.eid, {key: msg}, args) diff -r a4fcee1e9789 -r 19fcce6dc6d1 entities/authobjs.py --- a/entities/authobjs.py Thu Mar 24 09:43:25 2016 +0100 +++ b/entities/authobjs.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,6 +19,8 @@ __docformat__ = "restructuredtext en" +from six import string_types + from logilab.common.decorators import cached from cubicweb import Unauthorized @@ -126,7 +128,7 @@ :type groups: str or iterable(str) :param groups: a group name or an iterable on group names """ - if isinstance(groups, basestring): + if isinstance(groups, string_types): groups = frozenset((groups,)) elif isinstance(groups, (tuple, list)): groups = frozenset(groups) diff -r a4fcee1e9789 -r 19fcce6dc6d1 entities/lib.py --- a/entities/lib.py Thu Mar 24 09:43:25 2016 +0100 +++ b/entities/lib.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,9 +19,10 @@ __docformat__ = "restructuredtext en" from warnings import warn +from datetime import datetime -from urlparse import urlsplit, urlunsplit -from datetime import datetime +from six.moves import range +from six.moves.urllib.parse import urlsplit, urlunsplit from logilab.mtconverter import xml_escape @@ -67,7 +68,7 @@ {'y': self.eid}) if skipeids is None: skipeids = set() - for i in xrange(len(rset)): + for i in range(len(rset)): eid = rset[i][0] if eid in skipeids: continue @@ -146,4 +147,3 @@ if date: return date > self.timestamp return False - diff -r a4fcee1e9789 -r 19fcce6dc6d1 entities/sources.py --- a/entities/sources.py Thu Mar 24 09:43:25 2016 +0100 +++ b/entities/sources.py Thu Jun 16 14:19:20 2016 +0200 @@ -42,7 +42,7 @@ cfg.update(config) options = SOURCE_TYPES[self.type].options sconfig = 
SourceConfiguration(self._cw.vreg.config, options=options) - for opt, val in cfg.iteritems(): + for opt, val in cfg.items(): try: sconfig.set_option(opt, val) except OptionError: diff -r a4fcee1e9789 -r 19fcce6dc6d1 entities/test/unittest_base.py --- a/entities/test/unittest_base.py Thu Mar 24 09:43:25 2016 +0100 +++ b/entities/test/unittest_base.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -21,6 +21,7 @@ from logilab.common.testlib import unittest_main from logilab.common.decorators import clear_cache +from logilab.common.registry import yes from cubicweb.devtools.testlib import CubicWebTC @@ -45,13 +46,13 @@ self.assertEqual(entity.dc_creator(), u'member') def test_type(self): - #dc_type may be translated + # dc_type may be translated with self.admin_access.client_cnx() as cnx: member = cnx.entity_from_eid(self.membereid) self.assertEqual(member.dc_type(), 'CWUser') def test_cw_etype(self): - #cw_etype is never translated + # cw_etype is never translated with self.admin_access.client_cnx() as cnx: member = cnx.entity_from_eid(self.membereid) self.assertEqual(member.cw_etype, 'CWUser') @@ -60,16 +61,37 @@ # XXX move to yams self.assertEqual(self.schema['CWUser'].meta_attributes(), {}) self.assertEqual(dict((str(k), v) - for k, v in self.schema['State'].meta_attributes().iteritems()), - {'description_format': ('format', 'description')}) + for k, v in self.schema['State'].meta_attributes().items()), + {'description_format': ('format', 'description')}) def test_fti_rql_method(self): + class EmailAddress(AnyEntity): + __regid__ = 'EmailAddress' + __select__ = AnyEntity.__select__ & yes(2) + + @classmethod + def cw_fti_index_rql_queries(cls, req): + return ['EmailAddress Y'] + with self.admin_access.web_request() as req: + req.create_entity('EmailAddress', address=u'foo@bar.com') eclass = self.vreg['etypes'].etype_class('EmailAddress') + # deprecated self.assertEqual(['Any X, ADDRESS, ALIAS WHERE X is EmailAddress, ' 'X address ADDRESS, X alias ALIAS'], eclass.cw_fti_index_rql_queries(req)) + self.assertEqual(['Any X, ADDRESS, ALIAS ORDERBY X LIMIT 1000 WHERE X is EmailAddress, ' + 'X address ADDRESS, X alias ALIAS, X eid > 0'], + [rset.rql for rset in eclass.cw_fti_index_rql_limit(req)]) + + # test backwards compatibility with custom method + with self.temporary_appobjects(EmailAddress): + self.vreg['etypes'].clear_caches() + eclass = self.vreg['etypes'].etype_class('EmailAddress') + self.assertEqual(['EmailAddress Y'], + [rset.rql for rset in eclass.cw_fti_index_rql_limit(req)]) + class EmailAddressTC(BaseEntityTC): @@ -87,14 +109,16 @@ self.assertEqual(email3.prefered.eid, email3.eid) def test_mangling(self): + query = 'INSERT EmailAddress X: X address "maarten.ter.huurne@philips.com"' with self.admin_access.repo_cnx() as cnx: - email = cnx.execute('INSERT EmailAddress X: X address "maarten.ter.huurne@philips.com"').get_entity(0, 0) + email = cnx.execute(query).get_entity(0, 0) self.assertEqual(email.display_address(), 'maarten.ter.huurne@philips.com') self.assertEqual(email.printable_value('address'), 'maarten.ter.huurne@philips.com') self.vreg.config.global_set_option('mangle-emails', True) try: self.assertEqual(email.display_address(), 'maarten.ter.huurne at philips dot com') - 
self.assertEqual(email.printable_value('address'), 'maarten.ter.huurne at philips dot com') + self.assertEqual(email.printable_value('address'), + 'maarten.ter.huurne at philips dot com') email = cnx.execute('INSERT EmailAddress X: X address "syt"').get_entity(0, 0) self.assertEqual(email.display_address(), 'syt') self.assertEqual(email.printable_value('address'), 'syt') @@ -110,6 +134,7 @@ self.assertEqual(email.printable_value('address', format='text/plain'), 'maarten&ter@philips.com') + class CWUserTC(BaseEntityTC): def test_complete(self): @@ -147,10 +172,11 @@ with self.admin_access.repo_cnx() as cnx: e = cnx.execute('CWUser U WHERE U login "member"').get_entity(0, 0) # Bytes/Password attributes should be omitted - self.assertEqual(e.cw_adapt_to('IEmailable').allowed_massmail_keys(), - set(('surname', 'firstname', 'login', 'last_login_time', - 'creation_date', 'modification_date', 'cwuri', 'eid')) - ) + self.assertEqual( + e.cw_adapt_to('IEmailable').allowed_massmail_keys(), + set(('surname', 'firstname', 'login', 'last_login_time', + 'creation_date', 'modification_date', 'cwuri', 'eid')) + ) def test_cw_instantiate_object_relation(self): """ a weird non regression test """ @@ -193,7 +219,7 @@ # no specific class for Subdivisions, the default one should be selected eclass = self.select_eclass('SubDivision') self.assertTrue(eclass.__autogenerated__) - #self.assertEqual(eclass.__bases__, (AnyEntity,)) + # self.assertEqual(eclass.__bases__, (AnyEntity,)) # build class from most generic to most specific and make # sure the most specific is always selected self.vreg._loadedmods[__name__] = {} @@ -212,5 +238,25 @@ eclass = self.select_eclass('Division') self.assertEqual(eclass.cw_etype, 'Division') + +class ISerializableTC(CubicWebTC): + + def test_serialization(self): + with self.admin_access.repo_cnx() as cnx: + entity = cnx.create_entity('CWGroup', name=u'tmp') + cnx.commit() + serializer = entity.cw_adapt_to('ISerializable') + expected = { + 'cw_etype': u'CWGroup', + 'cw_source': 'system', + 'eid': entity.eid, + 'cwuri': u'http://testing.fr/cubicweb/%s' % entity.eid, + 'creation_date': entity.creation_date, + 'modification_date': entity.modification_date, + 'name': u'tmp', + } + self.assertEqual(serializer.serialize(), expected) + + if __name__ == '__main__': unittest_main() diff -r a4fcee1e9789 -r 19fcce6dc6d1 entities/test/unittest_wfobjs.py --- a/entities/test/unittest_wfobjs.py Thu Mar 24 09:43:25 2016 +0100 +++ b/entities/test/unittest_wfobjs.py Thu Jun 16 14:19:20 2016 +0200 @@ -107,7 +107,7 @@ def setup_database(self): rschema = self.schema['in_state'] - for rdef in rschema.rdefs.itervalues(): + for rdef in rschema.rdefs.values(): self.assertEqual(rdef.cardinality, '1*') with self.admin_access.client_cnx() as cnx: self.member_eid = self.create_user(cnx, 'member').eid diff -r a4fcee1e9789 -r 19fcce6dc6d1 entities/wfobjs.py --- a/entities/wfobjs.py Thu Mar 24 09:43:25 2016 +0100 +++ b/entities/wfobjs.py Thu Jun 16 14:19:20 2016 +0200 @@ -21,9 +21,11 @@ * workflow history (TrInfo) * adapter for workflowable entities (IWorkflowableAdapter) """ +from __future__ import print_function __docformat__ = "restructuredtext en" +from six import text_type, string_types from logilab.common.decorators import cached, clear_cache from logilab.common.deprecation import deprecated @@ -97,7 +99,7 @@ def transition_by_name(self, trname): rset = self._cw.execute('Any T, TN WHERE T name TN, T name %(n)s, ' 'T transition_of WF, WF eid %(wf)s', - {'n': unicode(trname), 'wf': self.eid}) + {'n': 
text_type(trname), 'wf': self.eid}) if rset: return rset.get_entity(0, 0) return None @@ -114,7 +116,7 @@ def add_state(self, name, initial=False, **kwargs): """add a state to this workflow""" - state = self._cw.create_entity('State', name=unicode(name), **kwargs) + state = self._cw.create_entity('State', name=text_type(name), **kwargs) self._cw.execute('SET S state_of WF WHERE S eid %(s)s, WF eid %(wf)s', {'s': state.eid, 'wf': self.eid}) if initial: @@ -126,7 +128,7 @@ def _add_transition(self, trtype, name, fromstates, requiredgroups=(), conditions=(), **kwargs): - tr = self._cw.create_entity(trtype, name=unicode(name), **kwargs) + tr = self._cw.create_entity(trtype, name=text_type(name), **kwargs) self._cw.execute('SET T transition_of WF ' 'WHERE T eid %(t)s, WF eid %(wf)s', {'t': tr.eid, 'wf': self.eid}) @@ -224,19 +226,19 @@ matches = user.matching_groups(groups) if matches: if DBG: - print 'may_be_fired: %r may fire: user matches %s' % (self.name, groups) + print('may_be_fired: %r may fire: user matches %s' % (self.name, groups)) return matches if 'owners' in groups and user.owns(eid): if DBG: - print 'may_be_fired: %r may fire: user is owner' % self.name + print('may_be_fired: %r may fire: user is owner' % self.name) return True # check one of the rql expression conditions matches if any if self.condition: if DBG: - print ('my_be_fired: %r: %s' % - (self.name, [(rqlexpr.expression, + print('my_be_fired: %r: %s' % + (self.name, [(rqlexpr.expression, rqlexpr.check_expression(self._cw, eid)) - for rqlexpr in self.condition])) + for rqlexpr in self.condition])) for rqlexpr in self.condition: if rqlexpr.check_expression(self._cw, eid): return True @@ -256,13 +258,13 @@ for gname in requiredgroups: rset = self._cw.execute('SET T require_group G ' 'WHERE T eid %(x)s, G name %(gn)s', - {'x': self.eid, 'gn': unicode(gname)}) + {'x': self.eid, 'gn': text_type(gname)}) assert rset, '%s is not a known group' % gname - if isinstance(conditions, basestring): + if isinstance(conditions, string_types): conditions = (conditions,) for expr in conditions: - if isinstance(expr, basestring): - kwargs = {'expr': unicode(expr)} + if isinstance(expr, string_types): + kwargs = {'expr': text_type(expr)} else: assert isinstance(expr, dict) kwargs = expr @@ -414,7 +416,7 @@ """return the default workflow for entities of this type""" # XXX CWEType method wfrset = self._cw.execute('Any WF WHERE ET default_workflow WF, ' - 'ET name %(et)s', {'et': unicode(self.entity.cw_etype)}) + 'ET name %(et)s', {'et': text_type(self.entity.cw_etype)}) if wfrset: return wfrset.get_entity(0, 0) self.warning("can't find any workflow for %s", self.entity.cw_etype) @@ -479,7 +481,7 @@ 'Any T,TT, TN WHERE S allowed_transition T, S eid %(x)s, ' 'T type TT, T type %(type)s, ' 'T name TN, T transition_of WF, WF eid %(wfeid)s', - {'x': self.current_state.eid, 'type': unicode(type), + {'x': self.current_state.eid, 'type': text_type(type), 'wfeid': self.current_workflow.eid}) for tr in rset.entities(): if tr.may_be_fired(self.entity.eid): @@ -528,7 +530,7 @@ def _get_transition(self, tr): assert self.current_workflow - if isinstance(tr, basestring): + if isinstance(tr, string_types): _tr = self.current_workflow.transition_by_name(tr) assert _tr is not None, 'not a %s transition: %s' % ( self.__regid__, tr) @@ -549,7 +551,7 @@ tr = self._get_transition(tr) if any(tr_ for tr_ in self.possible_transitions() if tr_.eid == tr.eid): - self.fire_transition(tr) + self.fire_transition(tr, comment, commentformat) def change_state(self, statename, 
comment=None, commentformat=None, tr=None): """change the entity's state to the given state (name or entity) in diff -r a4fcee1e9789 -r 19fcce6dc6d1 entity.py --- a/entity.py Thu Mar 24 09:43:25 2016 +0100 +++ b/entity.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,7 +20,9 @@ __docformat__ = "restructuredtext en" from warnings import warn -from functools import partial + +from six import text_type, string_types, integer_types +from six.moves import range from logilab.common.decorators import cached from logilab.common.deprecation import deprecated @@ -57,7 +59,7 @@ """return True if value can be used at the end of a Rest URL path""" if value is None: return False - value = unicode(value) + value = text_type(value) # the check for ?, /, & are to prevent problems when running # behind Apache mod_proxy if value == u'' or u'?' in value or u'/' in value or u'&' in value: @@ -105,7 +107,7 @@ """ st = cstr.snippet_rqlst.copy() # replace relations in ST by eid infos from linkto where possible - for (info_rtype, info_role), eids in lt_infos.iteritems(): + for (info_rtype, info_role), eids in lt_infos.items(): eid = eids[0] # NOTE: we currently assume a pruned lt_info with only 1 eid for rel in st.iget_nodes(RqlRelation): targetvar = rel_matches(rel, info_rtype, info_role, evar.name) @@ -132,7 +134,7 @@ def pruned_lt_info(eschema, lt_infos): pruned = {} - for (lt_rtype, lt_role), eids in lt_infos.iteritems(): + for (lt_rtype, lt_role), eids in lt_infos.items(): # we can only use lt_infos describing relation with a cardinality # of value 1 towards the linked entity if not len(eids) == 1: @@ -144,6 +146,7 @@ pruned[(lt_rtype, lt_role)] = eids return pruned + class Entity(AppObject): """an entity instance has e_schema automagically set on the class and instances has access to their issuing cursor. @@ -279,7 +282,7 @@ select = Select() mainvar = select.get_variable(mainvar) select.add_selected(mainvar) - elif isinstance(mainvar, basestring): + elif isinstance(mainvar, string_types): assert mainvar in select.defined_vars mainvar = select.get_variable(mainvar) # eases string -> syntax tree test transition: please remove once stable @@ -374,7 +377,7 @@ else: fetchattrs = etypecls.fetch_attrs etypecls._fetch_restrictions(var, select, fetchattrs, - user, ordermethod, visited=visited) + user, None, visited=visited) if ordermethod is not None: try: cmeth = getattr(cls, ordermethod) @@ -455,7 +458,7 @@ if len(value) == 0: continue # avoid crash with empty IN clause elif len(value) == 1: - value = iter(value).next() + value = next(iter(value)) else: # prepare IN clause pendingrels.append( (attr, role, value) ) @@ -514,7 +517,7 @@ prefixing the relation name by 'reverse_'. Also, relation values may be an entity or eid, a list of entities or eids. 
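For illustration, the call shapes this docstring describes look like the following minimal sketch (the connection, eid and attribute values are invented for the example; use_email is the standard CWUser-to-EmailAddress relation):

        >>> cls = cnx.vreg['etypes'].etype_class('EmailAddress')
        >>> addr = cls.cw_instantiate(cnx.execute, address=u'bob@example.org',
        ...                           reverse_use_email=user_eid)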
""" - rql, qargs, pendingrels, _attrcache = cls._cw_build_entity_query(kwargs) + rql, qargs, pendingrels, attrcache = cls._cw_build_entity_query(kwargs) if rql: rql = 'INSERT %s X: %s' % (cls.__regid__, rql) else: @@ -524,12 +527,14 @@ except IndexError: raise Exception('could not create a %r with %r (%r)' % (cls.__regid__, rql, qargs)) + created._cw_update_attr_cache(attrcache) cls._cw_handle_pending_relations(created.eid, pendingrels, execute) return created def __init__(self, req, rset=None, row=None, col=0): AppObject.__init__(self, req, rset=rset, row=row, col=col) self._cw_related_cache = {} + self._cw_adapters_cache = {} if rset is not None: self.eid = rset[row][col] else: @@ -545,15 +550,40 @@ raise NotImplementedError('comparison not implemented for %s' % self.__class__) def __eq__(self, other): - if isinstance(self.eid, (int, long)): + if isinstance(self.eid, integer_types): return self.eid == other.eid return self is other def __hash__(self): - if isinstance(self.eid, (int, long)): + if isinstance(self.eid, integer_types): return self.eid return super(Entity, self).__hash__() + def _cw_update_attr_cache(self, attrcache): + trdata = self._cw.transaction_data + uncached_attrs = trdata.get('%s.storage-special-process-attrs' % self.eid, set()) + uncached_attrs.update(trdata.get('%s.dont-cache-attrs' % self.eid, set())) + for attr in uncached_attrs: + attrcache.pop(attr, None) + self.cw_attr_cache.pop(attr, None) + self.cw_attr_cache.update(attrcache) + + def _cw_dont_cache_attribute(self, attr, repo_side=False): + """Called when some attribute has been transformed by a *storage*, + hence the original value should not be cached **by anyone**. + + For example we have a special "fs_importing" mode in BFSS + where a file path is given as attribute value and stored as is + in the data base. Later access to the attribute will provide + the content of the file at the specified path. We do not want + the "filepath" value to be cached. + + """ + trdata = self._cw.transaction_data + trdata.setdefault('%s.dont-cache-attrs' % self.eid, set()).add(attr) + if repo_side: + trdata.setdefault('%s.storage-special-process-attrs' % self.eid, set()).add(attr) + def __json_encode__(self): """custom json dumps hook to dump the entity's eid which is not part of dict structure itself @@ -567,10 +597,7 @@ return None if it can not be adapted. 
""" - try: - cache = self._cw_adapters_cache - except AttributeError: - self._cw_adapters_cache = cache = {} + cache = self._cw_adapters_cache try: return cache[interface] except KeyError: @@ -677,8 +704,8 @@ if path is None: # fallback url: / url is used as cw entities uri, # prefer it to //eid/ - return unicode(value) - return '%s/%s' % (path, self._cw.url_quote(value)) + return text_type(value) + return u'%s/%s' % (path, self._cw.url_quote(value)) def cw_attr_metadata(self, attr, metadata): """return a metadata for an attribute (None if unspecified)""" @@ -695,7 +722,7 @@ attr = str(attr) if value is _marker: value = getattr(self, attr) - if isinstance(value, basestring): + if isinstance(value, string_types): value = value.strip() if value is None or value == '': # don't use "not", 0 is an acceptable value return u'' @@ -756,7 +783,7 @@ for rschema in self.e_schema.subject_relations(): if rschema.type in skip_copy_for['subject']: continue - if rschema.final or rschema.meta: + if rschema.final or rschema.meta or rschema.rule: continue # skip already defined relations if getattr(self, rschema.type): @@ -775,7 +802,7 @@ execute(rql, {'x': self.eid, 'y': ceid}) self.cw_clear_relation_cache(rschema.type, 'subject') for rschema in self.e_schema.object_relations(): - if rschema.meta: + if rschema.meta or rschema.rule: continue # skip already defined relations if self.related(rschema.type, 'object'): @@ -798,6 +825,7 @@ # data fetching methods ################################################### + @cached def as_rset(self): # XXX .cw_as_rset """returns a resultset containing `self` information""" rset = ResultSet([(self.eid,)], 'Any X WHERE X eid %(x)s', @@ -849,7 +877,7 @@ if attributes is None: self._cw_completed = True varmaker = rqlvar_maker() - V = varmaker.next() + V = next(varmaker) rql = ['WHERE %s eid %%(x)s' % V] selected = [] for attr in (attributes or self._cw_to_complete_attributes(skip_bytes, skip_pwd)): @@ -857,7 +885,7 @@ if attr in self.cw_attr_cache: continue # case where attribute must be completed, but is not yet in entity - var = varmaker.next() + var = next(varmaker) rql.append('%s %s %s' % (V, attr, var)) selected.append((attr, var)) # +1 since this doesn't include the main variable @@ -876,7 +904,7 @@ # * user has read perm on the relation and on the target entity assert rschema.inlined assert role == 'subject' - var = varmaker.next() + var = next(varmaker) # keep outer join anyway, we don't want .complete to crash on # missing mandatory relation (see #1058267) rql.append('%s %s %s?' 
% (V, rtype, var)) @@ -892,10 +920,10 @@ raise Exception('unable to fetch attributes for entity with eid %s' % self.eid) # handle attributes - for i in xrange(1, lastattr): + for i in range(1, lastattr): self.cw_attr_cache[str(selected[i-1][0])] = rset[i] # handle relations - for i in xrange(lastattr, len(rset)): + for i in range(lastattr, len(rset)): rtype, role = selected[i-1][0] value = rset[i] if value is None: @@ -1145,9 +1173,7 @@ self._cw.vreg.solutions(self._cw, select, args) # insert RQL expressions for schema constraints into the rql syntax tree if vocabconstraints: - # RQLConstraint is a subclass for RQLVocabularyConstraint, so they - # will be included as well - cstrcls = RQLVocabularyConstraint + cstrcls = (RQLVocabularyConstraint, RQLConstraint) else: cstrcls = RQLConstraint lt_infos = pruned_lt_info(self.e_schema, lt_infos or {}) @@ -1236,8 +1262,8 @@ no relation is given """ if rtype is None: - self._cw_related_cache = {} - self._cw_adapters_cache = {} + self._cw_related_cache.clear() + self._cw_adapters_cache.clear() else: assert role self._cw_related_cache.pop('%s_%s' % (rtype, role), None) @@ -1290,6 +1316,10 @@ else: rql += ' WHERE X eid %(x)s' self._cw.execute(rql, qargs) + # update current local object _after_ the rql query to avoid + # interferences between the query execution itself and the cw_edited / + # skip_security machinery + self._cw_update_attr_cache(attrcache) self._cw_handle_pending_relations(self.eid, pendingrels, self._cw.execute) # XXX update relation cache diff -r a4fcee1e9789 -r 19fcce6dc6d1 etwist/__init__.py --- a/etwist/__init__.py Thu Mar 24 09:43:25 2016 +0100 +++ b/etwist/__init__.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,4 +18,3 @@ """ CW - nevow/twisted client """ - diff -r a4fcee1e9789 -r 19fcce6dc6d1 etwist/request.py --- a/etwist/request.py Thu Mar 24 09:43:25 2016 +0100 +++ b/etwist/request.py Thu Jun 16 14:19:20 2016 +0200 @@ -31,7 +31,7 @@ self._twreq = req super(CubicWebTwistedRequestAdapter, self).__init__( vreg, https, req.args, headers=req.received_headers) - for key, name_stream_list in req.files.iteritems(): + for key, name_stream_list in req.files.items(): for name, stream in name_stream_list: if name is not None: name = unicode(name, self.encoding) diff -r a4fcee1e9789 -r 19fcce6dc6d1 etwist/server.py --- a/etwist/server.py Thu Mar 24 09:43:25 2016 +0100 +++ b/etwist/server.py Thu Jun 16 14:19:20 2016 +0200 @@ -22,8 +22,10 @@ import select import traceback import threading -from urlparse import urlsplit, urlunsplit from cgi import FieldStorage, parse_header + +from six.moves.urllib.parse import urlsplit, urlunsplit + from cubicweb.statsd_logger import statsd_timeit from twisted.internet import reactor, task, threads diff -r a4fcee1e9789 -r 19fcce6dc6d1 etwist/service.py --- a/etwist/service.py Thu Mar 24 09:43:25 2016 +0100 +++ b/etwist/service.py Thu Jun 16 14:19:20 2016 +0200 @@ -15,6 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . +from __future__ import print_function + import os import sys @@ -22,7 +24,7 @@ import win32serviceutil import win32service except ImportError: - print 'Win32 extensions for Python are likely not installed.' 
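The Python 2/3 straddle applied throughout these hunks follows one pattern: unicode text is typed as six.text_type, raw bytes as six.binary_type, isinstance checks go through six.string_types, and relocated stdlib modules are imported from six.moves. A minimal self-contained sketch of those idioms, for reference only (it assumes nothing beyond six being installed; the helper name is invented):

    from six import text_type, binary_type, string_types
    from six.moves import range
    from six.moves.urllib.parse import urlsplit

    def as_unicode(value):
        # accepts str and unicode on Python 2, str on Python 3
        if isinstance(value, string_types):
            return text_type(value)
        raise TypeError('expected a string, got %s' % type(value))

    assert isinstance(b'raw', binary_type)
    # urlparse on Python 2, urllib.parse on Python 3
    scheme = urlsplit(u'https://www.cubicweb.org/').scheme
    # lazy, xrange-like iteration on both interpreters
    total = sum(i for i in range(10))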
+ print('Win32 extensions for Python are likely not installed.') sys.exit(3) from os.path import join diff -r a4fcee1e9789 -r 19fcce6dc6d1 etwist/test/requirements.txt --- a/etwist/test/requirements.txt Thu Mar 24 09:43:25 2016 +0100 +++ b/etwist/test/requirements.txt Thu Jun 16 14:19:20 2016 +0200 @@ -1,1 +1,1 @@ -Twisted +Twisted < 16.0.0 diff -r a4fcee1e9789 -r 19fcce6dc6d1 etwist/twctl.py --- a/etwist/twctl.py Thu Mar 24 09:43:25 2016 +0100 +++ b/etwist/twctl.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,10 +17,6 @@ # with CubicWeb. If not, see . """cubicweb-clt handlers for twisted""" -from os.path import join - -from logilab.common.shellutils import rm - from cubicweb.toolsutils import CommandHandler from cubicweb.web.webctl import WebCreateHandler, WebUpgradeHandler @@ -36,9 +32,6 @@ def start_server(self, config): from cubicweb.etwist import server - config.info('clear ui caches') - for cachedir in ('uicache', 'uicachehttps'): - rm(join(config.appdatahome, cachedir, '*')) return server.run(config) class TWStopHandler(CommandHandler): diff -r a4fcee1e9789 -r 19fcce6dc6d1 ext/rest.py --- a/ext/rest.py Thu Mar 24 09:43:25 2016 +0100 +++ b/ext/rest.py Thu Jun 16 14:19:20 2016 +0200 @@ -38,7 +38,9 @@ from itertools import chain from logging import getLogger from os.path import join -from urlparse import urlsplit + +from six import text_type +from six.moves.urllib.parse import urlsplit from docutils import statemachine, nodes, utils, io from docutils.core import Publisher @@ -172,7 +174,7 @@ rql = params['rql'] if vid is None: vid = params.get('vid') - except (ValueError, KeyError), exc: + except (ValueError, KeyError) as exc: msg = inliner.reporter.error('Could not parse bookmark path %s [%s].' % (bookmark.path, exc), line=lineno) prb = inliner.problematic(rawtext, rawtext, msg) @@ -186,7 +188,7 @@ vid = 'noresult' view = _cw.vreg['views'].select(vid, _cw, rset=rset) content = view.render() - except Exception, exc: + except Exception as exc: content = 'An error occurred while interpreting directive bookmark: %r' % exc set_classes(options) return [nodes.raw('', content, format='html')], [] @@ -401,7 +403,7 @@ the data formatted as HTML or the original data if an error occurred """ req = context._cw - if isinstance(data, unicode): + if isinstance(data, text_type): encoding = 'unicode' # remove unprintable characters unauthorized in xml data = data.translate(ESC_UCAR_TABLE) @@ -446,8 +448,8 @@ return res except BaseException: LOGGER.exception('error while publishing ReST text') - if not isinstance(data, unicode): - data = unicode(data, encoding, 'replace') + if not isinstance(data, text_type): + data = text_type(data, encoding, 'replace') return xml_escape(req._('error while publishing ReST text') + '\n\n' + data) diff -r a4fcee1e9789 -r 19fcce6dc6d1 ext/tal.py --- a/ext/tal.py Thu Mar 24 09:43:25 2016 +0100 +++ b/ext/tal.py Thu Jun 16 14:19:20 2016 +0200 @@ -184,7 +184,10 @@ interpreter.execute(self) except UnicodeError as unierror: LOGGER.exception(str(unierror)) - raise simpleTALES.ContextContentException("found non-unicode %r string in Context!" % unierror.args[1]), None, sys.exc_info()[-1] + exc = simpleTALES.ContextContentException( + "found non-unicode %r string in Context!" 
% unierror.args[1]) + exc.__traceback__ = sys.exc_info()[-1] + raise exc def compile_template(template): @@ -203,7 +206,7 @@ :type filepath: str :param template: path of the file to compile """ - fp = file(filepath) + fp = open(filepath) file_content = unicode(fp.read()) # template file should be pure ASCII fp.close() return compile_template(file_content) @@ -232,7 +235,8 @@ result = eval(expr, globals, locals) except Exception as ex: ex = ex.__class__('in %r: %s' % (expr, ex)) - raise ex, None, sys.exc_info()[-1] + ex.__traceback__ = sys.exc_info()[-1] + raise ex if (isinstance (result, simpleTALES.ContextVariable)): return result.value() return result diff -r a4fcee1e9789 -r 19fcce6dc6d1 ext/test/data/views.py --- a/ext/test/data/views.py Thu Mar 24 09:43:25 2016 +0100 +++ b/ext/test/data/views.py Thu Jun 16 14:19:20 2016 +0200 @@ -21,4 +21,3 @@ class CustomRsetTableView(tableview.RsetTableView): __regid__ = 'mytable' - diff -r a4fcee1e9789 -r 19fcce6dc6d1 ext/test/unittest_rest.py --- a/ext/test/unittest_rest.py Thu Mar 24 09:43:25 2016 +0100 +++ b/ext/test/unittest_rest.py Thu Jun 16 14:19:20 2016 +0200 @@ -15,6 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . +from six import PY3 + from logilab.common.testlib import unittest_main from cubicweb.devtools.testlib import CubicWebTC @@ -87,7 +89,9 @@ context = self.context(req) out = rest_publish(context, ':rql:`Any X WHERE X is CWUser:toto`') self.assertTrue(out.startswith("
<p>an error occurred while interpreting this " - "rql directive: ObjectNotFound(u'toto',)</p>")) + "rql directive: ObjectNotFound(%s'toto',)</p>
" % + ('' if PY3 else 'u')), + out) def test_rql_role_without_vid(self): with self.admin_access.web_request() as req: @@ -229,7 +233,7 @@ %(rql)s """ % {'rql': rql, 'colvids': ', '.join(["%d=%s" % (k, v) - for k, v in colvids.iteritems()]) + for k, v in colvids.items()]) }) view = self.vreg['views'].select('table', req, rset=req.execute(rql)) view.cellvids = colvids diff -r a4fcee1e9789 -r 19fcce6dc6d1 gettext.py --- a/gettext.py Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,795 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Internationalization and localization support. - -This module provides internationalization (I18N) and localization (L10N) -support for your Python programs by providing an interface to the GNU gettext -message catalog library. - -I18N refers to the operation by which a program is made aware of multiple -languages. L10N refers to the adaptation of your program, once -internationalized, to the local language and cultural habits. - -""" - -# This module represents the integration of work, contributions, feedback, and -# suggestions from the following people: -# -# Martin von Loewis, who wrote the initial implementation of the underlying -# C-based libintlmodule (later renamed _gettext), along with a skeletal -# gettext.py implementation. -# -# Peter Funk, who wrote fintl.py, a fairly complete wrapper around intlmodule, -# which also included a pure-Python implementation to read .mo files if -# intlmodule wasn't available. -# -# James Henstridge, who also wrote a gettext.py module, which has some -# interesting, but currently unsupported experimental features: the notion of -# a Catalog class and instances, and the ability to add to a catalog file via -# a Python API. -# -# Barry Warsaw integrated these modules, wrote the .install() API and code, -# and conformed all C and Python code to Python's coding standards. -# -# Francois Pinard and Marc-Andre Lemburg also contributed valuably to this -# module. -# -# J. David Ibanez implemented plural forms. Bruno Haible fixed some bugs. -# -# TODO: -# - Lazy loading of .mo files. Currently the entire catalog is loaded into -# memory, but that's probably bad for large translated programs. Instead, -# the lexical sort of original strings in GNU .mo files should be exploited -# to do binary searches and lazy initializations. Or you might want to use -# the undocumented double-hash algorithm for .mo files with hash tables, but -# you'll need to study the GNU gettext code to do this. -# -# - Support Solaris .mo file formats. Unfortunately, we've been unable to -# find this format documented anywhere. 
- - -import locale, copy, os, re, struct, sys -from errno import ENOENT - - -__all__ = ['NullTranslations', 'GNUTranslations', 'Catalog', - 'find', 'translation', 'install', 'textdomain', 'bindtextdomain', - 'dgettext', 'dngettext', 'gettext', 'ngettext', - ] - -_default_localedir = os.path.join(sys.prefix, 'share', 'locale') - - -def test(condition, true, false): - """ - Implements the C expression: - - condition ? true : false - - Required to correctly interpret plural forms. - """ - if condition: - return true - else: - return false - - -def c2py(plural): - """Gets a C expression as used in PO files for plural forms and returns a - Python lambda function that implements an equivalent expression. - """ - # Security check, allow only the "n" identifier - try: - from cStringIO import StringIO - except ImportError: - from StringIO import StringIO - import token, tokenize - tokens = tokenize.generate_tokens(StringIO(plural).readline) - try: - danger = [x for x in tokens if x[0] == token.NAME and x[1] != 'n'] - except tokenize.TokenError: - raise ValueError, \ - 'plural forms expression error, maybe unbalanced parenthesis' - else: - if danger: - raise ValueError, 'plural forms expression could be dangerous' - - # Replace some C operators by their Python equivalents - plural = plural.replace('&&', ' and ') - plural = plural.replace('||', ' or ') - - expr = re.compile(r'\!([^=])') - plural = expr.sub(' not \\1', plural) - - # Regular expression and replacement function used to transform - # "a?b:c" to "test(a,b,c)". - expr = re.compile(r'(.*?)\?(.*?):(.*)') - def repl(x): - return "test(%s, %s, %s)" % (x.group(1), x.group(2), - expr.sub(repl, x.group(3))) - - # Code to transform the plural expression, taking care of parentheses - stack = [''] - for c in plural: - if c == '(': - stack.append('') - elif c == ')': - if len(stack) == 1: - # Actually, we never reach this code, because unbalanced - # parentheses get caught in the security check at the - # beginning. - raise ValueError, 'unbalanced parenthesis in plural form' - s = expr.sub(repl, stack.pop()) - stack[-1] += '(%s)' % s - else: - stack[-1] += c - plural = expr.sub(repl, stack.pop()) - - return eval('lambda n: int(%s)' % plural) - - - -def _expand_lang(locale): - from locale import normalize - locale = normalize(locale) - COMPONENT_CODESET = 1 << 0 - COMPONENT_TERRITORY = 1 << 1 - COMPONENT_MODIFIER = 1 << 2 - # split up the locale into its base components - mask = 0 - pos = locale.find('@') - if pos >= 0: - modifier = locale[pos:] - locale = locale[:pos] - mask |= COMPONENT_MODIFIER - else: - modifier = '' - pos = locale.find('.') - if pos >= 0: - codeset = locale[pos:] - locale = locale[:pos] - mask |= COMPONENT_CODESET - else: - codeset = '' - pos = locale.find('_') - if pos >= 0: - territory = locale[pos:] - locale = locale[:pos] - mask |= COMPONENT_TERRITORY - else: - territory = '' - language = locale - ret = [] - for i in range(mask+1): - if not (i & ~mask): # if all components for this combo exist ... 
- val = language - if i & COMPONENT_TERRITORY: val += territory - if i & COMPONENT_CODESET: val += codeset - if i & COMPONENT_MODIFIER: val += modifier - ret.append(val) - ret.reverse() - return ret - - - -class NullTranslations: - def __init__(self, fp=None): - self._info = {} - self._charset = None - self._output_charset = None - self._fallback = None - if fp is not None: - self._parse(fp) - - def _parse(self, fp): - pass - - def add_fallback(self, fallback): - if self._fallback: - self._fallback.add_fallback(fallback) - else: - self._fallback = fallback - - def gettext(self, message): - if self._fallback: - return self._fallback.gettext(message) - return message - - def pgettext(self, context, message): - if self._fallback: - return self._fallback.pgettext(context, message) - return message - - def lgettext(self, message): - if self._fallback: - return self._fallback.lgettext(message) - return message - - def lpgettext(self, context, message): - if self._fallback: - return self._fallback.lpgettext(context, message) - return message - - def ngettext(self, msgid1, msgid2, n): - if self._fallback: - return self._fallback.ngettext(msgid1, msgid2, n) - if n == 1: - return msgid1 - else: - return msgid2 - - def npgettext(self, context, msgid1, msgid2, n): - if self._fallback: - return self._fallback.npgettext(context, msgid1, msgid2, n) - if n == 1: - return msgid1 - else: - return msgid2 - - def lngettext(self, msgid1, msgid2, n): - if self._fallback: - return self._fallback.lngettext(msgid1, msgid2, n) - if n == 1: - return msgid1 - else: - return msgid2 - - def lnpgettext(self, context, msgid1, msgid2, n): - if self._fallback: - return self._fallback.lnpgettext(context, msgid1, msgid2, n) - if n == 1: - return msgid1 - else: - return msgid2 - - def ugettext(self, message): - if self._fallback: - return self._fallback.ugettext(message) - return unicode(message) - - def upgettext(self, context, message): - if self._fallback: - return self._fallback.upgettext(context, message) - return unicode(message) - - def ungettext(self, msgid1, msgid2, n): - if self._fallback: - return self._fallback.ungettext(msgid1, msgid2, n) - if n == 1: - return unicode(msgid1) - else: - return unicode(msgid2) - - def unpgettext(self, context, msgid1, msgid2, n): - if self._fallback: - return self._fallback.unpgettext(context, msgid1, msgid2, n) - if n == 1: - return unicode(msgid1) - else: - return unicode(msgid2) - - def info(self): - return self._info - - def charset(self): - return self._charset - - def output_charset(self): - return self._output_charset - - def set_output_charset(self, charset): - self._output_charset = charset - - def install(self, unicode=False, names=None): - import __builtin__ - __builtin__.__dict__['_'] = unicode and self.ugettext or self.gettext - if hasattr(names, "__contains__"): - if "gettext" in names: - __builtin__.__dict__['gettext'] = __builtin__.__dict__['_'] - if "pgettext" in names: - __builtin__.__dict__['pgettext'] = (unicode and self.upgettext - or self.pgettext) - if "ngettext" in names: - __builtin__.__dict__['ngettext'] = (unicode and self.ungettext - or self.ngettext) - if "npgettext" in names: - __builtin__.__dict__['npgettext'] = \ - (unicode and self.unpgettext or self.npgettext) - if "lgettext" in names: - __builtin__.__dict__['lgettext'] = self.lgettext - if "lpgettext" in names: - __builtin__.__dict__['lpgettext'] = self.lpgettext - if "lngettext" in names: - __builtin__.__dict__['lngettext'] = self.lngettext - if "lnpgettext" in names: - 
__builtin__.__dict__['lnpgettext'] = self.lnpgettext - - -class GNUTranslations(NullTranslations): - # Magic number of .mo files - LE_MAGIC = 0x950412deL - BE_MAGIC = 0xde120495L - - # The encoding of a msgctxt and a msgid in a .mo file is - # msgctxt + "\x04" + msgid (gettext version >= 0.15) - CONTEXT_ENCODING = "%s\x04%s" - - def _parse(self, fp): - """Override this method to support alternative .mo formats.""" - unpack = struct.unpack - filename = getattr(fp, 'name', '') - # Parse the .mo file header, which consists of 5 little endian 32 - # bit words. - self._catalog = catalog = {} - self.plural = lambda n: int(n != 1) # germanic plural by default - buf = fp.read() - buflen = len(buf) - # Are we big endian or little endian? - magic = unpack('<I', buf[:4])[0] - if magic == self.LE_MAGIC: - version, msgcount, masteridx, transidx = unpack('<4I', buf[4:20]) - ii = '<II' - elif magic == self.BE_MAGIC: - version, msgcount, masteridx, transidx = unpack('>4I', buf[4:20]) - ii = '>II' - else: - raise IOError(0, 'Bad magic number', filename) - # Now put all messages from the .mo file buffer into the catalog - # dictionary. - for i in xrange(0, msgcount): - mlen, moff = unpack(ii, buf[masteridx:masteridx+8]) - mend = moff + mlen - tlen, toff = unpack(ii, buf[transidx:transidx+8]) - tend = toff + tlen - if mend < buflen and tend < buflen: - msg = buf[moff:mend] - tmsg = buf[toff:tend] - else: - raise IOError(0, 'File is corrupt', filename) - # See if we're looking at GNU .mo conventions for metadata - if mlen == 0: - # Catalog description - lastk = k = None - for item in tmsg.splitlines(): - item = item.strip() - if not item: - continue - if ':' in item: - k, v = item.split(':', 1) - k = k.strip().lower() - v = v.strip() - self._info[k] = v - lastk = k - elif lastk: - self._info[lastk] += '\n' + item - if k == 'content-type': - self._charset = v.split('charset=')[1] - elif k == 'plural-forms': - v = v.split(';') - plural = v[1].split('plural=')[1] - self.plural = c2py(plural) - # Note: we unconditionally convert both msgids and msgstrs to - # Unicode using the character encoding specified in the charset - # parameter of the Content-Type header. The gettext documentation - # strongly encourages msgids to be us-ascii, but some appliations - # require alternative encodings (e.g. Zope's ZCML and ZPT). For - # traditional gettext applications, the msgid conversion will - # cause no problems since us-ascii should always be a subset of - # the charset encoding. We may want to fall back to 8-bit msgids - # if the Unicode conversion fails. 
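The endianness sniff reconstructed above boils down to reading the first 32-bit word and comparing it to the two magic constants; read little-endian, a big-endian catalog shows up as the byte-swapped magic. A standalone sketch of that check (assuming path names a valid .mo file):

    import struct

    LE_MAGIC = 0x950412de
    BE_MAGIC = 0xde120495

    def mo_byte_order(path):
        # the leading magic number's byte order tells us how to unpack
        # the four remaining 32-bit header words of the catalog
        with open(path, 'rb') as fp:
            magic = struct.unpack('<I', fp.read(4))[0]
        if magic == LE_MAGIC:
            return '<'  # little endian
        if magic == BE_MAGIC:
            return '>'  # big endian
        raise IOError(0, 'Bad magic number', path)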
- if '\x00' in msg: - # Plural forms - msgid1, msgid2 = msg.split('\x00') - tmsg = tmsg.split('\x00') - if self._charset: - msgid1 = unicode(msgid1, self._charset) - tmsg = [unicode(x, self._charset) for x in tmsg] - for i in range(len(tmsg)): - catalog[(msgid1, i)] = tmsg[i] - else: - if self._charset: - msg = unicode(msg, self._charset) - tmsg = unicode(tmsg, self._charset) - catalog[msg] = tmsg - # advance to next entry in the seek tables - masteridx += 8 - transidx += 8 - - def gettext(self, message): - missing = object() - tmsg = self._catalog.get(message, missing) - if tmsg is missing: - if self._fallback: - return self._fallback.gettext(message) - return message - # Encode the Unicode tmsg back to an 8-bit string, if possible - if self._output_charset: - return tmsg.encode(self._output_charset) - elif self._charset: - return tmsg.encode(self._charset) - return tmsg - - def pgettext(self, context, message): - ctxt_msg_id = self.CONTEXT_ENCODING % (context, message) - missing = object() - tmsg = self._catalog.get(ctxt_msg_id, missing) - if tmsg is missing: - if self._fallback: - return self._fallback.pgettext(context, message) - return message - # Encode the Unicode tmsg back to an 8-bit string, if possible - if self._output_charset: - return tmsg.encode(self._output_charset) - elif self._charset: - return tmsg.encode(self._charset) - return tmsg - - def lgettext(self, message): - missing = object() - tmsg = self._catalog.get(message, missing) - if tmsg is missing: - if self._fallback: - return self._fallback.lgettext(message) - return message - if self._output_charset: - return tmsg.encode(self._output_charset) - return tmsg.encode(locale.getpreferredencoding()) - - def lpgettext(self, context, message): - ctxt_msg_id = self.CONTEXT_ENCODING % (context, message) - missing = object() - tmsg = self._catalog.get(ctxt_msg_id, missing) - if tmsg is missing: - if self._fallback: - return self._fallback.lpgettext(context, message) - return message - if self._output_charset: - return tmsg.encode(self._output_charset) - return tmsg.encode(locale.getpreferredencoding()) - - def ngettext(self, msgid1, msgid2, n): - try: - tmsg = self._catalog[(msgid1, self.plural(n))] - if self._output_charset: - return tmsg.encode(self._output_charset) - elif self._charset: - return tmsg.encode(self._charset) - return tmsg - except KeyError: - if self._fallback: - return self._fallback.ngettext(msgid1, msgid2, n) - if n == 1: - return msgid1 - else: - return msgid2 - - def npgettext(self, context, msgid1, msgid2, n): - ctxt_msg_id = self.CONTEXT_ENCODING % (context, msgid1) - try: - tmsg = self._catalog[(ctxt_msg_id, self.plural(n))] - if self._output_charset: - return tmsg.encode(self._output_charset) - elif self._charset: - return tmsg.encode(self._charset) - return tmsg - except KeyError: - if self._fallback: - return self._fallback.npgettext(context, msgid1, msgid2, n) - if n == 1: - return msgid1 - else: - return msgid2 - - def lngettext(self, msgid1, msgid2, n): - try: - tmsg = self._catalog[(msgid1, self.plural(n))] - if self._output_charset: - return tmsg.encode(self._output_charset) - return tmsg.encode(locale.getpreferredencoding()) - except KeyError: - if self._fallback: - return self._fallback.lngettext(msgid1, msgid2, n) - if n == 1: - return msgid1 - else: - return msgid2 - - def lnpgettext(self, context, msgid1, msgid2, n): - ctxt_msg_id = self.CONTEXT_ENCODING % (context, msgid1) - try: - tmsg = self._catalog[(ctxt_msg_id, self.plural(n))] - if self._output_charset: - return 
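The plural handling above keys the catalog on (msgid1, plural_index) pairs and picks the index with the c2py-compiled plural() expression; ngettext() then falls back to the untranslated ids on a miss. A reduced sketch of that layout, hardcoding the germanic rule the parser uses as its default (translations here are invented):

    catalog = {
        (u'file', 0): u'fichier',
        (u'file', 1): u'fichiers',
    }
    plural = lambda n: int(n != 1)  # normally compiled from the Plural-Forms header

    def ngettext(msgid1, msgid2, n):
        # untranslated fallback mirrors the deleted GNUTranslations.ngettext
        return catalog.get((msgid1, plural(n)), msgid1 if n == 1 else msgid2)

    assert ngettext(u'file', u'files', 1) == u'fichier'
    assert ngettext(u'file', u'files', 3) == u'fichiers'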
tmsg.encode(self._output_charset) - return tmsg.encode(locale.getpreferredencoding()) - except KeyError: - if self._fallback: - return self._fallback.lnpgettext(context, msgid1, msgid2, n) - if n == 1: - return msgid1 - else: - return msgid2 - - def ugettext(self, message): - missing = object() - tmsg = self._catalog.get(message, missing) - if tmsg is missing: - if self._fallback: - return self._fallback.ugettext(message) - return unicode(message) - return tmsg - - def upgettext(self, context, message): - ctxt_message_id = self.CONTEXT_ENCODING % (context, message) - missing = object() - tmsg = self._catalog.get(ctxt_message_id, missing) - if tmsg is missing: - # XXX logilab patch for compat w/ catalog generated by cw < 3.5 - return self.ugettext(message) - if self._fallback: - return self._fallback.upgettext(context, message) - return unicode(message) - return tmsg - - def ungettext(self, msgid1, msgid2, n): - try: - tmsg = self._catalog[(msgid1, self.plural(n))] - except KeyError: - if self._fallback: - return self._fallback.ungettext(msgid1, msgid2, n) - if n == 1: - tmsg = unicode(msgid1) - else: - tmsg = unicode(msgid2) - return tmsg - - def unpgettext(self, context, msgid1, msgid2, n): - ctxt_message_id = self.CONTEXT_ENCODING % (context, msgid1) - try: - tmsg = self._catalog[(ctxt_message_id, self.plural(n))] - except KeyError: - if self._fallback: - return self._fallback.unpgettext(context, msgid1, msgid2, n) - if n == 1: - tmsg = unicode(msgid1) - else: - tmsg = unicode(msgid2) - return tmsg - - -# Locate a .mo file using the gettext strategy -def find(domain, localedir=None, languages=None, all=0): - # Get some reasonable defaults for arguments that were not supplied - if localedir is None: - localedir = _default_localedir - if languages is None: - languages = [] - for envar in ('LANGUAGE', 'LC_ALL', 'LC_MESSAGES', 'LANG'): - val = os.environ.get(envar) - if val: - languages = val.split(':') - break - if 'C' not in languages: - languages.append('C') - # now normalize and expand the languages - nelangs = [] - for lang in languages: - for nelang in _expand_lang(lang): - if nelang not in nelangs: - nelangs.append(nelang) - # select a language - if all: - result = [] - else: - result = None - for lang in nelangs: - if lang == 'C': - break - mofile = os.path.join(localedir, lang, 'LC_MESSAGES', '%s.mo' % domain) - if os.path.exists(mofile): - if all: - result.append(mofile) - else: - return mofile - return result - - - -# a mapping between absolute .mo file path and Translation object -_translations = {} - -def translation(domain, localedir=None, languages=None, - class_=None, fallback=False, codeset=None): - if class_ is None: - class_ = GNUTranslations - mofiles = find(domain, localedir, languages, all=1) - if not mofiles: - if fallback: - return NullTranslations() - raise IOError(ENOENT, 'No translation file found for domain', domain) - # TBD: do we need to worry about the file pointer getting collected? - # Avoid opening, reading, and parsing the .mo file after it's been done - # once. - result = None - for mofile in mofiles: - key = os.path.abspath(mofile) - t = _translations.get(key) - if t is None: - t = _translations.setdefault(key, class_(open(mofile, 'rb'))) - # Copy the translation object to allow setting fallbacks and - # output charset. All other instance data is shared with the - # cached object. 
- t = copy.copy(t) - if codeset: - t.set_output_charset(codeset) - if result is None: - result = t - else: - result.add_fallback(t) - return result - - -def install(domain, localedir=None, unicode=False, codeset=None, names=None): - t = translation(domain, localedir, fallback=True, codeset=codeset) - t.install(unicode, names) - - - -# a mapping b/w domains and locale directories -_localedirs = {} -# a mapping b/w domains and codesets -_localecodesets = {} -# current global domain, `messages' used for compatibility w/ GNU gettext -_current_domain = 'messages' - - -def textdomain(domain=None): - global _current_domain - if domain is not None: - _current_domain = domain - return _current_domain - - -def bindtextdomain(domain, localedir=None): - global _localedirs - if localedir is not None: - _localedirs[domain] = localedir - return _localedirs.get(domain, _default_localedir) - - -def bind_textdomain_codeset(domain, codeset=None): - global _localecodesets - if codeset is not None: - _localecodesets[domain] = codeset - return _localecodesets.get(domain) - - -def dgettext(domain, message): - try: - t = translation(domain, _localedirs.get(domain, None), - codeset=_localecodesets.get(domain)) - except IOError: - return message - return t.gettext(message) - -def dpgettext(domain, context, message): - try: - t = translation(domain, _localedirs.get(domain, None), - codeset=_localecodesets.get(domain)) - except IOError: - return message - return t.pgettext(context, message) - -def ldgettext(domain, message): - try: - t = translation(domain, _localedirs.get(domain, None), - codeset=_localecodesets.get(domain)) - except IOError: - return message - return t.lgettext(message) - -def ldpgettext(domain, context, message): - try: - t = translation(domain, _localedirs.get(domain, None), - codeset=_localecodesets.get(domain)) - except IOError: - return message - return t.lpgettext(context, message) - -def dngettext(domain, msgid1, msgid2, n): - try: - t = translation(domain, _localedirs.get(domain, None), - codeset=_localecodesets.get(domain)) - except IOError: - if n == 1: - return msgid1 - else: - return msgid2 - return t.ngettext(msgid1, msgid2, n) - -def dnpgettext(domain, context, msgid1, msgid2, n): - try: - t = translation(domain, _localedirs.get(domain, None), - codeset=_localecodesets.get(domain)) - except IOError: - if n == 1: - return msgid1 - else: - return msgid2 - return t.npgettext(context, msgid1, msgid2, n) - -def ldngettext(domain, msgid1, msgid2, n): - try: - t = translation(domain, _localedirs.get(domain, None), - codeset=_localecodesets.get(domain)) - except IOError: - if n == 1: - return msgid1 - else: - return msgid2 - return t.lngettext(msgid1, msgid2, n) - -def ldnpgettext(domain, context, msgid1, msgid2, n): - try: - t = translation(domain, _localedirs.get(domain, None), - codeset=_localecodesets.get(domain)) - except IOError: - if n == 1: - return msgid1 - else: - return msgid2 - return t.lnpgettext(context, msgid1, msgid2, n) - -def gettext(message): - return dgettext(_current_domain, message) - -def pgettext(context, message): - return dpgettext(_current_domain, context, message) - -def lgettext(message): - return ldgettext(_current_domain, message) - -def lpgettext(context, message): - return ldpgettext(_current_domain, context, message) - -def ngettext(msgid1, msgid2, n): - return dngettext(_current_domain, msgid1, msgid2, n) - -def npgettext(context, msgid1, msgid2, n): - return dnpgettext(_current_domain, context, msgid1, msgid2, n) - -def lngettext(msgid1, msgid2, n): - 
return ldngettext(_current_domain, msgid1, msgid2, n) - -def lnpgettext(context, msgid1, msgid2, n): - return ldnpgettext(_current_domain, context, msgid1, msgid2, n) - -# dcgettext() has been deemed unnecessary and is not implemented. - -# James Henstridge's Catalog constructor from GNOME gettext. Documented usage -# was: -# -# import gettext -# cat = gettext.Catalog(PACKAGE, localedir=LOCALEDIR) -# _ = cat.gettext -# print _('Hello World') - -# The resulting catalog object currently don't support access through a -# dictionary API, which was supported (but apparently unused) in GNOME -# gettext. - -Catalog = translation diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/__init__.py --- a/hooks/__init__.py Thu Mar 24 09:43:25 2016 +0100 +++ b/hooks/__init__.py Thu Jun 16 14:19:20 2016 +0200 @@ -33,7 +33,7 @@ # which may cause reloading pb lifetime = timedelta(days=self.repo.config['keep-transaction-lifetime']) def cleanup_old_transactions(repo=self.repo, lifetime=lifetime): - mindate = datetime.now() - lifetime + mindate = datetime.utcnow() - lifetime with repo.internal_cnx() as cnx: cnx.system_sql( 'DELETE FROM transactions WHERE tx_time < %(time)s', @@ -52,7 +52,7 @@ def update_feeds(repo): # take a list to avoid iterating on a dictionary whose size may # change - for uri, source in list(repo.sources_by_uri.iteritems()): + for uri, source in list(repo.sources_by_uri.items()): if (uri == 'system' or not repo.config.source_enabled(source) or not source.config['synchronize']): @@ -72,12 +72,12 @@ def __call__(self): def expire_dataimports(repo=self.repo): - for uri, source in repo.sources_by_uri.iteritems(): + for uri, source in repo.sources_by_uri.items(): if (uri == 'system' or not repo.config.source_enabled(source)): continue with repo.internal_cnx() as cnx: - mindate = datetime.now() - timedelta(seconds=source.config['logs-lifetime']) + mindate = datetime.utcnow() - timedelta(seconds=source.config['logs-lifetime']) cnx.execute('DELETE CWDataImport X WHERE X start_timestamp < %(time)s', {'time': mindate}) cnx.commit() diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/integrity.py --- a/hooks/integrity.py Thu Mar 24 09:43:25 2016 +0100 +++ b/hooks/integrity.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,10 +20,12 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from threading import Lock +from six import text_type + from cubicweb import validation_error, neg_role from cubicweb.schema import (META_RTYPES, WORKFLOW_RTYPES, RQLConstraint, RQLUniqueConstraint) @@ -45,7 +47,7 @@ This lock used to avoid potential integrity pb when checking RQLUniqueConstraint in two different transactions, as explained in - http://intranet.logilab.fr/jpl/ticket/36564 + https://extranet.logilab.fr/3577926 """ if 'uniquecstrholder' in cnx.transaction_data: return @@ -109,9 +111,10 @@ category = 'integrity' -class EnsureSymmetricRelationsAdd(hook.Hook): +class _EnsureSymmetricRelationsAdd(hook.Hook): """ ensure X r Y => Y r X iff r is symmetric """ __regid__ = 'cw.add_ensure_symmetry' + __abstract__ = True category = 'activeintegrity' events = ('after_add_relation',) # __select__ is set in the registration callback @@ -121,9 +124,10 @@ self.rtype, self.eidfrom) -class EnsureSymmetricRelationsDelete(hook.Hook): +class _EnsureSymmetricRelationsDelete(hook.Hook): """ ensure X r Y => Y r X iff r is symmetric """ __regid__ = 'cw.delete_ensure_symmetry' + __abstract__ = True category = 'activeintegrity' events = ('after_delete_relation',) # __select__ is set in the registration callback @@ -247,7 +251,7 @@ 
def __call__(self): entity = self.entity eschema = entity.e_schema - for attr, val in entity.cw_edited.iteritems(): + for attr, val in entity.cw_edited.items(): if eschema.subjrels[attr].final and eschema.has_unique_values(attr): if val is None: continue @@ -286,13 +290,13 @@ entity = self.entity metaattrs = entity.e_schema.meta_attributes() edited = entity.cw_edited - for metaattr, (metadata, attr) in metaattrs.iteritems(): + for metaattr, (metadata, attr) in metaattrs.items(): if metadata == 'format' and attr in edited: try: value = edited[attr] except KeyError: continue # no text to tidy - if isinstance(value, unicode): # filter out None and Binary + if isinstance(value, text_type): # filter out None and Binary if getattr(entity, str(metaattr)) == 'text/html': edited[attr] = soup2xhtml(value, self._cw.encoding) @@ -335,6 +339,9 @@ vreg.register_all(globals().values(), __name__) symmetric_rtypes = [rschema.type for rschema in vreg.schema.relations() if rschema.symmetric] - EnsureSymmetricRelationsAdd.__select__ = hook.Hook.__select__ & hook.match_rtype(*symmetric_rtypes) - EnsureSymmetricRelationsDelete.__select__ = hook.Hook.__select__ & hook.match_rtype(*symmetric_rtypes) - + class EnsureSymmetricRelationsAdd(_EnsureSymmetricRelationsAdd): + __select__ = _EnsureSymmetricRelationsAdd.__select__ & hook.match_rtype(*symmetric_rtypes) + vreg.register(EnsureSymmetricRelationsAdd) + class EnsureSymmetricRelationsDelete(_EnsureSymmetricRelationsDelete): + __select__ = _EnsureSymmetricRelationsDelete.__select__ & hook.match_rtype(*symmetric_rtypes) + vreg.register(EnsureSymmetricRelationsDelete) diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/metadata.py --- a/hooks/metadata.py Thu Mar 24 09:43:25 2016 +0100 +++ b/hooks/metadata.py Thu Jun 16 14:19:20 2016 +0200 @@ -22,6 +22,8 @@ from datetime import datetime from base64 import b64encode +from pytz import utc + from cubicweb.predicates import is_instance from cubicweb.server import hook from cubicweb.server.edition import EditedEntity @@ -41,7 +43,7 @@ events = ('before_add_entity',) def __call__(self): - timestamp = datetime.now() + timestamp = datetime.now(utc) edited = self.entity.cw_edited if not edited.get('creation_date'): edited['creation_date'] = timestamp @@ -64,7 +66,7 @@ # XXX to be really clean, we should turn off modification_date update # explicitly on each command where we do not want that behaviour. 
if not self._cw.vreg.config.repairing: - self.entity.cw_edited.setdefault('modification_date', datetime.now()) + self.entity.cw_edited.setdefault('modification_date', datetime.now(utc)) class SetCreatorOp(hook.DataOperationMixIn, hook.Operation): diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/notification.py --- a/hooks/notification.py Thu Mar 24 09:43:25 2016 +0100 +++ b/hooks/notification.py Thu Jun 16 14:19:20 2016 +0200 @@ -167,7 +167,7 @@ __abstract__ = True # do not register by default __select__ = NotificationHook.__select__ & hook.issued_from_user_query() events = ('before_update_entity',) - skip_attrs = set() + skip_attrs = set(['modification_date']) def __call__(self): cnx = self._cw diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/security.py --- a/hooks/security.py Thu Mar 24 09:43:25 2016 +0100 +++ b/hooks/security.py Thu Jun 16 14:19:20 2016 +0200 @@ -207,4 +207,3 @@ rdef = rschema.rdef(self._cw.entity_metas(self.eidfrom)['type'], self._cw.entity_metas(self.eidto)['type']) rdef.check_perm(self._cw, 'delete', fromeid=self.eidfrom, toeid=self.eidto) - diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/synccomputed.py --- a/hooks/synccomputed.py Thu Mar 24 09:43:25 2016 +0100 +++ b/hooks/synccomputed.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,7 @@ """Hooks for synchronizing computed attributes""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from collections import defaultdict @@ -40,7 +40,7 @@ self._container[computed_attribute] = set((eid,)) def precommit_event(self): - for computed_attribute_rdef, eids in self.get_data().iteritems(): + for computed_attribute_rdef, eids in self.get_data().items(): attr = computed_attribute_rdef.rtype formula = computed_attribute_rdef.formula select = self.cnx.repo.vreg.rqlhelper.parse(formula).children[0] @@ -110,7 +110,7 @@ def __call__(self): edited_attributes = frozenset(self.entity.cw_edited) - for rdef, used_attributes in self.attributes_computed_attributes.iteritems(): + for rdef, used_attributes in self.attributes_computed_attributes.items(): if edited_attributes.intersection(used_attributes): # XXX optimize if the modified attributes belong to the same # entity as the computed attribute @@ -178,7 +178,7 @@ self.computed_attribute_by_relation[depend_on_rdef].append(rdef) def generate_entity_creation_hooks(self): - for etype, computed_attributes in self.computed_attribute_by_etype.iteritems(): + for etype, computed_attributes in self.computed_attribute_by_etype.items(): regid = 'computed_attribute.%s_created' % etype selector = hook.is_instance(etype) yield type('%sCreatedHook' % etype, @@ -188,7 +188,7 @@ 'computed_attributes': computed_attributes}) def generate_relation_change_hooks(self): - for rdef, computed_attributes in self.computed_attribute_by_relation.iteritems(): + for rdef, computed_attributes in self.computed_attribute_by_relation.items(): regid = 'computed_attribute.%s_modified' % rdef.rtype selector = hook.match_rtype(rdef.rtype.type, frometypes=(rdef.subject.type,), @@ -206,7 +206,7 @@ 'optimized_computed_attributes': optimized_computed_attributes}) def generate_entity_update_hooks(self): - for etype, attributes_computed_attributes in self.computed_attribute_by_etype_attrs.iteritems(): + for etype, attributes_computed_attributes in self.computed_attribute_by_etype_attrs.items(): regid = 'computed_attribute.%s_updated' % etype selector = hook.is_instance(etype) yield type('%sModifiedHook' % etype, diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/syncschema.py --- a/hooks/syncschema.py Thu Mar 24 09:43:25 2016 
+0100 +++ b/hooks/syncschema.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -24,10 +24,12 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ +import json from copy import copy from hashlib import md5 + from yams.schema import (BASE_TYPES, BadSchemaDefinition, RelationSchema, RelationDefinitionSchema) from yams import buildobjs as ybo, convert_default_value @@ -37,7 +39,7 @@ from cubicweb import validation_error from cubicweb.predicates import is_instance from cubicweb.schema import (SCHEMA_TYPES, META_RTYPES, VIRTUAL_RTYPES, - CONSTRAINTS, ETYPE_NAME_MAP, display_name) + CONSTRAINTS, UNIQUE_CONSTRAINTS, ETYPE_NAME_MAP) from cubicweb.server import hook, schemaserial as ss, schema2sql as y2sql from cubicweb.server.sqlutils import SQL_PREFIX from cubicweb.hooks.synccomputed import RecomputeAttributeOperation @@ -158,24 +160,26 @@ cnx.transaction_data.setdefault('pendingrtypes', set()).add(rtype) -class DropColumn(hook.Operation): +class DropColumn(hook.DataOperationMixIn, hook.Operation): """actually remove the attribut's column from entity table in the system database """ - table = column = None # make pylint happy def precommit_event(self): - cnx, table, column = self.cnx, self.table, self.column - source = cnx.repo.system_source - # drop index if any - source.drop_index(cnx, table, column) - if source.dbhelper.alter_column_support: - cnx.system_sql('ALTER TABLE %s DROP COLUMN %s' % (table, column), - rollback_on_failure=False) - self.info('dropped column %s from table %s', column, table) - else: - # not supported by sqlite for instance - self.error('dropping column not supported by the backend, handle ' - 'it yourself (%s.%s)', table, column) + cnx = self.cnx + for etype, attr in self.get_data(): + table = SQL_PREFIX + etype + column = SQL_PREFIX + attr + source = cnx.repo.system_source + # drop index if any + source.drop_index(cnx, table, column) + if source.dbhelper.alter_column_support: + cnx.system_sql('ALTER TABLE %s DROP COLUMN %s' % (table, column), + rollback_on_failure=False) + self.info('dropped column %s from table %s', column, table) + else: + # not supported by sqlite for instance + self.error('dropping column not supported by the backend, handle ' + 'it yourself (%s.%s)', table, column) # XXX revertprecommit_event @@ -208,7 +212,7 @@ repo.set_schema(repo.schema) # CWUser class might have changed, update current session users cwuser_cls = self.cnx.vreg['etypes'].etype_class('CWUser') - for session in repo._sessions.itervalues(): + for session in repo._sessions.values(): session.user.__class__ = cwuser_cls except Exception: self.critical('error while setting schema', exc_info=True) @@ -330,7 +334,7 @@ self.oldvalues = dict( (attr, getattr(rschema, attr)) for attr in self.values) self.rschema.__dict__.update(self.values) # then make necessary changes to the system source database - if not 'inlined' in self.values: + if 'inlined' not in self.values: return # nothing to do inlined = self.values['inlined'] # check in-lining is possible when inlined @@ -360,8 +364,7 @@ # drop existant columns #if cnx.repo.system_source.dbhelper.alter_column_support: for etype in rschema.subjects(): - DropColumn(cnx, table=SQL_PREFIX + str(etype), - column=SQL_PREFIX + rtype) + 
DropColumn.get_instance(cnx).add_data((str(etype), rtype)) else: for etype in rschema.subjects(): try: @@ -437,12 +440,15 @@ # probably buggy) rdef = self.cnx.vreg.schema.rschema(rdefdef.name).rdefs[rdefdef.subject, rdefdef.object] assert rdef.infered + else: + rdef = self.cnx.vreg.schema.rschema(rdefdef.name).rdefs[rdefdef.subject, rdefdef.object] + self.cnx.execute('SET X ordernum Y+1 ' 'WHERE X from_entity SE, SE eid %(se)s, X ordernum Y, ' 'X ordernum >= %(order)s, NOT X eid %(x)s', {'x': entity.eid, 'se': fromentity.eid, 'order': entity.ordernum or 0}) - return rdefdef + return rdefdef, rdef def precommit_event(self): cnx = self.cnx @@ -456,15 +462,16 @@ 'indexed': entity.indexed, 'fulltextindexed': entity.fulltextindexed, 'internationalizable': entity.internationalizable} + if entity.extra_props: + props.update(json.loads(entity.extra_props.getvalue().decode('ascii'))) # entity.formula may not exist yet if we're migrating to 3.20 if hasattr(entity, 'formula'): props['formula'] = entity.formula # update the in-memory schema first - rdefdef = self.init_rdef(**props) + rdefdef, rdef = self.init_rdef(**props) # then make necessary changes to the system source database syssource = cnx.repo.system_source - attrtype = y2sql.type_from_constraints( - syssource.dbhelper, rdefdef.object, rdefdef.constraints) + attrtype = y2sql.type_from_rdef(syssource.dbhelper, rdef) # XXX should be moved somehow into lgdb: sqlite doesn't support to # add a new column with UNIQUE, it should be added after the ALTER TABLE # using ADD INDEX @@ -544,7 +551,7 @@ cnx = self.cnx entity = self.entity # update the in-memory schema first - rdefdef = self.init_rdef(composite=entity.composite) + rdefdef, rdef = self.init_rdef(composite=entity.composite) # then make necessary changes to the system source database schema = cnx.vreg.schema rtype = rdefdef.name @@ -599,17 +606,32 @@ # relations, but only if it's the last instance for this relation type # for other relations if (rschema.final or rschema.inlined): - rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R, ' - 'R eid %%(r)s, X from_entity E, E eid %%(e)s' - % rdeftype, - {'r': rschema.eid, 'e': rdef.subject.eid}) - if rset[0][0] == 0 and not cnx.deleted_in_transaction(rdef.subject.eid): - ptypes = cnx.transaction_data.setdefault('pendingrtypes', set()) - ptypes.add(rschema.type) - DropColumn(cnx, table=SQL_PREFIX + str(rdef.subject), - column=SQL_PREFIX + str(rschema)) + if not cnx.deleted_in_transaction(rdef.subject.eid): + rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R, ' + 'R eid %%(r)s, X from_entity E, E eid %%(e)s' + % rdeftype, + {'r': rschema.eid, 'e': rdef.subject.eid}) + if rset[0][0] == 0: + ptypes = cnx.transaction_data.setdefault('pendingrtypes', set()) + ptypes.add(rschema.type) + DropColumn.get_instance(cnx).add_data((str(rdef.subject), str(rschema))) + elif rschema.inlined: + cnx.system_sql('UPDATE %s%s SET %s%s=NULL WHERE ' + 'EXISTS(SELECT 1 FROM entities ' + ' WHERE eid=%s%s AND type=%%(to_etype)s)' + % (SQL_PREFIX, rdef.subject, SQL_PREFIX, rdef.rtype, + SQL_PREFIX, rdef.rtype), + {'to_etype': rdef.object.type}) elif lastrel: DropRelationTable(cnx, str(rschema)) + else: + cnx.system_sql('DELETE FROM %s_relation WHERE ' + 'EXISTS(SELECT 1 FROM entities ' + ' WHERE eid=eid_from AND type=%%(from_etype)s)' + ' AND EXISTS(SELECT 1 FROM entities ' + ' WHERE eid=eid_to AND type=%%(to_etype)s)' + % rschema, + {'from_etype': rdef.subject.type, 'to_etype': rdef.object.type}) # then update the in-memory schema if rdef.subject not in 
ETYPE_NAME_MAP and rdef.object not in ETYPE_NAME_MAP: rschema.del_relation_def(rdef.subject, rdef.object) @@ -647,8 +669,7 @@ if 'indexed' in self.values: syssource.update_rdef_indexed(cnx, rdef) self.indexed_changed = True - if 'cardinality' in self.values and (rdef.rtype.final or - rdef.rtype.inlined) \ + if 'cardinality' in self.values and rdef.rtype.final \ and self.values['cardinality'][0] != self.oldvalues['cardinality'][0]: syssource.update_rdef_null_allowed(self.cnx, rdef) self.null_allowed_changed = True @@ -717,8 +738,8 @@ syssource.update_rdef_unique(cnx, rdef) self.unique_changed = True if cstrtype in ('BoundaryConstraint', 'IntervalBoundConstraint', 'StaticVocabularyConstraint'): - cstrname = 'cstr' + md5(rdef.subject.type + rdef.rtype.type + cstrtype + - (self.oldcstr.serialize() or '')).hexdigest() + cstrname = 'cstr' + md5((rdef.subject.type + rdef.rtype.type + cstrtype + + (self.oldcstr.serialize() or '')).encode('utf-8')).hexdigest() cnx.system_sql('ALTER TABLE %s%s DROP CONSTRAINT %s' % (SQL_PREFIX, rdef.subject.type, cstrname)) def revertprecommit_event(self): @@ -749,7 +770,10 @@ return rdef = self.rdef = cnx.vreg.schema.schema_by_eid(rdefentity.eid) cstrtype = self.entity.type - oldcstr = self.oldcstr = rdef.constraint_by_type(cstrtype) + if cstrtype in UNIQUE_CONSTRAINTS: + oldcstr = self.oldcstr = rdef.constraint_by_type(cstrtype) + else: + oldcstr = None newcstr = self.newcstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value) # in-place modification of in-memory schema first _set_modifiable_constraints(rdef) @@ -769,8 +793,8 @@ self.unique_changed = True if cstrtype in ('BoundaryConstraint', 'IntervalBoundConstraint', 'StaticVocabularyConstraint'): if oldcstr is not None: - oldcstrname = 'cstr' + md5(rdef.subject.type + rdef.rtype.type + cstrtype + - (self.oldcstr.serialize() or '')).hexdigest() + oldcstrname = 'cstr' + md5((rdef.subject.type + rdef.rtype.type + cstrtype + + (self.oldcstr.serialize() or '')).encode('ascii')).hexdigest() cnx.system_sql('ALTER TABLE %s%s DROP CONSTRAINT %s' % (SQL_PREFIX, rdef.subject.type, oldcstrname)) cstrname, check = y2sql.check_constraint(rdef.subject, rdef.object, rdef.rtype.type, @@ -905,11 +929,6 @@ # duh, schema not found, log error and skip operation self.warning('no schema for %s', self.eid) return - if isinstance(erschema, RelationSchema): # XXX 3.6 migration - return - if isinstance(erschema, RelationDefinitionSchema) and \ - self.action in ('delete', 'add'): # XXX 3.6.1 migration - return perms = list(erschema.action_permissions(self.action)) if self.group_eid is not None: perm = self.cnx.entity_from_eid(self.group_eid).name @@ -972,7 +991,6 @@ raise validation_error(self.entity, {None: _("can't be deleted")}) # delete every entities of this type if name not in ETYPE_NAME_MAP: - self._cw.execute('DELETE %s X' % name) MemSchemaCWETypeDel(self._cw, etype=name) DropTable(self._cw, table=SQL_PREFIX + name) @@ -1155,10 +1173,6 @@ else: rdeftype = 'CWRelation' pendingrdefs.add((subjschema, rschema, objschema)) - if not (cnx.deleted_in_transaction(subjschema.eid) or - cnx.deleted_in_transaction(objschema.eid)): - cnx.execute('DELETE X %s Y WHERE X is %s, Y is %s' - % (rschema, subjschema, objschema)) RDefDelOp(cnx, rdef=rdef) diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/syncsession.py --- a/hooks/syncsession.py Thu Mar 24 09:43:25 2016 +0100 +++ b/hooks/syncsession.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,7 @@ """Core hooks: synchronize living session on persistent data changes""" __docformat__ = "restructuredtext 
en" -_ = unicode +from cubicweb import _ from cubicweb import UnknownProperty, BadConnectionId, validation_error from cubicweb.predicates import is_instance @@ -26,7 +26,7 @@ def get_user_sessions(repo, ueid): - for session in repo._sessions.itervalues(): + for session in repo._sessions.values(): if ueid == session.user.eid: yield session @@ -114,7 +114,7 @@ def __call__(self): """modify user permission, need to update users""" for session in get_user_sessions(self._cw.repo, self.entity.eid): - _DelUserOp(self._cw, session.id) + _DelUserOp(self._cw, session.sessionid) # CWProperty hooks ############################################################# diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/syncsources.py --- a/hooks/syncsources.py Thu Mar 24 09:43:25 2016 +0100 +++ b/hooks/syncsources.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,7 +17,7 @@ # with CubicWeb. If not, see . """hooks for repository sources synchronization""" -_ = unicode +from cubicweb import _ from socket import gethostname @@ -119,7 +119,7 @@ msg = _("You cannot rename the system source") raise validation_error(self.entity, {('name', 'subject'): msg}) SourceRenamedOp(self._cw, oldname=oldname, newname=newname) - if 'config' in self.entity.cw_edited: + if 'config' in self.entity.cw_edited or 'url' in self.entity.cw_edited: if self.entity.name == 'system' and self.entity.config: msg = _("Configuration of the system source goes to " "the 'sources' file, not in the database") diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/test/data/hooks.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hooks/test/data/hooks.py Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,8 @@ +from cubicweb.predicates import is_instance +from cubicweb.hooks import notification + + +class FolderUpdateHook(notification.EntityUpdateHook): + __select__ = (notification.EntityUpdateHook.__select__ & + is_instance('Folder')) + order = 100 # late trigger so that metadata hooks come before. 
diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/test/data/schema.py --- a/hooks/test/data/schema.py Thu Mar 24 09:43:25 2016 +0100 +++ b/hooks/test/data/schema.py Thu Jun 16 14:19:20 2016 +0200 @@ -22,7 +22,7 @@ from cubicweb.schema import ERQLExpression -_ = unicode +from cubicweb import _ class friend(RelationDefinition): subject = ('CWUser', 'CWGroup') diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/test/unittest_hooks.py --- a/hooks/test/unittest_hooks.py Thu Mar 24 09:43:25 2016 +0100 +++ b/hooks/test/unittest_hooks.py Thu Jun 16 14:19:20 2016 +0200 @@ -24,9 +24,13 @@ from datetime import datetime +from six import text_type + +from pytz import utc from cubicweb import ValidationError, AuthenticationError, BadConnectionId from cubicweb.devtools.testlib import CubicWebTC + class CoreHooksTC(CubicWebTC): def test_inlined(self): @@ -112,7 +116,7 @@ def test_metadata_creation_modification_date(self): with self.admin_access.repo_cnx() as cnx: - _now = datetime.now() + _now = datetime.now(utc) entity = cnx.create_entity('Workflow', name=u'wf1') self.assertEqual((entity.creation_date - _now).seconds, 0) self.assertEqual((entity.modification_date - _now).seconds, 0) @@ -207,7 +211,7 @@ with self.assertRaises(ValidationError) as cm: cnx.execute('INSERT CWUser X: X login "admin"') ex = cm.exception - ex.translate(unicode) + ex.translate(text_type) self.assertIsInstance(ex.entity, int) self.assertEqual(ex.errors, {'login-subject': 'the value "admin" is already used, use another one'}) diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/test/unittest_notification.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/hooks/test/unittest_notification.py Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,39 @@ +# copyright 2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""tests for notification hooks""" + +from cubicweb.devtools.testlib import CubicWebTC + + +class NotificationHooksTC(CubicWebTC): + + def test_entity_update(self): + """Check transaction_data['changes'] filled by "notifentityupdated" hook. 
+ """ + with self.admin_access.repo_cnx() as cnx: + root = cnx.create_entity('Folder', name=u'a') + cnx.commit() + root.cw_set(name=u'b') + self.assertIn('changes', cnx.transaction_data) + self.assertEqual(cnx.transaction_data['changes'], + {root.eid: set([('name', u'a', u'b')])}) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/test/unittest_synccomputed.py --- a/hooks/test/unittest_synccomputed.py Thu Mar 24 09:43:25 2016 +0100 +++ b/hooks/test/unittest_synccomputed.py Thu Jun 16 14:19:20 2016 +0200 @@ -62,7 +62,7 @@ def test_computed_attribute_by_relation(self): comp_by_rdef = self.dependencies.computed_attribute_by_relation self.assertEqual(len(comp_by_rdef), 1) - key, values = iter(comp_by_rdef.iteritems()).next() + key, values = next(iter(comp_by_rdef.items())) self.assertEqual(key.rtype, 'works_for') self.assertEqual(len(values), 1) self.assertEqual(values[0].rtype, 'total_salary') @@ -73,7 +73,7 @@ values = comp_by_attr['Person'] self.assertEqual(len(values), 2) values = set((rdef.formula, tuple(v)) - for rdef, v in values.iteritems()) + for rdef, v in values.items()) self.assertEquals(values, set((('Any 2014 - D WHERE X birth_year D', tuple(('birth_year',))), ('Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA', tuple(('salary',))))) diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/test/unittest_syncschema.py --- a/hooks/test/unittest_syncschema.py Thu Mar 24 09:43:25 2016 +0100 +++ b/hooks/test/unittest_syncschema.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,6 +19,7 @@ from logilab.common.testlib import unittest_main +from yams.constraints import BoundaryConstraint from cubicweb import ValidationError, Binary from cubicweb.schema import META_RTYPES from cubicweb.devtools import startpgcluster, stoppgcluster, PostgresApptestConfiguration @@ -382,5 +383,23 @@ self.assertEqual(cstr.values, (u'normal', u'auto', u'new')) cnx.execute('INSERT Transition T: T name "hop", T type "new"') + def test_add_constraint(self): + with self.admin_access.repo_cnx() as cnx: + rdef = self.schema['EmailPart'].rdef('ordernum') + cstr = BoundaryConstraint('>=', 0) + cnx.execute('INSERT CWConstraint X: X value %(v)s, X cstrtype CT, ' + 'EDEF constrained_by X WHERE CT name %(ct)s, EDEF eid %(x)s', + {'ct': cstr.__class__.__name__, 'v': cstr.serialize(), 'x': rdef.eid}) + cnx.commit() + cstr2 = rdef.constraint_by_type('BoundaryConstraint') + self.assertEqual(cstr, cstr2) + cstr3 = BoundaryConstraint('<=', 1000) + cnx.execute('INSERT CWConstraint X: X value %(v)s, X cstrtype CT, ' + 'EDEF constrained_by X WHERE CT name %(ct)s, EDEF eid %(x)s', + {'ct': cstr3.__class__.__name__, 'v': cstr3.serialize(), 'x': rdef.eid}) + cnx.commit() + self.assertCountEqual(rdef.constraints, [cstr, cstr3]) + + if __name__ == '__main__': unittest_main() diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/test/unittest_syncsession.py --- a/hooks/test/unittest_syncsession.py Thu Mar 24 09:43:25 2016 +0100 +++ b/hooks/test/unittest_syncsession.py Thu Jun 16 14:19:20 2016 +0200 @@ -22,6 +22,8 @@ syncschema.py hooks are mostly tested in server/test/unittest_migrations.py """ +from six import text_type + from cubicweb import ValidationError from cubicweb.devtools.testlib import CubicWebTC @@ -32,13 +34,13 @@ with self.assertRaises(ValidationError) as cm: req.execute('INSERT CWProperty X: X pkey "bla.bla", ' 'X value "hop", X for_user U') - cm.exception.translate(unicode) + cm.exception.translate(text_type) self.assertEqual(cm.exception.errors, 
{'pkey-subject': 'unknown property key bla.bla'}) with self.assertRaises(ValidationError) as cm: req.execute('INSERT CWProperty X: X pkey "bla.bla", X value "hop"') - cm.exception.translate(unicode) + cm.exception.translate(text_type) self.assertEqual(cm.exception.errors, {'pkey-subject': 'unknown property key bla.bla'}) diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/workflow.py --- a/hooks/workflow.py Thu Mar 24 09:43:25 2016 +0100 +++ b/hooks/workflow.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,7 @@ """Core hooks: workflow related hooks""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from datetime import datetime @@ -320,7 +320,7 @@ return entity = self._cw.entity_from_eid(self.eidfrom) try: - entity.cw_set(modification_date=datetime.now()) + entity.cw_set(modification_date=datetime.utcnow()) except RepositoryError as ex: # usually occurs if entity is coming from a read-only source # (eg ldap user) @@ -355,4 +355,3 @@ typewf = entity.cw_adapt_to('IWorkflowable').cwetype_workflow() if typewf is not None: _WorkflowChangedOp(self._cw, eid=self.eidfrom, wfeid=typewf.eid) - diff -r a4fcee1e9789 -r 19fcce6dc6d1 hooks/zmq.py --- a/hooks/zmq.py Thu Mar 24 09:43:25 2016 +0100 +++ b/hooks/zmq.py Thu Jun 16 14:19:20 2016 +0200 @@ -48,5 +48,3 @@ for address in address_sub: self.repo.app_instances_bus.add_subscriber(address) self.repo.app_instances_bus.start() - - diff -r a4fcee1e9789 -r 19fcce6dc6d1 i18n.py --- a/i18n.py Thu Mar 24 09:43:25 2016 +0100 +++ b/i18n.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,6 +16,7 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . """Some i18n/gettext utilities.""" +from __future__ import print_function __docformat__ = "restructuredtext en" @@ -24,6 +25,8 @@ from os.path import join, basename, splitext, exists from glob import glob +from six import PY2 + from cubicweb.toolsutils import create_dir def extract_from_tal(files, output_file): @@ -39,10 +42,10 @@ def add_msg(w, msgid, msgctx=None): """write an empty pot msgid definition""" - if isinstance(msgid, unicode): + if PY2 and isinstance(msgid, unicode): msgid = msgid.encode('utf-8') if msgctx: - if isinstance(msgctx, unicode): + if PY2 and isinstance(msgctx, unicode): msgctx = msgctx.encode('utf-8') w('msgctxt "%s"\n' % msgctx) msgid = msgid.replace('"', r'\"').splitlines() @@ -80,7 +83,7 @@ """ from subprocess import CalledProcessError from logilab.common.fileutils import ensure_fs_mode - print '-> compiling message catalogs to %s' % destdir + print('-> compiling message catalogs to %s' % destdir) errors = [] for lang in langs: langdir = join(destdir, lang, 'LC_MESSAGES') diff -r a4fcee1e9789 -r 19fcce6dc6d1 jshintrc --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/jshintrc Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,115 @@ +{ + // -------------------------------------------------------------------- + // JSHint Configuration, Strict Edition + // -------------------------------------------------------------------- + // + // This is an options template for [JSHint][1], using [JSHint example][2] + // and [Ory Band's example][3] as a basis and setting config values to + // be most strict: + // + // * set all enforcing options to true + // * set all relaxing options to false + // * set all environment options to false, except the browser value + // * set all JSLint legacy options to false + // + // [1]: http://www.jshint.com/ + // [2]: https://github.com/jshint/node-jshint/blob/master/example/config.json + // [3]:
https://github.com/oryband/dotfiles/blob/master/jshintrc + // + // @author http://michael.haschke.biz/ + // @license http://unlicense.org/ + + // == Enforcing Options =============================================== + // + // These options tell JSHint to be more strict towards your code. Use + // them if you want to allow only a safe subset of JavaScript, very + // useful when your codebase is shared with a big number of developers + // with different skill levels. + + "bitwise" : true, // Prohibit bitwise operators (&, |, ^, etc.). + "curly" : true, // Require {} for every new block or scope. + "eqeqeq" : true, // Require triple equals i.e. `===`. + "forin" : true, // Require `for in` loops to filter with `hasOwnProperty`. + "immed" : true, // Require immediate invocations to be wrapped in parens e.g. `( function(){}() );` + "latedef" : true, // Prohibit variable use before definition. + "newcap" : true, // Require capitalization of all constructor functions e.g. `new F()`. + "noarg" : true, // Prohibit use of `arguments.caller` and `arguments.callee`. + "noempty" : true, // Prohibit use of empty blocks. + "nonew" : true, // Prohibit use of constructors for side-effects. + "plusplus" : true, // Prohibit use of `++` & `--`. + "regexp" : true, // Prohibit `.` and `[^...]` in regular expressions. + "undef" : true, // Require all non-global variables be declared before they are used. + "strict" : true, // Require `use strict` pragma in every file. + "trailing" : true, // Prohibit trailing whitespaces. + + // == Relaxing Options ================================================ + // + // These options allow you to suppress certain types of warnings. Use + // them only if you are absolutely positive that you know what you are + // doing. + + "asi" : false, // Tolerate Automatic Semicolon Insertion (no semicolons). + "boss" : false, // Tolerate assignments inside if, for & while. Usually conditions & loops are for comparison, not assignments. + "debug" : false, // Allow debugger statements e.g. browser breakpoints. + "eqnull" : false, // Tolerate use of `== null`. + "es5" : false, // Allow EcmaScript 5 syntax. + "esnext" : false, // Allow ES.next specific features such as `const` and `let`. + "evil" : false, // Tolerate use of `eval`. + "expr" : false, // Tolerate `ExpressionStatement` as Programs. + "funcscope" : false, // Tolerate declarations of variables inside of control structures while accessing them later from the outside. + "globalstrict" : false, // Allow global "use strict" (also enables 'strict'). + "iterator" : false, // Allow usage of __iterator__ property. + "lastsemic" : false, // Tolerate missing semicolons when omitted for the last statement in a one-line block. + "laxbreak" : false, // Tolerate unsafe line breaks e.g. `return [\n] x` without semicolons. + "laxcomma" : false, // Suppress warnings about comma-first coding style. + "loopfunc" : false, // Allow functions to be defined within loops. + "multistr" : false, // Tolerate multi-line strings. + "onecase" : false, // Tolerate switches with just one case. + "proto" : false, // Tolerate __proto__ property. This property is deprecated. + "regexdash" : false, // Tolerate unescaped last dash i.e. `[-...]`. + "scripturl" : false, // Tolerate script-targeted URLs. + "smarttabs" : false, // Tolerate mixed tabs and spaces when the latter are used for alignment only. + "shadow" : false, // Allow re-defining variables later in code e.g. `var x=1; x=2;`.
+ "sub" : false, // Tolerate all forms of subscript notation besides dot notation e.g. `dict['key']` instead of `dict.key`. + "supernew" : false, // Tolerate `new function () { ... };` and `new Object;`. + "validthis" : false, // Tolerate strict violations when the code is running in strict mode and you use this in a non-constructor function. + + // == Environments ==================================================== + // + // These options pre-define global variables that are exposed by + // popular JavaScript libraries and runtime environments—such as + // browser or node.js. + + "browser" : true, // Standard browser globals e.g. `window`, `document`. + "couch" : false, // Enable globals exposed by CouchDB. + "devel" : false, // Allow development statements e.g. `console.log();`. + "dojo" : false, // Enable globals exposed by Dojo Toolkit. + "jquery" : false, // Enable globals exposed by jQuery JavaScript library. + "mootools" : false, // Enable globals exposed by MooTools JavaScript framework. + "node" : false, // Enable globals available when code is running inside of the NodeJS runtime environment. + "nonstandard" : false, // Define non-standard but widely adopted globals such as escape and unescape. + "prototypejs" : false, // Enable globals exposed by Prototype JavaScript framework. + "rhino" : false, // Enable globals available when your code is running inside of the Rhino runtime environment. + "wsh" : false, // Enable globals available when your code is running as a script for the Windows Script Host. + + // == JSLint Legacy =================================================== + // + // These options are legacy from JSLint. Aside from bug fixes they will + // not be improved in any way and might be removed at any point. + + "nomen" : false, // Prohibit use of initial or trailing underbars in names. + "onevar" : false, // Allow only one `var` statement per function. + "passfail" : false, // Stop on first error. + "white" : false, // Check against strict whitespace and indentation rules. + + // == Undocumented Options ============================================ + // + // While I've found these options in [example1][2] and [example2][3] + // they are not described in the [JSHint Options documentation][4]. + // + // [4]: http://www.jshint.com/options/ + + "maxerr" : 100, // Maximum errors before stopping. + "predef" : [], // Extra globals. 
+ "indent" : 4 // Specify indentation spacing +} diff -r a4fcee1e9789 -r 19fcce6dc6d1 mail.py --- a/mail.py Thu Mar 24 09:43:25 2016 +0100 +++ b/mail.py Thu Jun 16 14:19:20 2016 +0200 @@ -28,16 +28,27 @@ from email.utils import formatdate from socket import gethostname +from six import PY2, PY3, text_type + + def header(ustring): + if PY3: + return Header(ustring, 'utf-8') return Header(ustring.encode('UTF-8'), 'UTF-8') def addrheader(uaddr, uname=None): # even if an email address should be ascii, encode it using utf8 since # automatic tests may generate non ascii email address - addr = uaddr.encode('UTF-8') + if PY2: + addr = uaddr.encode('UTF-8') + else: + addr = uaddr if uname: - return '%s <%s>' % (header(uname).encode(), addr) - return addr + val = '%s <%s>' % (header(uname).encode(), addr) + else: + val = addr + assert isinstance(val, str) # bytes in py2, ascii-encoded unicode in py3 + return val def construct_message_id(appid, eid, withtimestamp=True): @@ -46,7 +57,7 @@ else: addrpart = 'eid=%s' % eid # we don't want any equal sign nor trailing newlines - leftpart = b64encode(addrpart, '.-').rstrip().rstrip('=') + leftpart = b64encode(addrpart.encode('ascii'), b'.-').decode('ascii').rstrip().rstrip('=') return '<%s@%s.%s>' % (leftpart, appid, gethostname()) @@ -58,7 +69,7 @@ try: values, qualif = msgid.split('@') padding = len(values) % 4 - values = b64decode(str(values + '='*padding), '.-') + values = b64decode(str(values + '='*padding), '.-').decode('ascii') values = dict(v.split('=') for v in values.split('&')) fromappid, host = qualif.split('.', 1) except Exception: @@ -75,7 +86,7 @@ to_addrs and cc_addrs are expected to be a list of email address without name """ - assert type(content) is unicode, repr(content) + assert isinstance(content, text_type), repr(content) msg = MIMEText(content.encode('UTF-8'), 'plain', 'UTF-8') # safety: keep only the first newline try: @@ -86,13 +97,13 @@ if uinfo.get('email'): email = uinfo['email'] elif config and config['sender-addr']: - email = unicode(config['sender-addr']) + email = text_type(config['sender-addr']) else: email = u'' if uinfo.get('name'): name = uinfo['name'] elif config and config['sender-name']: - name = unicode(config['sender-name']) + name = text_type(config['sender-name']) else: name = u'' msg['From'] = addrheader(email, name) diff -r a4fcee1e9789 -r 19fcce6dc6d1 md5crypt.py --- a/md5crypt.py Thu Mar 24 09:43:25 2016 +0100 +++ b/md5crypt.py Thu Jun 16 14:19:20 2016 +0200 @@ -38,31 +38,37 @@ this stuff is worth it, you can buy me a beer in return. 
Poul-Henning Kamp """ -MAGIC = '$1$' # Magic string -ITOA64 = "./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" +MAGIC = b'$1$' # Magic string +ITOA64 = b"./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" from hashlib import md5 # pylint: disable=E0611 +from six import text_type, indexbytes +from six.moves import range + + def to64 (v, n): - ret = '' + ret = bytearray() while (n - 1 >= 0): n = n - 1 - ret = ret + ITOA64[v & 0x3f] + ret.append(ITOA64[v & 0x3f]) v = v >> 6 return ret def crypt(pw, salt): - if isinstance(pw, unicode): + if isinstance(pw, text_type): pw = pw.encode('utf-8') + if isinstance(salt, text_type): + salt = salt.encode('ascii') # Take care of the magic string if present if salt.startswith(MAGIC): salt = salt[len(MAGIC):] # salt can have up to 8 characters: - salt = salt.split('$', 1)[0] + salt = salt.split(b'$', 1)[0] salt = salt[:8] ctx = pw + MAGIC + salt final = md5(pw + salt + pw).digest() - for pl in xrange(len(pw), 0, -16): + for pl in range(len(pw), 0, -16): if pl > 16: ctx = ctx + final[:16] else: @@ -71,7 +77,7 @@ i = len(pw) while i: if i & 1: - ctx = ctx + chr(0) #if ($i & 1) { $ctx->add(pack("C", 0)); } + ctx = ctx + b'\0' #if ($i & 1) { $ctx->add(pack("C", 0)); } else: ctx = ctx + pw[0] i = i >> 1 @@ -79,8 +85,8 @@ # The following is supposed to make # things run slower. # my question: WTF??? - for i in xrange(1000): - ctx1 = '' + for i in range(1000): + ctx1 = b'' if i & 1: ctx1 = ctx1 + pw else: @@ -95,21 +101,21 @@ ctx1 = ctx1 + pw final = md5(ctx1).digest() # Final xform - passwd = '' - passwd = passwd + to64((int(ord(final[0])) << 16) - |(int(ord(final[6])) << 8) - |(int(ord(final[12]))),4) - passwd = passwd + to64((int(ord(final[1])) << 16) - |(int(ord(final[7])) << 8) - |(int(ord(final[13]))), 4) - passwd = passwd + to64((int(ord(final[2])) << 16) - |(int(ord(final[8])) << 8) - |(int(ord(final[14]))), 4) - passwd = passwd + to64((int(ord(final[3])) << 16) - |(int(ord(final[9])) << 8) - |(int(ord(final[15]))), 4) - passwd = passwd + to64((int(ord(final[4])) << 16) - |(int(ord(final[10])) << 8) - |(int(ord(final[5]))), 4) - passwd = passwd + to64((int(ord(final[11]))), 2) + passwd = b'' + passwd += to64((indexbytes(final, 0) << 16) + |(indexbytes(final, 6) << 8) + |(indexbytes(final, 12)),4) + passwd += to64((indexbytes(final, 1) << 16) + |(indexbytes(final, 7) << 8) + |(indexbytes(final, 13)), 4) + passwd += to64((indexbytes(final, 2) << 16) + |(indexbytes(final, 8) << 8) + |(indexbytes(final, 14)), 4) + passwd += to64((indexbytes(final, 3) << 16) + |(indexbytes(final, 9) << 8) + |(indexbytes(final, 15)), 4) + passwd += to64((indexbytes(final, 4) << 16) + |(indexbytes(final, 10) << 8) + |(indexbytes(final, 5)), 4) + passwd += to64((indexbytes(final, 11)), 2) return passwd diff -r a4fcee1e9789 -r 19fcce6dc6d1 migration.py --- a/migration.py Thu Mar 24 09:43:25 2016 +0100 +++ b/migration.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,6 +16,7 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
"""utilities for instances migration""" +from __future__ import print_function __docformat__ = "restructuredtext en" @@ -25,6 +26,9 @@ import tempfile from os.path import exists, join, basename, splitext from itertools import chain +from warnings import warn + +from six import string_types from logilab.common import IGNORED_EXTENSIONS from logilab.common.decorators import cached @@ -49,7 +53,7 @@ assert fromversion <= toversion, (fromversion, toversion) if not exists(directory): if not quiet: - print directory, "doesn't exists, no migration path" + print(directory, "doesn't exists, no migration path") return [] if fromversion == toversion: return [] @@ -93,9 +97,9 @@ stream = open(scriptpath) scriptcontent = stream.read() stream.close() - print - print scriptcontent - print + print() + print(scriptcontent) + print() else: return True @@ -139,9 +143,6 @@ raise raise AttributeError(name) - def repo_connect(self): - return self.config.repository() - def migrate(self, vcconf, toupgrade, options): """upgrade the given set of cubes @@ -243,7 +244,7 @@ # avoid '_' to be added to builtins by sys.display_hook def do_not_add___to_builtins(obj): if obj is not None: - print repr(obj) + print(repr(obj)) sys.displayhook = do_not_add___to_builtins local_ctx = self._create_context() try: @@ -349,7 +350,16 @@ else: pyname = splitext(basename(migrscript))[0] scriptlocals['__name__'] = pyname - execfile(migrscript, scriptlocals) + with open(migrscript, 'rb') as fobj: + fcontent = fobj.read() + try: + code = compile(fcontent, migrscript, 'exec') + except SyntaxError: + # try without print_function + code = compile(fcontent, migrscript, 'exec', 0, True) + warn('[3.22] script %r should be updated to work with print_function' + % migrscript, DeprecationWarning) + exec(code, scriptlocals) if funcname is not None: try: func = scriptlocals[funcname] @@ -399,7 +409,7 @@ """modify the list of used cubes in the in-memory config returns newly inserted cubes, including dependencies """ - if isinstance(cubes, basestring): + if isinstance(cubes, string_types): cubes = (cubes,) origcubes = self.config.cubes() newcubes = [p for p in self.config.expand_cubes(cubes) @@ -454,6 +464,10 @@ def version_strictly_lower(a, b): + if a is None: + return True + if b is None: + return False if a: a = Version(a) if b: @@ -491,8 +505,8 @@ self.dependencies[cube] = dict(self.config.cube_dependencies(cube)) self.dependencies[cube]['cubicweb'] = self.config.cube_depends_cubicweb_version(cube) # compute reverse dependencies - for cube, dependencies in self.dependencies.iteritems(): - for name, constraint in dependencies.iteritems(): + for cube, dependencies in self.dependencies.items(): + for name, constraint in dependencies.items(): self.reverse_dependencies.setdefault(name,set()) if constraint: try: @@ -522,9 +536,9 @@ elif op == None: continue else: - print ('unable to handle %s in %s, set to `%s %s` ' - 'but currently up to `%s %s`' % - (cube, source, oper, version, op, ver)) + print('unable to handle %s in %s, set to `%s %s` ' + 'but currently up to `%s %s`' % + (cube, source, oper, version, op, ver)) # "solve" constraint satisfaction problem if cube not in self.cubes: self.errors.append( ('add', cube, version, source) ) @@ -536,4 +550,4 @@ elif oper is None: pass # no constraint on version else: - print 'unknown operator', oper + print('unknown operator', oper) diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/cwfs/cwfs.py --- a/misc/cwfs/cwfs.py Thu Mar 24 09:43:25 2016 +0100 +++ b/misc/cwfs/cwfs.py Thu Jun 16 14:19:20 2016 +0200 @@ -80,17 
+80,17 @@ self._restrictions = [] def parse(self) : - self._entity = self._components.next() + self._entity = next(self._components) try: self.process_entity() except StopIteration : pass def process_entity(self) : - _next = self._components.next() + _next = next(self._components) if _next in self.schema.get_attrs(self._entity) : self._attr = _next - _next = self._components.next() + _next = next(self._components) self._restrictions.append( (self._entity, self._attr, _next) ) self._attr = None self._rel = None @@ -136,7 +136,7 @@ def parse(self): self._var = self._alphabet.pop(0) - self._e_type = self._components.next() + self._e_type = next(self._components) e_type = self._e_type.capitalize() self._restrictions.append('%s is %s' % (self._var, e_type)) try: @@ -146,11 +146,11 @@ return 'Any %s WHERE %s' % (self._var, ', '.join(self._restrictions)) def process_entity(self) : - _next = self._components.next() + _next = next(self._components) if _next in self.schema.get_attrs(self._e_type) : attr = _next try: - _next = self._components.next() + _next = next(self._components) self._restrictions.append('%s %s %s' % (self._var, attr, _next)) except StopIteration: a_var = self._alphabet.pop(0) @@ -163,7 +163,7 @@ self._restrictions.append('%s %s %s' % (self._var, rel, r_var)) self._var = r_var try: - _next = self._components.next() + _next = next(self._components) self._restrictions.append('%s is %s' % (r_var, _next.capitalize())) except StopIteration: raise @@ -173,4 +173,3 @@ def to_rql(path) : p = SytPathParser(SCHEMA,path) return p.parse() - diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/cwfs/cwfs_test.py --- a/misc/cwfs/cwfs_test.py Thu Mar 24 09:43:25 2016 +0100 +++ b/misc/cwfs/cwfs_test.py Thu Jun 16 14:19:20 2016 +0200 @@ -30,7 +30,7 @@ sections = [] buffer = "" in_section = False - for line in file(filename) : + for line in open(filename) : if line.startswith('Test::'): in_section = True buffer = "" diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/cwzope/cwzope.py --- a/misc/cwzope/cwzope.py Thu Mar 24 09:43:25 2016 +0100 +++ b/misc/cwzope/cwzope.py Thu Jun 16 14:19:20 2016 +0200 @@ -48,4 +48,3 @@ cnx = connect(user, password, host, database, group) CNX_CACHE[key] = cnx return cnx - diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/migration/3.10.0_Any.py --- a/misc/migration/3.10.0_Any.py Thu Mar 24 09:43:25 2016 +0100 +++ b/misc/migration/3.10.0_Any.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,3 +1,4 @@ +from six import text_type for uri, cfg in config.read_sources_file().items(): if uri in ('system', 'admin'): @@ -23,7 +24,7 @@ repo.sources_by_uri.pop(uri) config = u'\n'.join('%s=%s' % (key, value) for key, value in cfg.items() if key != 'adapter' and value is not None) - create_entity('CWSource', name=unicode(uri), type=unicode(cfg['adapter']), + create_entity('CWSource', name=text_type(uri), type=text_type(cfg['adapter']), config=config) commit() @@ -32,4 +33,3 @@ 'X pkey ~= "boxes.%" OR ' 'X pkey ~= "contentnavigation.%"').entities(): x.cw_set(pkey=u'ctxcomponents.' 
+ x.pkey.split('.', 1)[1]) - diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/migration/3.14.0_Any.py --- a/misc/migration/3.14.0_Any.py Thu Mar 24 09:43:25 2016 +0100 +++ b/misc/migration/3.14.0_Any.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,3 +1,5 @@ +from __future__ import print_function + config['rql-cache-size'] = config['rql-cache-size'] * 10 add_entity_type('CWDataImport') @@ -10,4 +12,4 @@ mainvars = guess_rrqlexpr_mainvars(expression) yamscstr = CONSTRAINTS[rqlcstr.type](expression, mainvars) rqlcstr.cw_set(value=yamscstr.serialize()) - print 'updated', rqlcstr.type, rqlcstr.value.strip() + print('updated', rqlcstr.type, rqlcstr.value.strip()) diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/migration/3.15.4_Any.py --- a/misc/migration/3.15.4_Any.py Thu Mar 24 09:43:25 2016 +0100 +++ b/misc/migration/3.15.4_Any.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,3 +1,5 @@ +from __future__ import print_function + from logilab.common.shellutils import generate_password from cubicweb.server.utils import crypt_password @@ -5,7 +7,7 @@ salt = user.upassword.getvalue() if crypt_password('', salt) == salt: passwd = generate_password() - print 'setting random password for user %s' % user.login + print('setting random password for user %s' % user.login) user.set_attributes(upassword=passwd) commit() diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/migration/3.21.0_Any.py --- a/misc/migration/3.21.0_Any.py Thu Mar 24 09:43:25 2016 +0100 +++ b/misc/migration/3.21.0_Any.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,3 +1,5 @@ +from __future__ import print_function + from cubicweb.schema import PURE_VIRTUAL_RTYPES from cubicweb.server.schema2sql import rschema_has_table @@ -27,7 +29,7 @@ ' SELECT eid FROM entities) AS eids' % args, ask_confirm=False)[0][0] if count: - print '%s references %d unknown entities, deleting' % (rschema, count) + print('%s references %d unknown entities, deleting' % (rschema, count)) sql('DELETE FROM %(r)s_relation ' 'WHERE eid_from IN (SELECT eid_from FROM %(r)s_relation EXCEPT SELECT eid FROM entities)' % args) sql('DELETE FROM %(r)s_relation ' @@ -65,14 +67,14 @@ broken_eids = sql('SELECT cw_eid FROM cw_%(e)s WHERE cw_%(r)s IS NULL' % args, ask_confirm=False) if broken_eids: - print 'Required relation %(e)s.%(r)s missing' % args + print('Required relation %(e)s.%(r)s missing' % args) args['eids'] = ', '.join(str(eid) for eid, in broken_eids) rql('DELETE %(e)s X WHERE X eid IN (%(eids)s)' % args) broken_eids = sql('SELECT cw_eid FROM cw_%(e)s WHERE cw_%(r)s IN (SELECT cw_%(r)s FROM cw_%(e)s ' 'EXCEPT SELECT eid FROM entities)' % args, ask_confirm=False) if broken_eids: - print 'Required relation %(e)s.%(r)s references unknown objects, deleting subject entities' % args + print('Required relation %(e)s.%(r)s references unknown objects, deleting subject entities' % args) args['eids'] = ', '.join(str(eid) for eid, in broken_eids) rql('DELETE %(e)s X WHERE X eid IN (%(eids)s)' % args) else: @@ -81,7 +83,7 @@ ' EXCEPT' ' SELECT eid FROM entities) AS eids' % args, ask_confirm=False)[0][0]: - print '%(e)s.%(r)s references unknown entities, deleting relation' % args + print('%(e)s.%(r)s references unknown entities, deleting relation' % args) sql('UPDATE cw_%(e)s SET cw_%(r)s = NULL WHERE cw_%(r)s IS NOT NULL AND cw_%(r)s IN ' '(SELECT cw_%(r)s FROM cw_%(e)s EXCEPT SELECT eid FROM entities)' % args) @@ -104,7 +106,7 @@ ' EXCEPT' ' SELECT eid FROM entities) AS eids' % args, ask_confirm=False)[0][0]: - print '%(e)s has nonexistent entities, deleting' % args + print('%(e)s has nonexistent entities, deleting' % args) 
sql('DELETE FROM cw_%(e)s WHERE cw_eid IN ' '(SELECT cw_eid FROM cw_%(e)s EXCEPT SELECT eid FROM entities)' % args) args['c'] = 'cw_%(e)s_cw_eid_fkey' % args @@ -164,9 +166,9 @@ cstr, helper, prefix='cw_') args = {'e': rdef.subject.type, 'c': cstrname, 'v': check} if repo.system_source.dbdriver == 'postgres': - sql('ALTER TABLE cw_%(e)s DROP CONSTRAINT IF EXISTS %(c)s' % args) + sql('ALTER TABLE cw_%(e)s DROP CONSTRAINT IF EXISTS %(c)s' % args, ask_confirm=False) elif repo.system_source.dbdriver.startswith('sqlserver'): sql("IF OBJECT_ID('%(c)s', 'C') IS NOT NULL " - "ALTER TABLE cw_%(e)s DROP CONSTRAINT %(c)s" % args) - sql('ALTER TABLE cw_%(e)s ADD CONSTRAINT %(c)s CHECK(%(v)s)' % args) + "ALTER TABLE cw_%(e)s DROP CONSTRAINT %(c)s" % args, ask_confirm=False) + sql('ALTER TABLE cw_%(e)s ADD CONSTRAINT %(c)s CHECK(%(v)s)' % args, ask_confirm=False) commit() diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/migration/3.22.0_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/misc/migration/3.22.0_Any.py Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,21 @@ +if confirm('use Europe/Paris as timezone?'): + timezone = 'Europe/Paris' +else: + import pytz + while True: + timezone = raw_input('enter your timezone') + if timezone in pytz.common_timezones: + break + +dbdriver = repo.system_source.dbdriver +if dbdriver == 'postgres': + sql("SET TIME ZONE '%s'" % timezone) + +for entity in schema.entities(): + if entity.final or entity.type not in fsschema: + continue + change_attribute_type(entity.type, 'creation_date', 'TZDatetime', ask_confirm=False) + change_attribute_type(entity.type, 'modification_date', 'TZDatetime', ask_confirm=False) + +if dbdriver == 'postgres': + sql("SET TIME ZONE UTC") diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/migration/3.22.1_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/misc/migration/3.22.1_Any.py Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,12 @@ +from os import unlink +from os.path import isfile, join +from cubicweb.cwconfig import CubicWebConfiguration as cwcfg + +regdir = cwcfg.instances_dir() + +if isfile(join(regdir, 'startorder')): + if confirm('The startorder file is not used anymore in Cubicweb 3.22. 
' + 'Should I delete it?', + shell=False, pdb=False): + unlink(join(regdir, 'startorder')) + diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/migration/3.8.5_Any.py --- a/misc/migration/3.8.5_Any.py Thu Mar 24 09:43:25 2016 +0100 +++ b/misc/migration/3.8.5_Any.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,3 +1,5 @@ +from __future__ import print_function + def migrate_varchar_to_nvarchar(): dbdriver = config.system_source_config['db-driver'] if dbdriver != "sqlserver2005": @@ -52,7 +54,7 @@ for statement in generated_statements: - print statement + print(statement) sql(statement, ask_confirm=False) commit() diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/migration/bootstrapmigration_repository.py --- a/misc/migration/bootstrapmigration_repository.py Thu Mar 24 09:43:25 2016 +0100 +++ b/misc/migration/bootstrapmigration_repository.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,6 +19,9 @@ it should only include low level schema changes """ +from __future__ import print_function + +from six import text_type from cubicweb import ConfigurationError from cubicweb.server.session import hooks_control @@ -77,8 +80,8 @@ sql('ALTER TABLE "entities" DROP COLUMN "mtime"') sql('ALTER TABLE "entities" DROP COLUMN "source"') except: # programming error, already migrated - print "Failed to drop mtime or source database columns" - print "'entities' table of the database has probably been already updated" + print("Failed to drop mtime or source database columns") + print("'entities' table of the database has probably been already updated") commit() @@ -101,7 +104,7 @@ driver = config.system_source_config['db-driver'] if not (driver == 'postgres' or driver.startswith('sqlserver')): import sys - print >>sys.stderr, 'This migration is not supported for backends other than sqlserver or postgres (yet).' 
+ print('This migration is not supported for backends other than sqlserver or postgres (yet).', file=sys.stderr) sys.exit(1) add_relation_definition('CWAttribute', 'add_permission', 'CWGroup') @@ -148,7 +151,7 @@ default = yams.DATE_FACTORY_MAP[atype](default) else: assert atype == 'String', atype - default = unicode(default) + default = text_type(default) return Binary.zpickle(default) dbh = repo.system_source.dbhelper @@ -196,7 +199,7 @@ (rschema.type, ','.join(subjects)))) if martians: martians = ','.join(martians) - print 'deleting broken relations %s for eids %s' % (rschema.type, martians) + print('deleting broken relations %s for eids %s' % (rschema.type, martians)) sql('DELETE FROM %s_relation WHERE eid_from IN (%s) OR eid_to IN (%s)' % (rschema.type, martians, martians)) with session.deny_all_hooks_but(): rql('SET X %(r)s Y WHERE Y %(r)s X, NOT X %(r)s Y' % {'r': rschema.type}) @@ -219,20 +222,20 @@ if driver == 'postgres': for indexname, in sql('select indexname from pg_indexes'): if indexname.startswith('unique_'): - print 'dropping index', indexname + print('dropping index', indexname) sql('DROP INDEX %s' % indexname) commit() elif driver.startswith('sqlserver'): for viewname, in sql('select name from sys.views'): if viewname.startswith('utv_'): - print 'dropping view (index should be cascade-deleted)', viewname + print('dropping view (index should be cascade-deleted)', viewname) sql('DROP VIEW %s' % viewname) commit() # recreate the constraints, hook will lead to low-level recreation for eschema in sorted(schema.entities()): if eschema._unique_together: - print 'recreate unique indexes for', eschema + print('recreate unique indexes for', eschema) rql_args = schemaserial.uniquetogether2rqls(eschema) for rql, args in rql_args: args['x'] = eschema.eid @@ -243,10 +246,10 @@ for rschema in sorted(schema.relations()): if rschema.final: if rschema.type in fsschema: - print 'sync perms for', rschema.type + print('sync perms for', rschema.type) sync_schema_props_perms(rschema.type, syncprops=False, ask_confirm=False, commit=False) else: - print 'WARNING: attribute %s missing from fs schema' % rschema.type + print('WARNING: attribute %s missing from fs schema' % rschema.type) commit() if applcubicwebversion < (3, 17, 0) and cubicwebversion >= (3, 17, 0): @@ -298,7 +301,7 @@ with hooks_control(session, session.HOOKS_ALLOW_ALL, 'integrity'): for rschema in repo.schema.relations(): rpermsdict = permsdict.get(rschema.eid, {}) - for rdef in rschema.rdefs.itervalues(): + for rdef in rschema.rdefs.values(): for action in rdef.ACTIONS: actperms = [] for something in rpermsdict.get(action == 'update' and 'add' or action, ()): diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/migration/postcreate.py --- a/misc/migration/postcreate.py Thu Mar 24 09:43:25 2016 +0100 +++ b/misc/migration/postcreate.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,22 +16,28 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
"""cubicweb post creation script, set user's workflow""" +from __future__ import print_function + +from six import text_type + +from cubicweb import _ + # insert versions create_entity('CWProperty', pkey=u'system.version.cubicweb', - value=unicode(config.cubicweb_version())) + value=text_type(config.cubicweb_version())) for cube in config.cubes(): create_entity('CWProperty', pkey=u'system.version.%s' % cube.lower(), - value=unicode(config.cube_version(cube))) + value=text_type(config.cube_version(cube))) -# some entities have been added before schema entities, fix the 'is' and +# some entities have been added before schema entities, add their missing 'is' and # 'is_instance_of' relations for rtype in ('is', 'is_instance_of'): sql('INSERT INTO %s_relation ' 'SELECT X.eid, ET.cw_eid FROM entities as X, cw_CWEType as ET ' 'WHERE X.type=ET.cw_name AND NOT EXISTS(' - ' SELECT 1 from is_relation ' - ' WHERE eid_from=X.eid AND eid_to=ET.cw_eid)' % rtype) + ' SELECT 1 from %s_relation ' + ' WHERE eid_from=X.eid AND eid_to=ET.cw_eid)' % (rtype, rtype)) # user workflow userwf = add_workflow(_('default user workflow'), 'CWUser') @@ -46,11 +52,11 @@ if hasattr(config, 'anonymous_user'): anonlogin, anonpwd = config.anonymous_user() if anonlogin == session.user.login: - print 'you are using a manager account as anonymous user.' - print 'Hopefully this is not a production instance...' + print('you are using a manager account as anonymous user.') + print('Hopefully this is not a production instance...') elif anonlogin: from cubicweb.server import create_user - create_user(session, unicode(anonlogin), anonpwd, u'guests') + create_user(session, text_type(anonlogin), anonpwd, u'guests') # need this since we already have at least one user in the database (the default admin) for user in rql('Any X WHERE X is CWUser').entities(): diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/scripts/cwuser_ldap2system.py --- a/misc/scripts/cwuser_ldap2system.py Thu Mar 24 09:43:25 2016 +0100 +++ b/misc/scripts/cwuser_ldap2system.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,3 +1,5 @@ +from __future__ import print_function + import base64 from cubicweb.server.utils import crypt_password @@ -20,10 +22,10 @@ rset = sql("SELECT eid,type,source,extid,mtime FROM entities WHERE source!='system'", ask_confirm=False) for eid, type, source, extid, mtime in rset: if type != 'CWUser': - print "don't know what to do with entity type", type + print("don't know what to do with entity type", type) continue if not source.lower().startswith('ldap'): - print "don't know what to do with source type", source + print("don't know what to do with source type", source) continue extid = base64.decodestring(extid) ldapinfos = [x.strip().split('=') for x in extid.split(',')] @@ -33,7 +35,7 @@ args = dict(eid=eid, type=type, source=source, login=login, firstname=firstname, surname=surname, mtime=mtime, pwd=dbhelper.binary_value(crypt_password('toto'))) - print args + print(args) sql(insert, args) sql(update, args) diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/scripts/detect_cycle.py --- a/misc/scripts/detect_cycle.py Thu Mar 24 09:43:25 2016 +0100 +++ b/misc/scripts/detect_cycle.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,9 +1,10 @@ +from __future__ import print_function try: rtype, = __args__ except ValueError: - print 'USAGE: cubicweb-ctl shell detect_cycle.py -- ' - print + print('USAGE: cubicweb-ctl shell detect_cycle.py -- ') + print() graph = {} for fromeid, toeid in rql('Any X,Y WHERE X %s Y' % rtype): @@ -12,4 +13,4 @@ from logilab.common.graph import get_cycles for 
cycle in get_cycles(graph): - print 'cycle', '->'.join(str(n) for n in cycle) + print('cycle', '->'.join(str(n) for n in cycle)) diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/scripts/ldap_change_base_dn.py --- a/misc/scripts/ldap_change_base_dn.py Thu Mar 24 09:43:25 2016 +0100 +++ b/misc/scripts/ldap_change_base_dn.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,10 +1,12 @@ +from __future__ import print_function + from base64 import b64decode, b64encode try: uri, newdn = __args__ except ValueError: - print 'USAGE: cubicweb-ctl shell ldap_change_base_dn.py -- ' - print - print 'you should not have updated your sources file yet' + print('USAGE: cubicweb-ctl shell ldap_change_base_dn.py -- ') + print() + print('you should not have updated your sources file yet') olddn = repo.sources_by_uri[uri].config['user-base-dn'] @@ -16,9 +18,9 @@ olduserdn = b64decode(extid) newuserdn = olduserdn.replace(olddn, newdn) if newuserdn != olduserdn: - print olduserdn, '->', newuserdn + print(olduserdn, '->', newuserdn) sql("UPDATE entities SET extid='%s' WHERE eid=%s" % (b64encode(newuserdn), eid)) commit() -print 'you can now update the sources file to the new dn and restart the instance' +print('you can now update the sources file to the new dn and restart the instance') diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/scripts/ldapuser2ldapfeed.py --- a/misc/scripts/ldapuser2ldapfeed.py Thu Mar 24 09:43:25 2016 +0100 +++ b/misc/scripts/ldapuser2ldapfeed.py Thu Jun 16 14:19:20 2016 +0200 @@ -2,6 +2,8 @@ Once this script is run, execute c-c db-check to cleanup relation tables. """ +from __future__ import print_function + import sys from collections import defaultdict from logilab.common.shellutils import generate_password @@ -14,12 +16,12 @@ ' on the command line)') sys.exit(1) except KeyError: - print '%s is not an active source' % source_name + print('%s is not an active source' % source_name) sys.exit(1) # check source is reachable before doing anything if not source.get_connection().cnx: - print '%s is not reachable. Fix this before running this script' % source_name + print('%s is not reachable. Fix this before running this script' % source_name) sys.exit(1) raw_input('Ensure you have shutdown all instances of this application before continuing.' 
@@ -31,7 +33,7 @@ from cubicweb.server.edition import EditedEntity -print '******************** backport entity content ***************************' +print('******************** backport entity content ***************************') todelete = defaultdict(list) extids = set() @@ -39,29 +41,29 @@ for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities(): etype = entity.cw_etype if not source.support_entity(etype): - print "source doesn't support %s, delete %s" % (etype, entity.eid) + print("source doesn't support %s, delete %s" % (etype, entity.eid)) todelete[etype].append(entity) continue try: entity.complete() except Exception: - print '%s %s much probably deleted, delete it (extid %s)' % ( - etype, entity.eid, entity.cw_metainformation()['extid']) + print('%s %s most probably deleted, delete it (extid %s)' % ( + etype, entity.eid, entity.cw_metainformation()['extid'])) todelete[etype].append(entity) continue - print 'get back', etype, entity.eid + print('get back', etype, entity.eid) entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache) if not entity.creation_date: - entity.cw_edited['creation_date'] = datetime.now() + entity.cw_edited['creation_date'] = datetime.utcnow() if not entity.modification_date: - entity.cw_edited['modification_date'] = datetime.now() + entity.cw_edited['modification_date'] = datetime.utcnow() if not entity.upassword: entity.cw_edited['upassword'] = generate_password() extid = entity.cw_metainformation()['extid'] if not entity.cwuri: entity.cw_edited['cwuri'] = '%s/?dn=%s' % ( source.urls[0], extid.decode('utf-8', 'ignore')) - print entity.cw_edited + print(entity.cw_edited) if extid in extids: duplicates.append(extid) continue @@ -73,13 +75,13 @@ # only cleanup entities table, remaining stuff should be cleaned by a c-c # db-check to be run after this script if duplicates: - print 'found %s duplicate entries' % len(duplicates) + print('found %s duplicate entries' % len(duplicates)) from pprint import pprint pprint(duplicates) -print len(todelete), 'entities will be deleted' -for etype, entities in todelete.iteritems(): - print 'deleting', etype, [e.login for e in entities] +print(len(todelete), 'entities will be deleted') +for etype, entities in todelete.items(): + print('deleting', etype, [e.login for e in entities]) system_source.delete_info_multi(session, entities, source_name) @@ -89,9 +91,8 @@ if raw_input('Commit?') in 'yY': - print 'committing' + print('committing') commit() else: rollback() - print 'rolled back' - + print('rolled back') diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/scripts/pyroforge2datafeed.py --- a/misc/scripts/pyroforge2datafeed.py Thu Mar 24 09:43:25 2016 +0100 +++ b/misc/scripts/pyroforge2datafeed.py Thu Jun 16 14:19:20 2016 +0200 @@ -2,6 +2,8 @@ Once this script is run, execute c-c db-check to cleanup relation tables. """ +from __future__ import print_function + import sys try: @@ -12,14 +14,14 @@ ' on the command line)') sys.exit(1) except KeyError: - print '%s is not an active source' % source_name + print('%s is not an active source' % source_name) sys.exit(1) # check source is reachable before doing anything try: source.get_connection()._repo except AttributeError: - print '%s is not reachable. Fix this before running this script' % source_name + print('%s is not reachable. Fix this before running this script' % source_name) sys.exit(1) raw_input('Ensure you have shutdown all instances of this application before continuing.'
@@ -39,7 +41,7 @@
         ))
 
 
-print '******************** backport entity content ***************************'
+print('******************** backport entity content ***************************')
 
 from cubicweb.server import debugged
 todelete = {}
@@ -47,20 +49,20 @@
 for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities():
     etype = entity.cw_etype
     if not source.support_entity(etype):
-        print "source doesn't support %s, delete %s" % (etype, entity.eid)
+        print("source doesn't support %s, delete %s" % (etype, entity.eid))
     elif etype in DONT_GET_BACK_ETYPES:
-        print 'ignore %s, delete %s' % (etype, entity.eid)
+        print('ignore %s, delete %s' % (etype, entity.eid))
     else:
         try:
             entity.complete()
             if not host in entity.cwuri:
-                print 'SKIP foreign entity', entity.cwuri, source.config['base-url']
+                print('SKIP foreign entity', entity.cwuri, source.config['base-url'])
                 continue
         except Exception:
-            print '%s %s much probably deleted, delete it (extid %s)' % (
-                etype, entity.eid, entity.cw_metainformation()['extid'])
+            print('%s %s most probably deleted, delete it (extid %s)' % (
+                etype, entity.eid, entity.cw_metainformation()['extid']))
         else:
-            print 'get back', etype, entity.eid
+            print('get back', etype, entity.eid)
             entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache)
             system_source.add_entity(session, entity)
             sql("UPDATE entities SET asource=%(asource)s, source='system', extid=%(extid)s "
@@ -72,11 +74,11 @@
 # only cleanup entities table, remaining stuff should be cleaned by a c-c
 # db-check to be run after this script
-for entities in todelete.itervalues():
+for entities in todelete.values():
     system_source.delete_info_multi(session, entities, source_name)
 
 
-print '******************** backport mapping **********************************'
+print('******************** backport mapping **********************************')
 session.disable_hook_categories('cw.sources')
 mapping = []
 for mappart in rql('Any X,SCH WHERE X cw_schema SCH, X cw_for_source S, S eid %(s)s',
@@ -85,13 +87,13 @@
     if schemaent.cw_etype != 'CWEType':
         assert schemaent.cw_etype == 'CWRType'
         sch = schema._eid_index[schemaent.eid]
-        for rdef in sch.rdefs.itervalues():
+        for rdef in sch.rdefs.values():
             if not source.support_entity(rdef.subject) \
                     or not source.support_entity(rdef.object):
                 continue
             if rdef.subject in DONT_GET_BACK_ETYPES \
                     and rdef.object in DONT_GET_BACK_ETYPES:
-                print 'dont map', rdef
+                print('dont map', rdef)
                 continue
             if rdef.subject in DONT_GET_BACK_ETYPES:
                 options = u'action=link\nlinkattr=name'
@@ -105,7 +107,7 @@
                 roles = 'object',
             else:
                 roles = 'subject',
-            print 'map', rdef, options, roles
+            print('map', rdef, options, roles)
             for role in roles:
                 mapping.append( (
                     (str(rdef.subject), str(rdef.rtype), str(rdef.object)),
diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/scripts/repair_file_1-9_migration.py
--- a/misc/scripts/repair_file_1-9_migration.py	Thu Mar 24 09:43:25 2016 +0100
+++ b/misc/scripts/repair_file_1-9_migration.py	Thu Jun 16 14:19:20 2016 +0200
@@ -4,13 +4,14 @@
 * on our intranet on July 07 2010
 * on our extranet on July 16 2010
 """
+from __future__ import print_function
 
 try:
     backupinstance, = __args__
 except ValueError:
-    print 'USAGE: cubicweb-ctl shell repair_file_1-9_migration.py -- '
-    print
-    print 'you should restored the backup on a new instance, accessible through pyro'
+    print('USAGE: cubicweb-ctl shell repair_file_1-9_migration.py -- ')
+    print()
+    print('you should have restored the backup on a new instance, accessible through pyro')
 
 from cubicweb import cwconfig, dbapi
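These migration scripts also show the recurring dictionary port: iteritems() and itervalues() no longer exist on Python 3, so the patch systematically switches to items() and values(), which work on both versions (lists on 2, views on 3). A quick illustration of the portable spelling, with illustrative data:

from __future__ import print_function

todelete = {'CWUser': ['jdoe'], 'CWGroup': ['guests']}
for etype, entities in todelete.items():   # .iteritems() raises AttributeError on py3
    print('deleting', etype, entities)
for entities in todelete.values():         # .itervalues() likewise
    print(len(entities), 'entities')
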
from cubicweb.server.session import hooks_control @@ -32,20 +33,20 @@ 'XX from_entity YY, YY name "File")'): if rtype in ('is', 'is_instance_of'): continue - print rtype + print(rtype) for feid, xeid in backupcu.execute('Any F,X WHERE F %s X, F is IN (File,Image)' % rtype): - print 'restoring relation %s between file %s and %s' % (rtype, feid, xeid), - print rql('SET F %s X WHERE F eid %%(f)s, X eid %%(x)s, NOT F %s X' % (rtype, rtype), - {'f': feid, 'x': xeid}) + print('restoring relation %s between file %s and %s' % (rtype, feid, xeid), end=' ') + print(rql('SET F %s X WHERE F eid %%(f)s, X eid %%(x)s, NOT F %s X' % (rtype, rtype), + {'f': feid, 'x': xeid})) for rtype, in backupcu.execute('DISTINCT Any RTN WHERE X relation_type RT, RT name RTN,' 'X to_entity Y, Y name "Image", X is CWRelation, ' 'EXISTS(XX is CWRelation, XX relation_type RT, ' 'XX to_entity YY, YY name "File")'): - print rtype + print(rtype) for feid, xeid in backupcu.execute('Any F,X WHERE X %s F, F is IN (File,Image)' % rtype): - print 'restoring relation %s between %s and file %s' % (rtype, xeid, feid), - print rql('SET X %s F WHERE F eid %%(f)s, X eid %%(x)s, NOT X %s F' % (rtype, rtype), - {'f': feid, 'x': xeid}) + print('restoring relation %s between %s and file %s' % (rtype, xeid, feid), end=' ') + print(rql('SET X %s F WHERE F eid %%(f)s, X eid %%(x)s, NOT X %s F' % (rtype, rtype), + {'f': feid, 'x': xeid})) commit() diff -r a4fcee1e9789 -r 19fcce6dc6d1 misc/scripts/repair_splitbrain_ldapuser_source.py --- a/misc/scripts/repair_splitbrain_ldapuser_source.py Thu Mar 24 09:43:25 2016 +0100 +++ b/misc/scripts/repair_splitbrain_ldapuser_source.py Thu Jun 16 14:19:20 2016 +0200 @@ -14,6 +14,7 @@ deciding to apply it for you. And then ADAPT it tou your needs. """ +from __future__ import print_function import base64 from collections import defaultdict @@ -28,12 +29,12 @@ ' on the command line)') sys.exit(1) except KeyError: - print '%s is not an active source' % source_name + print('%s is not an active source' % source_name) sys.exit(1) # check source is reachable before doing anything if not source.get_connection().cnx: - print '%s is not reachable. Fix this before running this script' % source_name + print('%s is not reachable. Fix this before running this script' % source_name) sys.exit(1) def find_dupes(): @@ -52,11 +53,11 @@ CWUser = schema['CWUser'] for extid, eids in dupes.items(): newest = eids.pop() # we merge everything on the newest - print 'merging ghosts of', extid, 'into', newest + print('merging ghosts of', extid, 'into', newest) # now we merge pairwise into the newest for old in eids: subst = {'old': old, 'new': newest} - print ' merging', old + print(' merging', old) gone_eids.append(old) for rschema in CWUser.subject_relations(): if rschema.final or rschema == 'identity': @@ -83,24 +84,24 @@ rollback() return commit() # XXX flushing operations is wanted rather than really committing - print 'clean up entities table' + print('clean up entities table') sql('DELETE FROM entities WHERE eid IN (%s)' % (', '.join(str(x) for x in gone_eids))) commit() def main(): dupes = find_dupes() if not dupes: - print 'No duplicate user' + print('No duplicate user') return - print 'Found %s duplicate user instances' % len(dupes) + print('Found %s duplicate user instances' % len(dupes)) while True: - print 'Fix or dry-run? (f/d) ... or Ctrl-C to break out' + print('Fix or dry-run? (f/d) ... 
or Ctrl-C to break out') answer = raw_input('> ') if answer.lower() not in 'fd': continue - print 'Please STOP THE APPLICATION INSTANCES (service or interactive), and press Return when done.' + print('Please STOP THE APPLICATION INSTANCES (service or interactive), and press Return when done.') raw_input('') with hooks_control(session, session.HOOKS_DENY_ALL): merge_dupes(dupes, docommit=answer=='f') diff -r a4fcee1e9789 -r 19fcce6dc6d1 multipart.py --- a/multipart.py Thu Mar 24 09:43:25 2016 +0100 +++ b/multipart.py Thu Jun 16 14:19:20 2016 +0200 @@ -41,14 +41,13 @@ from wsgiref.headers import Headers import re, sys try: - from urlparse import parse_qs -except ImportError: # pragma: no cover (fallback for Python 2.5) - from cgi import parse_qs -try: from io import BytesIO except ImportError: # pragma: no cover (fallback for Python 2.5) from StringIO import StringIO as BytesIO +from six import PY3, text_type +from six.moves.urllib.parse import parse_qs + ############################################################################## ################################ Helper & Misc ################################ ############################################################################## @@ -63,7 +62,7 @@ """ A dict that remembers old values for each key """ def __init__(self, *a, **k): self.dict = dict() - for k, v in dict(*a, **k).iteritems(): + for k, v in dict(*a, **k).items(): self[k] = v def __len__(self): return len(self.dict) @@ -84,12 +83,12 @@ return self.dict[key][index] def iterallitems(self): - for key, values in self.dict.iteritems(): + for key, values in self.dict.items(): for value in values: yield key, value def tob(data, enc='utf8'): # Convert strings to bytes (py2 and py3) - return data.encode(enc) if isinstance(data, unicode) else data + return data.encode(enc) if isinstance(data, text_type) else data def copy_file(stream, target, maxread=-1, buffer_size=2*16): ''' Read from :stream and write to :target until :maxread or EOF. ''' @@ -397,17 +396,21 @@ 'application/x-url-encoded'): mem_limit = kw.get('mem_limit', 2**20) if content_length > mem_limit: - raise MultipartError("Request to big. Increase MAXMEM.") + raise MultipartError("Request too big. Increase MAXMEM.") data = stream.read(mem_limit) if stream.read(1): # These is more that does not fit mem_limit - raise MultipartError("Request to big. Increase MAXMEM.") + raise MultipartError("Request too big. 
Increase MAXMEM.") + if PY3: + data = data.decode('ascii') data = parse_qs(data, keep_blank_values=True) - for key, values in data.iteritems(): + for key, values in data.items(): for value in values: - forms[key] = value.decode(charset) + if PY3: + forms[key] = value + else: + forms[key.decode(charset)] = value.decode(charset) else: raise MultipartError("Unsupported content type.") except MultipartError: if strict: raise return forms, files - diff -r a4fcee1e9789 -r 19fcce6dc6d1 predicates.py --- a/predicates.py Thu Mar 24 09:43:25 2016 +0100 +++ b/predicates.py Thu Jun 16 14:19:20 2016 +0200 @@ -24,6 +24,9 @@ from warnings import warn from operator import eq +from six import string_types, integer_types +from six.moves import range + from logilab.common.deprecation import deprecated from logilab.common.registry import Predicate, objectify_predicate, yes @@ -106,7 +109,7 @@ if accept_none is None: accept_none = self.accept_none if not accept_none and \ - any(rset[i][col] is None for i in xrange(len(rset))): + any(row[col] is None for row in rset): return 0 etypes = rset.column_types(col) else: @@ -332,7 +335,7 @@ # on rset containing several entity types, each row may be # individually adaptable, while the whole rset won't be if the # same adapter can't be used for each type - for row in xrange(len(kwargs['rset'])): + for row in range(len(kwargs['rset'])): kwargs.setdefault('col', 0) _score = super(adaptable, self).__call__(cls, req, row=row, **kwargs) if not _score: @@ -489,10 +492,14 @@ page_size = kwargs.get('page_size') if page_size is None: page_size = req.form.get('page_size') + if page_size is not None: + try: + page_size = int(page_size) + except ValueError: + page_size = None if page_size is None: - page_size = req.property_value('navigation.page-size') - else: - page_size = int(page_size) + page_size_prop = getattr(cls, 'page_size_property', 'navigation.page-size') + page_size = req.property_value(page_size_prop) if len(rset) <= (page_size*self.nbpages): return 0 return self.nbpages @@ -611,7 +618,7 @@ super(is_instance, self).__init__(**kwargs) self.expected_etypes = expected_etypes for etype in self.expected_etypes: - assert isinstance(etype, basestring), etype + assert isinstance(etype, string_types), etype def __str__(self): return '%s(%s)' % (self.__class__.__name__, @@ -671,7 +678,7 @@ score = scorefunc(*args, **kwargs) if not score: return 0 - if isinstance(score, (int, long)): + if isinstance(score, integer_types): return score return 1 self.score_entity = intscore @@ -828,7 +835,7 @@ class has_related_entities(EntityPredicate): """Return 1 if entity support the specified relation and has some linked - entities by this relation , optionaly filtered according to the specified + entities by this relation , optionally filtered according to the specified target type. 
The relation is specified by the following initializer arguments: @@ -1091,7 +1098,7 @@ """ if from_state_name is not None: warn("on_fire_transition's from_state_name argument is unused", DeprecationWarning) - if isinstance(tr_names, basestring): + if isinstance(tr_names, string_types): tr_names = set((tr_names,)) def match_etype_and_transition(trinfo): # take care trinfo.transition is None when calling change_state @@ -1291,7 +1298,7 @@ raise ValueError("match_form_params() can't be called with both " "positional and named arguments") if expected: - if len(expected) == 1 and not isinstance(expected[0], basestring): + if len(expected) == 1 and not isinstance(expected[0], string_types): raise ValueError("match_form_params() positional arguments " "must be strings") super(match_form_params, self).__init__(*expected) diff -r a4fcee1e9789 -r 19fcce6dc6d1 pylintext.py --- a/pylintext.py Thu Mar 24 09:43:25 2016 +0100 +++ b/pylintext.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,7 +17,7 @@ def cubicweb_transform(module): # handle objectify_predicate decorator (and its former name until bw compat # is kept). Only look at module level functions, should be enough. - for assnodes in module.locals.itervalues(): + for assnodes in module.locals.values(): for node in assnodes: if isinstance(node, scoped_nodes.Function) and node.decorators: for decorator in node.decorators.nodes: @@ -48,4 +48,3 @@ def register(linter): """called when loaded by pylint --load-plugins, nothing to do here""" MANAGER.register_transform(nodes.Module, cubicweb_transform) - diff -r a4fcee1e9789 -r 19fcce6dc6d1 repoapi.py --- a/repoapi.py Thu Mar 24 09:43:25 2016 +0100 +++ b/repoapi.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,21 +17,17 @@ # with CubicWeb. If not, see . """Official API to access the content of a repository """ +from warnings import warn + +from six import add_metaclass + from logilab.common.deprecation import class_deprecated from cubicweb.utils import parse_repo_uri -from cubicweb import ConnectionError, AuthenticationError +from cubicweb import AuthenticationError from cubicweb.server.session import Connection -### private function for specific method ############################ - -def _get_inmemory_repo(config, vreg=None): - from cubicweb.server.repository import Repository - from cubicweb.server.utils import TasksManager - return Repository(config, TasksManager(), vreg=vreg) - - ### public API ###################################################### def get_repository(uri=None, config=None, vreg=None): @@ -41,16 +37,11 @@ The returned repository may be an in-memory repository or a proxy object using a specific RPC method, depending on the given URI. """ - if uri is None: - return _get_inmemory_repo(config, vreg) - - protocol, hostport, appid = parse_repo_uri(uri) + if uri is not None: + warn('[3.22] get_repository only wants a config') - if protocol == 'inmemory': - # me may have been called with a dummy 'inmemory://' uri ... - return _get_inmemory_repo(config, vreg) - - raise ConnectionError('unknown protocol: `%s`' % protocol) + assert config is not None, 'get_repository(config=config)' + return config.repository(vreg) def connect(repo, login, **kwargs): """Take credential and return associated Connection. 
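After this repoapi change the in-memory/URI dispatch is gone: get_repository() expects a configuration and simply delegates to config.repository(), while passing a URI only emits the 3.22 deprecation warning. A usage sketch, assuming an existing instance whose appid is the hypothetical 'myinstance' (config_for lookup shown for illustration):

from cubicweb import repoapi
from cubicweb.cwconfig import CubicWebConfiguration

config = CubicWebConfiguration.config_for('myinstance')  # appid is illustrative
repo = repoapi.get_repository(config=config)             # uri=... would merely warn
cnx = repoapi.connect(repo, login='admin', password='secret')
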
@@ -75,6 +66,6 @@ return connect(repo, anon_login, password=anon_password) +@add_metaclass(class_deprecated) class ClientConnection(Connection): - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.20] %(cls)s is deprecated, use Connection instead' diff -r a4fcee1e9789 -r 19fcce6dc6d1 req.py --- a/req.py Thu Mar 24 09:43:25 2016 +0100 +++ b/req.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,10 +20,10 @@ __docformat__ = "restructuredtext en" from warnings import warn -from urlparse import urlsplit, urlunsplit -from urllib import quote as urlquote, unquote as urlunquote from datetime import time, datetime, timedelta -from cgi import parse_qs, parse_qsl + +from six import PY2, PY3, text_type +from six.moves.urllib.parse import parse_qs, parse_qsl, quote as urlquote, unquote as urlunquote, urlsplit, urlunsplit from logilab.common.decorators import cached from logilab.common.deprecation import deprecated @@ -73,7 +73,7 @@ # connection self.user = None self.local_perm_cache = {} - self._ = unicode + self._ = text_type def _set_user(self, orig_user): """set the user for this req_session_base @@ -219,7 +219,7 @@ parts.append( '%(varname)s %(attr)s X, ' '%(varname)s eid %%(reverse_%(attr)s)s' - % {'attr': attr, 'varname': varmaker.next()}) + % {'attr': attr, 'varname': next(varmaker)}) else: assert attr in eschema.subjrels, \ '%s not in %s subject relations' % (attr, eschema) @@ -300,7 +300,7 @@ def build_url_params(self, **kwargs): """return encoded params to incorporate them in a URL""" args = [] - for param, values in kwargs.iteritems(): + for param, values in kwargs.items(): if not isinstance(values, (list, tuple)): values = (values,) for value in values: @@ -313,7 +313,7 @@ necessary encoding / decoding. Also it's designed to quote each part of a url path and so the '/' character will be encoded as well. """ - if isinstance(value, unicode): + if PY2 and isinstance(value, unicode): quoted = urlquote(value.encode(self.encoding), safe=safe) return unicode(quoted, self.encoding) return urlquote(str(value), safe=safe) @@ -324,6 +324,8 @@ decoding is based on `self.encoding` which is the encoding used in `url_quote` """ + if PY3: + return urlunquote(quoted) if isinstance(quoted, unicode): quoted = quoted.encode(self.encoding) try: @@ -333,6 +335,10 @@ def url_parse_qsl(self, querystring): """return a list of (key, val) found in the url quoted query string""" + if PY3: + for key, val in parse_qsl(querystring): + yield key, val + return if isinstance(querystring, unicode): querystring = querystring.encode(self.encoding) for key, val in parse_qsl(querystring): @@ -348,12 +354,12 @@ newparams may only be mono-valued. 
""" - if isinstance(url, unicode): + if PY2 and isinstance(url, unicode): url = url.encode(self.encoding) schema, netloc, path, query, fragment = urlsplit(url) query = parse_qs(query) # sort for testing predictability - for key, val in sorted(newparams.iteritems()): + for key, val in sorted(newparams.items()): query[key] = (self.url_quote(val),) query = '&'.join(u'%s=%s' % (param, value) for param, values in sorted(query.items()) diff -r a4fcee1e9789 -r 19fcce6dc6d1 rqlrewrite.py --- a/rqlrewrite.py Thu Mar 24 09:43:25 2016 +0100 +++ b/rqlrewrite.py Thu Jun 16 14:19:20 2016 +0200 @@ -22,6 +22,8 @@ """ __docformat__ = "restructuredtext en" +from six import text_type, string_types + from rql import nodes as n, stmts, TypeResolverException from rql.utils import common_parent @@ -54,7 +56,7 @@ eschema = schema.eschema allpossibletypes = {} for solution in solutions: - for varname, etype in solution.iteritems(): + for varname, etype in solution.items(): # XXX not considering aliases by design, right ? if varname not in newroot.defined_vars or eschema(etype).final: continue @@ -92,7 +94,7 @@ for etype in sorted(possibletypes): node.append(n.Constant(etype, 'etype')) else: - etype = iter(possibletypes).next() + etype = next(iter(possibletypes)) node = n.Constant(etype, 'etype') comp = mytyperel.children[1] comp.replace(comp.children[0], node) @@ -286,7 +288,7 @@ if fnode.name == 'FTIRANK': # we've to fetch the has_text relation as well var = fnode.children[0].variable - rel = iter(var.stinfo['ftirels']).next() + rel = next(iter(var.stinfo['ftirels'])) assert not rel.ored(), 'unsupported' newselect.add_restriction(rel.copy(newselect)) # remove relation from the orig select and @@ -330,7 +332,7 @@ union.replace(select, newselect) elif not () in localchecks: union.remove(select) - for lcheckdef, lchecksolutions in localchecks.iteritems(): + for lcheckdef, lchecksolutions in localchecks.items(): if not lcheckdef: continue myrqlst = select.copy(solutions=lchecksolutions) @@ -427,7 +429,7 @@ def insert_varmap_snippets(self, varmap, rqlexprs, varexistsmap): try: self.init_from_varmap(varmap, varexistsmap) - except VariableFromSubQuery, ex: + except VariableFromSubQuery as ex: # variable may have been moved to a newly inserted subquery # we should insert snippet in that subquery subquery = self.select.aliases[ex.variable].query @@ -548,7 +550,7 @@ 'cant check security of %s, ambigous type for %s in %s', stmt, varname, key[0]) # key[0] == the rql expression raise Unauthorized() - etype = iter(ptypes).next() + etype = next(iter(ptypes)) eschema = self.schema.eschema(etype) if not eschema.has_perm(self.session, action): rqlexprs = eschema.get_rqlexprs(action) @@ -621,7 +623,7 @@ while argname in self.kwargs: argname = subselect.allocate_varname() subselect.add_constant_restriction(subselect.get_variable(self.u_varname), - 'eid', unicode(argname), 'Substitute') + 'eid', text_type(argname), 'Substitute') self.kwargs[argname] = self.session.user.eid add_types_restriction(self.schema, subselect, subselect, solutions=self.solutions) @@ -646,7 +648,7 @@ # insert "is" where necessary varexistsmap = {} self.removing_ambiguity = True - for (erqlexpr, varmap, oldvarname), etype in variantes[0].iteritems(): + for (erqlexpr, varmap, oldvarname), etype in variantes[0].items(): varname = self.rewritten[(erqlexpr, varmap, oldvarname)] var = self.select.defined_vars[varname] exists = var.references()[0].scope @@ -655,7 +657,7 @@ # insert ORED exists where necessary for variante in variantes[1:]: 
self.insert_snippets(snippets, varexistsmap) - for key, etype in variante.iteritems(): + for key, etype in variante.items(): varname = self.rewritten[key] try: var = self.select.defined_vars[varname] @@ -674,7 +676,7 @@ variantes = set() for sol in newsolutions: variante = [] - for key, newvar in self.rewritten.iteritems(): + for key, newvar in self.rewritten.items(): variante.append( (key, sol[newvar]) ) variantes.add(tuple(variante)) # rebuild variantes as dict @@ -682,7 +684,7 @@ # remove variable which have always the same type for key in self.rewritten: it = iter(variantes) - etype = it.next()[key] + etype = next(it)[key] for variante in it: if variante[key] != etype: break @@ -700,7 +702,7 @@ # no more references, undefine the variable del self.select.defined_vars[vref.name] removed.add(vref.name) - for key, newvar in self.rewritten.items(): # I mean items we alter it + for key, newvar in list(self.rewritten.items()): if newvar in removed: del self.rewritten[key] @@ -760,7 +762,7 @@ # insert "U eid %(u)s" stmt.add_constant_restriction( stmt.get_variable(self.u_varname), - 'eid', unicode(argname), 'Substitute') + 'eid', text_type(argname), 'Substitute') self.kwargs[argname] = self.session.user.eid return self.u_varname key = (self.current_expr, self.varmap, vname) @@ -883,7 +885,7 @@ return n.Constant(vi['const'], 'Int') return n.VariableRef(stmt.get_variable(selectvar)) vname_or_term = self._get_varname_or_term(node.name) - if isinstance(vname_or_term, basestring): + if isinstance(vname_or_term, string_types): return n.VariableRef(stmt.get_variable(vname_or_term)) # shared term return vname_or_term.copy(stmt) diff -r a4fcee1e9789 -r 19fcce6dc6d1 rset.py --- a/rset.py Thu Mar 24 09:43:25 2016 +0100 +++ b/rset.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,11 +16,13 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
"""The `ResultSet` class which is returned as result of an rql query""" - __docformat__ = "restructuredtext en" from warnings import warn +from six import PY3 +from six.moves import range + from logilab.common import nullobject from logilab.common.decorators import cached, clear_cache, copy_cache from rql import nodes, stmts @@ -101,7 +103,7 @@ if self._rsetactions is None: self._rsetactions = {} if kwargs: - key = tuple(sorted(kwargs.iteritems())) + key = tuple(sorted(kwargs.items())) else: key = None try: @@ -120,10 +122,6 @@ """returns the ith element of the result set""" return self.rows[i] #ResultSetRow(self.rows[i]) - def __getslice__(self, i, j): - """returns slice [i:j] of the result set""" - return self.rows[i:j] - def __iter__(self): """Returns an iterator over rows""" return iter(self.rows) @@ -186,7 +184,7 @@ """ rows, descr = [], [] rset = self.copy(rows, descr) - for i in xrange(len(self)): + for i in range(len(self)): if not filtercb(self.get_entity(i, col)): continue rows.append(self.rows[i]) @@ -215,10 +213,10 @@ rset = self.copy(rows, descr) if col >= 0: entities = sorted(enumerate(self.entities(col)), - key=lambda (i, e): keyfunc(e), reverse=reverse) + key=lambda t: keyfunc(t[1]), reverse=reverse) else: entities = sorted(enumerate(self), - key=lambda (i, e): keyfunc(e), reverse=reverse) + key=lambda t: keyfunc(t[1]), reverse=reverse) for index, _ in entities: rows.append(self.rows[index]) descr.append(self.description[index]) @@ -311,7 +309,7 @@ newselect.limit = limit newselect.offset = offset aliases = [nodes.VariableRef(newselect.get_variable(chr(65+i), i)) - for i in xrange(len(rqlst.children[0].selection))] + for i in range(len(rqlst.children[0].selection))] for vref in aliases: newselect.append_selected(nodes.VariableRef(vref.variable)) newselect.set_with([nodes.SubQuery(aliases, rqlst)], check=False) @@ -322,7 +320,7 @@ return rql def limit(self, limit, offset=0, inplace=False): - """limit the result set to the given number of rows optionaly starting + """limit the result set to the given number of rows optionally starting from an index different than 0 :type limit: int @@ -373,6 +371,8 @@ warn('[3.21] the "encoded" argument is deprecated', DeprecationWarning) encoding = self.req.encoding rqlstr = self.syntax_tree().as_string(kwargs=self.args) + if PY3: + return rqlstr # sounds like we get encoded or unicode string due to a bug in as_string if not encoded: if isinstance(rqlstr, unicode): @@ -387,7 +387,7 @@ def entities(self, col=0): """iter on entities with eid in the `col` column of the result set""" - for i in xrange(len(self)): + for i in range(len(self)): # may have None values in case of outer join (or aggregat on eid # hacks) if self.rows[i][col] is not None: @@ -483,7 +483,6 @@ # new attributes found in this resultset ? 
try: entity = req.entity_cache(eid) - entity._cw = req except KeyError: pass else: @@ -507,9 +506,9 @@ eschema = entity.e_schema eid_col, attr_cols, rel_cols = self._rset_structure(eschema, col) entity.eid = rowvalues[eid_col] - for attr, col_idx in attr_cols.iteritems(): + for attr, col_idx in attr_cols.items(): entity.cw_attr_cache[attr] = rowvalues[col_idx] - for (rtype, role), col_idx in rel_cols.iteritems(): + for (rtype, role), col_idx in rel_cols.items(): value = rowvalues[col_idx] if value is None: if role == 'subject': @@ -606,7 +605,7 @@ except AttributeError: # not a variable continue - for i in xrange(len(select.selection)): + for i in range(len(select.selection)): if i == col: continue coletype = self.description[row][i] diff -r a4fcee1e9789 -r 19fcce6dc6d1 rtags.py --- a/rtags.py Thu Mar 24 09:43:25 2016 +0100 +++ b/rtags.py Thu Jun 16 14:19:20 2016 +0200 @@ -40,6 +40,8 @@ import logging from warnings import warn +from six import string_types + from logilab.common.logging_ext import set_log_methods from logilab.common.registry import RegistrableInstance, yes @@ -95,7 +97,7 @@ def init(self, schema, check=True): # XXX check existing keys against schema if check: - for (stype, rtype, otype, tagged), value in self._tagdefs.items(): + for (stype, rtype, otype, tagged), value in list(self._tagdefs.items()): for ertype in (stype, rtype, otype): if ertype != '*' and not ertype in schema: self.warning('removing rtag %s: %s, %s undefined in schema', @@ -145,7 +147,7 @@ return tag def _tag_etype_attr(self, etype, attr, desttype='*', *args, **kwargs): - if isinstance(attr, basestring): + if isinstance(attr, string_types): attr, role = attr, 'subject' else: attr, role = attr diff -r a4fcee1e9789 -r 19fcce6dc6d1 schema.py --- a/schema.py Thu Mar 24 09:43:25 2016 +0100 +++ b/schema.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,15 +16,18 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
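In the rtags.py hunk just above, iteration over _tagdefs is wrapped in list() before entries may be removed: on Python 3, dict.items() is a live view and deleting keys during iteration raises RuntimeError. A minimal demonstration of the pattern (data is illustrative):

tags = {('*', 'name'): 'main', ('*', 'ghost'): None}
for key, value in list(tags.items()):   # iterate over a snapshot, mutate freely
    if value is None:
        del tags[key]
assert list(tags) == [('*', 'name')]
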
"""classes to define schemas for CubicWeb""" +from __future__ import print_function __docformat__ = "restructuredtext en" -_ = unicode import re from os.path import join, basename from logging import getLogger from warnings import warn +from six import PY2, text_type, string_types, add_metaclass +from six.moves import range + from logilab.common import tempattr from logilab.common.decorators import cached, clear_cache, monkeypatch, cachedproperty from logilab.common.logging_ext import set_log_methods @@ -45,7 +48,7 @@ from rql.analyze import ETypeResolver import cubicweb -from cubicweb import ETYPE_NAME_MAP, ValidationError, Unauthorized +from cubicweb import ETYPE_NAME_MAP, ValidationError, Unauthorized, _ try: from cubicweb import server @@ -102,6 +105,9 @@ INTERNAL_TYPES = set(('CWProperty', 'CWCache', 'ExternalUri', 'CWDataImport', 'CWSource', 'CWSourceHostConfig', 'CWSourceSchemaConfig')) +UNIQUE_CONSTRAINTS = ('SizeConstraint', 'FormatConstraint', + 'StaticVocabularyConstraint', + 'RQLVocabularyConstraint') _LOGGER = getLogger('cubicweb.schemaloader') @@ -142,7 +148,10 @@ suppressing and reinserting an expression if only a space has been added/removed for instance) """ - return u', '.join(' '.join(expr.split()) for expr in rqlstring.split(',')) + union = parse(u'Any 1 WHERE %s' % rqlstring).as_string() + if PY2 and isinstance(union, str): + union = union.decode('utf-8') + return union.split(' WHERE ', 1)[1] def _check_valid_formula(rdef, formula_rqlst): @@ -204,7 +213,7 @@ """ self.eid = eid # eid of the entity representing this rql expression assert mainvars, 'bad mainvars %s' % mainvars - if isinstance(mainvars, basestring): + if isinstance(mainvars, string_types): mainvars = set(splitstrip(mainvars)) elif not isinstance(mainvars, set): mainvars = set(mainvars) @@ -246,6 +255,9 @@ return self.expression == other.expression return False + def __ne__(self, other): + return not (self == other) + def __hash__(self): return hash(self.expression) @@ -271,7 +283,7 @@ def transform_has_permission(self): found = None rqlst = self.rqlst - for var in rqlst.defined_vars.itervalues(): + for var in rqlst.defined_vars.values(): for varref in var.references(): rel = varref.relation() if rel is None: @@ -319,7 +331,7 @@ """ creating = kwargs.get('creating') if not creating and self.eid is not None: - key = (self.eid, tuple(sorted(kwargs.iteritems()))) + key = (self.eid, tuple(sorted(kwargs.items()))) try: return _cw.local_perm_cache[key] except KeyError: @@ -363,7 +375,7 @@ get_eschema = _cw.vreg.schema.eschema try: for eaction, col in has_perm_defs: - for i in xrange(len(rset)): + for i in range(len(rset)): eschema = get_eschema(rset.description[i][col]) eschema.check_perm(_cw, eaction, eid=rset[i][col]) if self.eid is not None: @@ -400,13 +412,35 @@ return self._check(_cw, x=eid, **kwargs) return self._check(_cw, **kwargs) -def constraint_by_eid(self, eid): - for cstr in self.constraints: - if cstr.eid == eid: - return cstr - raise ValueError('No constraint with eid %d' % eid) -RelationDefinitionSchema.constraint_by_eid = constraint_by_eid + +class CubicWebRelationDefinitionSchema(RelationDefinitionSchema): + def constraint_by_eid(self, eid): + for cstr in self.constraints: + if cstr.eid == eid: + return cstr + raise ValueError('No constraint with eid %d' % eid) + + def rql_expression(self, expression, mainvars=None, eid=None): + """rql expression factory""" + if self.rtype.final: + return ERQLExpression(expression, mainvars, eid) + return RRQLExpression(expression, mainvars, eid) + def 
check_permission_definitions(self): + super(CubicWebRelationDefinitionSchema, self).check_permission_definitions() + schema = self.subject.schema + for action, groups in self.permissions.items(): + for group_or_rqlexpr in groups: + if action == 'read' and \ + isinstance(group_or_rqlexpr, RQLExpression): + msg = "can't use rql expression for read permission of %s" + raise BadSchemaDefinition(msg % self) + if self.final and isinstance(group_or_rqlexpr, RRQLExpression): + msg = "can't use RRQLExpression on %s, use an ERQLExpression" + raise BadSchemaDefinition(msg % self) + if not self.final and isinstance(group_or_rqlexpr, ERQLExpression): + msg = "can't use ERQLExpression on %s, use a RRQLExpression" + raise BadSchemaDefinition(msg % self) def vargraph(rqlst): """ builds an adjacency graph of variables from the rql syntax tree, e.g: @@ -522,7 +556,7 @@ if not deps: eschemas.append(eschema) del graph[eschema] - for deps in graph.itervalues(): + for deps in graph.values(): try: deps.remove(eschema) except KeyError: @@ -548,9 +582,9 @@ key = key + '_' + form # ensure unicode if context is not None: - return unicode(req.pgettext(context, key)) + return text_type(req.pgettext(context, key)) else: - return unicode(req._(key)) + return text_type(req._(key)) # Schema objects definition ################################################### @@ -576,7 +610,7 @@ assert action in self.ACTIONS, action #assert action in self._groups, '%s %s' % (self, action) try: - return frozenset(g for g in self.permissions[action] if isinstance(g, basestring)) + return frozenset(g for g in self.permissions[action] if isinstance(g, string_types)) except KeyError: return () PermissionMixIn.get_groups = get_groups @@ -595,7 +629,7 @@ assert action in self.ACTIONS, action #assert action in self._rqlexprs, '%s %s' % (self, action) try: - return tuple(g for g in self.permissions[action] if not isinstance(g, basestring)) + return tuple(g for g in self.permissions[action] if not isinstance(g, string_types)) except KeyError: return () PermissionMixIn.get_rqlexprs = get_rqlexprs @@ -665,7 +699,7 @@ groups = self.get_groups(action) if _cw.user.matching_groups(groups): if DBG: - print ('check_perm: %r %r: user matches %s' % (action, _self_str, groups)) + print('check_perm: %r %r: user matches %s' % (action, _self_str, groups)) return # if 'owners' in allowed groups, check if the user actually owns this # object, if so that's enough @@ -676,14 +710,14 @@ kwargs.get('creating') or ('eid' in kwargs and _cw.user.owns(kwargs['eid']))): if DBG: - print ('check_perm: %r %r: user is owner or creation time' % - (action, _self_str)) + print('check_perm: %r %r: user is owner or creation time' % + (action, _self_str)) return # else if there is some rql expressions, check them if DBG: - print ('check_perm: %r %r %s' % - (action, _self_str, [(rqlexpr, kwargs, rqlexpr.check(_cw, **kwargs)) - for rqlexpr in self.get_rqlexprs(action)])) + print('check_perm: %r %r %s' % + (action, _self_str, [(rqlexpr, kwargs, rqlexpr.check(_cw, **kwargs)) + for rqlexpr in self.get_rqlexprs(action)])) if any(rqlexpr.check(_cw, **kwargs) for rqlexpr in self.get_rqlexprs(action)): return @@ -691,35 +725,10 @@ PermissionMixIn.check_perm = check_perm -RelationDefinitionSchema._RPROPERTIES['eid'] = None +CubicWebRelationDefinitionSchema._RPROPERTIES['eid'] = None # remember rproperties defined at this point. 
Others will have to be serialized in # CWAttribute.extra_props -KNOWN_RPROPERTIES = RelationDefinitionSchema.ALL_PROPERTIES() - -def rql_expression(self, expression, mainvars=None, eid=None): - """rql expression factory""" - if self.rtype.final: - return ERQLExpression(expression, mainvars, eid) - return RRQLExpression(expression, mainvars, eid) -RelationDefinitionSchema.rql_expression = rql_expression - -orig_check_permission_definitions = RelationDefinitionSchema.check_permission_definitions -def check_permission_definitions(self): - orig_check_permission_definitions(self) - schema = self.subject.schema - for action, groups in self.permissions.iteritems(): - for group_or_rqlexpr in groups: - if action == 'read' and \ - isinstance(group_or_rqlexpr, RQLExpression): - msg = "can't use rql expression for read permission of %s" - raise BadSchemaDefinition(msg % self) - if self.final and isinstance(group_or_rqlexpr, RRQLExpression): - msg = "can't use RRQLExpression on %s, use an ERQLExpression" - raise BadSchemaDefinition(msg % self) - if not self.final and isinstance(group_or_rqlexpr, ERQLExpression): - msg = "can't use ERQLExpression on %s, use a RRQLExpression" - raise BadSchemaDefinition(msg % self) -RelationDefinitionSchema.check_permission_definitions = check_permission_definitions +KNOWN_RPROPERTIES = CubicWebRelationDefinitionSchema.ALL_PROPERTIES() class CubicWebEntitySchema(EntitySchema): @@ -763,7 +772,7 @@ def check_permission_definitions(self): super(CubicWebEntitySchema, self).check_permission_definitions() - for groups in self.permissions.itervalues(): + for groups in self.permissions.values(): for group_or_rqlexpr in groups: if isinstance(group_or_rqlexpr, RRQLExpression): msg = "can't use RRQLExpression on %s, use an ERQLExpression" @@ -870,6 +879,7 @@ class CubicWebRelationSchema(PermissionMixIn, RelationSchema): permissions = {} ACTIONS = () + rdef_class = CubicWebRelationDefinitionSchema def __init__(self, schema=None, rdef=None, eid=None, **kwargs): if rdef is not None: @@ -906,7 +916,7 @@ if rdef.may_have_permission(action, req): return True else: - for rdef in self.rdefs.itervalues(): + for rdef in self.rdefs.values(): if rdef.may_have_permission(action, req): return True return False @@ -948,7 +958,7 @@ if not rdef.has_perm(_cw, action, **kwargs): return False else: - for rdef in self.rdefs.itervalues(): + for rdef in self.rdefs.values(): if not rdef.has_perm(_cw, action, **kwargs): return False return True @@ -986,7 +996,7 @@ etype_name_re = r'[A-Z][A-Za-z0-9]*[a-z]+[A-Za-z0-9]*$' def add_entity_type(self, edef): - edef.name = edef.name.encode() + edef.name = str(edef.name) edef.name = bw_normalize_etype(edef.name) if not re.match(self.etype_name_re, edef.name): raise BadSchemaDefinition( @@ -1011,7 +1021,7 @@ raise BadSchemaDefinition( '%r is not a valid name for a relation type. 
It should be ' 'lower cased' % rdef.name) - rdef.name = rdef.name.encode() + rdef.name = str(rdef.name) rschema = super(CubicWebSchema, self).add_relation_type(rdef) self._eid_index[rschema.eid] = rschema return rschema @@ -1071,7 +1081,7 @@ def iter_computed_attributes(self): for relation in self.relations(): - for rdef in relation.rdefs.itervalues(): + for rdef in relation.rdefs.values(): if rdef.final and rdef.formula is not None: yield rdef @@ -1124,17 +1134,12 @@ def rebuild_infered_relations(self): super(CubicWebSchema, self).rebuild_infered_relations() + self.finalize_computed_attributes() self.finalize_computed_relations() # additional cw specific constraints ########################################### -# these are implemented as CHECK constraints in sql, don't do the work -# twice -StaticVocabularyConstraint.check = lambda *args: True -IntervalBoundConstraint.check = lambda *args: True -BoundaryConstraint.check = lambda *args: True - class BaseRQLConstraint(RRQLExpression, BaseConstraint): """base class for rql constraints""" distinct_query = None @@ -1198,11 +1203,11 @@ return ';%s;%s\n%s' % (','.join(sorted(self.mainvars)), self.expression, self.msg or '') + @classmethod def deserialize(cls, value): value, msg = value.split('\n', 1) _, mainvars, expression = value.split(';', 2) return cls(expression, mainvars, msg) - deserialize = classmethod(deserialize) def repo_check(self, session, eidfrom, rtype, eidto=None): """raise ValidationError if the relation doesn't satisfy the constraint @@ -1245,7 +1250,7 @@ return _cw.execute(rql, args, build_descr=False) -class RQLConstraint(RepoEnforcedRQLConstraintMixIn, RQLVocabularyConstraint): +class RQLConstraint(RepoEnforcedRQLConstraintMixIn, BaseRQLConstraint): """the rql constraint is similar to the RQLVocabularyConstraint but are also enforced at the repository level """ @@ -1287,12 +1292,13 @@ make_workflowable(cls) return cls + +@add_metaclass(workflowable_definition) class WorkflowableEntityType(ybo.EntityType): """Use this base class instead of :class:`EntityType` to have workflow relations (i.e. `in_state`, `wf_info_for` and `custom_workflow`) on your entity type. 
""" - __metaclass__ = workflowable_definition __abstract__ = True diff -r a4fcee1e9789 -r 19fcce6dc6d1 schemas/Bookmark.py --- a/schemas/Bookmark.py Thu Mar 24 09:43:25 2016 +0100 +++ b/schemas/Bookmark.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,7 +19,7 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from yams.buildobjs import EntityType, RelationType, SubjectRelation, String from cubicweb.schema import RRQLExpression diff -r a4fcee1e9789 -r 19fcce6dc6d1 schemas/base.py --- a/schemas/base.py Thu Mar 24 09:43:25 2016 +0100 +++ b/schemas/base.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,7 @@ """core CubicWeb schema, but not necessary at bootstrap time""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from yams.buildobjs import (EntityType, RelationType, RelationDefinition, SubjectRelation, @@ -150,14 +150,16 @@ __permissions__ = PUB_SYSTEM_ATTR_PERMS cardinality = '11' subject = '*' - object = 'Datetime' + object = 'TZDatetime' + class modification_date(RelationType): """latest modification time of an entity""" __permissions__ = PUB_SYSTEM_ATTR_PERMS cardinality = '11' subject = '*' - object = 'Datetime' + object = 'TZDatetime' + class cwuri(RelationType): """internal entity uri""" @@ -379,5 +381,3 @@ 'add': ('managers', RRQLExpression('U has_update_permission S'),), 'delete': ('managers', RRQLExpression('U has_update_permission S'),), } - - diff -r a4fcee1e9789 -r 19fcce6dc6d1 schemas/bootstrap.py --- a/schemas/bootstrap.py Thu Mar 24 09:43:25 2016 +0100 +++ b/schemas/bootstrap.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,7 +19,7 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from yams.buildobjs import (EntityType, RelationType, RelationDefinition, Bytes, SubjectRelation, RichString, String, Boolean, Int) diff -r a4fcee1e9789 -r 19fcce6dc6d1 schemas/workflow.py --- a/schemas/workflow.py Thu Mar 24 09:43:25 2016 +0100 +++ b/schemas/workflow.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,7 +19,7 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from yams.buildobjs import (EntityType, RelationType, RelationDefinition, SubjectRelation, @@ -273,7 +273,7 @@ """indicate the current state of an entity""" __permissions__ = RO_REL_PERMS - # not inlined intentionnaly since when using ldap sources, user'state + # not inlined intentionnally since when using ldap sources, user'state # has to be stored outside the CWUser table inlined = False diff -r a4fcee1e9789 -r 19fcce6dc6d1 selectors.py --- a/selectors.py Thu Mar 24 09:43:25 2016 +0100 +++ b/selectors.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,6 +18,8 @@ from warnings import warn +from six import string_types + from logilab.common.deprecation import deprecated, class_renamed from cubicweb.predicates import * @@ -84,7 +86,7 @@ See `EntityPredicate` documentation for behaviour when row is not specified. - :param *etypes: entity types (`basestring`) which should be refused + :param *etypes: entity types (`string_types`) which should be refused """ def __init__(self, *etypes): super(_but_etype, self).__init__() diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/__init__.py --- a/server/__init__.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/__init__.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,6 +20,7 @@ The server module contains functions to initialize a new repository. 
""" +from __future__ import print_function __docformat__ = "restructuredtext en" @@ -28,6 +29,9 @@ from glob import glob from contextlib import contextmanager +from six import text_type, string_types +from six.moves import filter + from logilab.common.modutils import LazyObject from logilab.common.textutils import splitstrip from logilab.common.registry import yes @@ -138,7 +142,7 @@ if not debugmode: DEBUG = 0 return - if isinstance(debugmode, basestring): + if isinstance(debugmode, string_types): for mode in splitstrip(debugmode, sep='|'): DEBUG |= globals()[mode] else: @@ -196,7 +200,7 @@ user = session.create_entity('CWUser', login=login, upassword=pwd) for group in groups: session.execute('SET U in_group G WHERE U eid %(u)s, G name %(group)s', - {'u': user.eid, 'group': unicode(group)}) + {'u': user.eid, 'group': text_type(group)}) return user def init_repository(config, interactive=True, drop=False, vreg=None, @@ -225,78 +229,82 @@ sourcescfg = config.read_sources_file() source = sourcescfg['system'] driver = source['db-driver'] - sqlcnx = repo.system_source.get_connection() - sqlcursor = sqlcnx.cursor() - execute = sqlcursor.execute - if drop: - helper = database.get_db_helper(driver) - dropsql = sql_drop_all_user_tables(helper, sqlcursor) - # We may fail dropping some tables because of table dependencies, in a first pass. - # So, we try a second drop sequence to drop remaining tables if needed. - # Note that 2 passes is an arbitrary choice as it seems enougth for our usecases. - # (looping may induce infinite recursion when user have no right for example) - # Here we try to keep code simple and backend independant. That why we don't try to - # distinguish remaining tables (wrong right, dependencies, ...). - failed = sqlexec(dropsql, execute, cnx=sqlcnx, - pbtitle='-> dropping tables (first pass)') + with repo.internal_cnx() as cnx: + sqlcnx = cnx.cnxset.cnx + sqlcursor = cnx.cnxset.cu + execute = sqlcursor.execute + if drop: + helper = database.get_db_helper(driver) + dropsql = sql_drop_all_user_tables(helper, sqlcursor) + # We may fail dropping some tables because of table dependencies, in a first pass. + # So, we try a second drop sequence to drop remaining tables if needed. + # Note that 2 passes is an arbitrary choice as it seems enough for our usecases + # (looping may induce infinite recursion when user have no rights for example). + # Here we try to keep code simple and backend independent. That's why we don't try to + # distinguish remaining tables (missing privileges, dependencies, ...). + failed = sqlexec(dropsql, execute, cnx=sqlcnx, + pbtitle='-> dropping tables (first pass)') + if failed: + failed = sqlexec(failed, execute, cnx=sqlcnx, + pbtitle='-> dropping tables (second pass)') + remainings = list(filter(drop_filter, helper.list_tables(sqlcursor))) + assert not remainings, 'Remaining tables: %s' % ', '.join(remainings) + handler = config.migration_handler(schema, interactive=False, repo=repo, cnx=cnx) + # install additional driver specific sql files + handler.cmd_install_custom_sql_scripts() + for cube in reversed(config.cubes()): + handler.cmd_install_custom_sql_scripts(cube) + _title = '-> creating tables ' + print(_title, end=' ') + # schema entities and relations tables + # can't skip entities table even if system source doesn't support them, + # they are used sometimes by generated sql. Keeping them empty is much + # simpler than fixing this... 
+ schemasql = sqlschema(schema, driver) + #skip_entities=[str(e) for e in schema.entities() + # if not repo.system_source.support_entity(str(e))]) + failed = sqlexec(schemasql, execute, pbtitle=_title, delimiter=';;') if failed: - failed = sqlexec(failed, execute, cnx=sqlcnx, - pbtitle='-> dropping tables (second pass)') - remainings = filter(drop_filter, helper.list_tables(sqlcursor)) - assert not remainings, 'Remaining tables: %s' % ', '.join(remainings) - _title = '-> creating tables ' - print _title, - # schema entities and relations tables - # can't skip entities table even if system source doesn't support them, - # they are used sometimes by generated sql. Keeping them empty is much - # simpler than fixing this... - schemasql = sqlschema(schema, driver) - #skip_entities=[str(e) for e in schema.entities() - # if not repo.system_source.support_entity(str(e))]) - failed = sqlexec(schemasql, execute, pbtitle=_title, delimiter=';;') - if failed: - print 'The following SQL statements failed. You should check your schema.' - print failed - raise Exception('execution of the sql schema failed, you should check your schema') - sqlcursor.close() - sqlcnx.commit() - sqlcnx.close() + print('The following SQL statements failed. You should check your schema.') + print(failed) + raise Exception('execution of the sql schema failed, you should check your schema') + sqlcursor.close() + sqlcnx.commit() with repo.internal_cnx() as cnx: # insert entity representing the system source ssource = cnx.create_entity('CWSource', type=u'native', name=u'system') repo.system_source.eid = ssource.eid cnx.execute('SET X cw_source X WHERE X eid %(x)s', {'x': ssource.eid}) # insert base groups and default admin - print '-> inserting default user and default groups.' + print('-> inserting default user and default groups.') try: - login = unicode(sourcescfg['admin']['login']) + login = text_type(sourcescfg['admin']['login']) pwd = sourcescfg['admin']['password'] except KeyError: if interactive: msg = 'enter login and password of the initial manager account' login, pwd = manager_userpasswd(msg=msg, confirm=True) else: - login, pwd = unicode(source['db-user']), source['db-password'] + login, pwd = text_type(source['db-user']), source['db-password'] # sort for eid predicatability as expected in some server tests for group in sorted(BASE_GROUPS): - cnx.create_entity('CWGroup', name=unicode(group)) + cnx.create_entity('CWGroup', name=text_type(group)) admin = create_user(cnx, login, pwd, u'managers') cnx.execute('SET X owned_by U WHERE X is IN (CWGroup,CWSource), U eid %(u)s', {'u': admin.eid}) cnx.commit() repo.shutdown() - # reloging using the admin user + # re-login using the admin user config._cubes = None # avoid assertion error repo = get_repository(config=config) + # replace previous schema by the new repo's one. This is necessary so that we give the proper + # schema to `initialize_schema` above since it will initialize .eid attribute of schema elements + schema = repo.schema with connect(repo, login, password=pwd) as cnx: with cnx.security_enabled(False, False): repo.system_source.eid = ssource.eid # redo this manually handler = config.migration_handler(schema, interactive=False, cnx=cnx, repo=repo) - # install additional driver specific sql files - handler.cmd_install_custom_sql_scripts() - for cube in reversed(config.cubes()): - handler.cmd_install_custom_sql_scripts(cube) # serialize the schema initialize_schema(config, schema, handler) # yoo ! 
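The user and group creation code above coerces values with six.text_type before binding them as RQL substitutions: text_type is unicode on Python 2 and str on Python 3, so query arguments are uniformly unicode strings on both. Sketch:

from six import text_type

group = 'managers'
args = {'group': text_type(group)}   # u'managers' on py2, 'managers' on py3
assert isinstance(args['group'], text_type)
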
@@ -310,7 +318,7 @@ # (drop instance attribute to get back to class attribute) del config.cubicweb_appobject_path del config.cube_appobject_path - print '-> database for instance %s initialized.' % config.appid + print('-> database for instance %s initialized.' % config.appid) def initialize_schema(config, schema, mhandler, event='create'): diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/checkintegrity.py --- a/server/checkintegrity.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/checkintegrity.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,6 +20,8 @@ * integrity of a CubicWeb repository. Hum actually only the system database is checked. """ +from __future__ import print_function + __docformat__ = "restructuredtext en" import sys @@ -27,7 +29,7 @@ from logilab.common.shellutils import ProgressBar -from cubicweb.schema import PURE_VIRTUAL_RTYPES, VIRTUAL_RTYPES +from cubicweb.schema import PURE_VIRTUAL_RTYPES, VIRTUAL_RTYPES, UNIQUE_CONSTRAINTS from cubicweb.server.sqlutils import SQL_PREFIX def notify_fixed(fix): @@ -90,11 +92,11 @@ dbhelper = repo.system_source.dbhelper cursor = cnx.cnxset.cu if not dbhelper.has_fti_table(cursor): - print 'no text index table' + print('no text index table') dbhelper.init_fti(cursor) repo.system_source.do_fti = True # ensure full-text indexation is activated if etypes is None: - print 'Reindexing entities' + print('Reindexing entities') etypes = set() for eschema in schema.entities(): if eschema.final: @@ -107,8 +109,8 @@ # clear fti table first cnx.system_sql('DELETE FROM %s' % dbhelper.fti_table) else: - print 'Reindexing entities of type %s' % \ - ', '.join(sorted(str(e) for e in etypes)) + print('Reindexing entities of type %s' % \ + ', '.join(sorted(str(e) for e in etypes))) # clear fti table first. Use subquery for sql compatibility cnx.system_sql("DELETE FROM %s WHERE EXISTS(SELECT 1 FROM ENTITIES " "WHERE eid=%s AND type IN (%s))" % ( @@ -122,8 +124,7 @@ source = repo.system_source for eschema in etypes: etype_class = cnx.vreg['etypes'].etype_class(str(eschema)) - for fti_rql in etype_class.cw_fti_index_rql_queries(cnx): - rset = cnx.execute(fti_rql) + for rset in etype_class.cw_fti_index_rql_limit(cnx): source.fti_index_entities(cnx, rset.entities()) # clear entity cache to avoid high memory consumption on big tables cnx.drop_entity_cache() @@ -135,10 +136,7 @@ def check_schema(schema, cnx, eids, fix=1): """check serialized schema""" - print 'Checking serialized schema' - unique_constraints = ('SizeConstraint', 'FormatConstraint', - 'VocabularyConstraint', - 'RQLVocabularyConstraint') + print('Checking serialized schema') rql = ('Any COUNT(X),RN,SN,ON,CTN GROUPBY RN,SN,ON,CTN ORDERBY 1 ' 'WHERE X is CWConstraint, R constrained_by X, ' 'R relation_type RT, RT name RN, R from_entity ST, ST name SN, ' @@ -146,17 +144,17 @@ for count, rn, sn, on, cstrname in cnx.execute(rql): if count == 1: continue - if cstrname in unique_constraints: - print "ERROR: got %s %r constraints on relation %s.%s.%s" % ( - count, cstrname, sn, rn, on) + if cstrname in UNIQUE_CONSTRAINTS: + print("ERROR: got %s %r constraints on relation %s.%s.%s" % ( + count, cstrname, sn, rn, on)) if fix: - print 'dunno how to fix, do it yourself' + print('dunno how to fix, do it yourself') def check_text_index(schema, cnx, eids, fix=1): """check all entities registered in the text index""" - print 'Checking text index' + print('Checking text index') msg = ' Entity with eid %s exists in the text index but in no source (autofix will remove from text index)' cursor = cnx.system_sql('SELECT uid FROM 
appears;') for row in cursor.fetchall(): @@ -170,7 +168,7 @@ def check_entities(schema, cnx, eids, fix=1): """check all entities registered in the repo system table""" - print 'Checking entities system table' + print('Checking entities system table') # system table but no source msg = ' Entity %s with eid %s exists in the system table but in no source (autofix will delete the entity)' cursor = cnx.system_sql('SELECT eid,type FROM entities;') @@ -228,7 +226,7 @@ 'WHERE s.cw_name=e.type AND NOT EXISTS(SELECT 1 FROM is_instance_of_relation as cs ' ' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid)') notify_fixed(True) - print 'Checking entities tables' + print('Checking entities tables') msg = ' Entity with eid %s exists in the %s table but not in the system table (autofix will delete the entity)' for eschema in schema.entities(): if eschema.final: @@ -263,7 +261,7 @@ """check that eids referenced by relations are registered in the repo system table """ - print 'Checking relations' + print('Checking relations') for rschema in schema.relations(): if rschema.final or rschema.type in PURE_VIRTUAL_RTYPES: continue @@ -287,7 +285,7 @@ cursor = cnx.system_sql('SELECT eid_from FROM %s_relation;' % rschema) except Exception as ex: # usually because table doesn't exist - print 'ERROR', ex + print('ERROR', ex) continue for row in cursor.fetchall(): eid = row[0] @@ -310,14 +308,14 @@ def check_mandatory_relations(schema, cnx, eids, fix=1): """check entities missing some mandatory relation""" - print 'Checking mandatory relations' + print('Checking mandatory relations') msg = '%s #%s is missing mandatory %s relation %s (autofix will delete the entity)' for rschema in schema.relations(): if rschema.final or rschema in PURE_VIRTUAL_RTYPES or rschema in ('is', 'is_instance_of'): continue smandatory = set() omandatory = set() - for rdef in rschema.rdefs.itervalues(): + for rdef in rschema.rdefs.values(): if rdef.cardinality[0] in '1+': smandatory.add(rdef.subject) if rdef.cardinality[1] in '1+': @@ -340,12 +338,12 @@ """check for entities stored in the system source missing some mandatory attribute """ - print 'Checking mandatory attributes' + print('Checking mandatory attributes') msg = '%s #%s is missing mandatory attribute %s (autofix will delete the entity)' for rschema in schema.relations(): if not rschema.final or rschema in VIRTUAL_RTYPES: continue - for rdef in rschema.rdefs.itervalues(): + for rdef in rschema.rdefs.values(): if rdef.cardinality[0] in '1+': rql = 'Any X WHERE X %s NULL, X is %s, X cw_source S, S name "system"' % ( rschema, rdef.subject) @@ -361,7 +359,7 @@ FIXME: rewrite using RQL queries ? 
""" - print 'Checking metadata' + print('Checking metadata') cursor = cnx.system_sql("SELECT DISTINCT type FROM entities;") eidcolumn = SQL_PREFIX + 'eid' msg = ' %s with eid %s has no %s (autofix will set it to now)' @@ -374,8 +372,8 @@ {'type': etype}) continue table = SQL_PREFIX + etype - for rel, default in ( ('creation_date', datetime.now()), - ('modification_date', datetime.now()), ): + for rel, default in ( ('creation_date', datetime.utcnow()), + ('modification_date', datetime.utcnow()), ): column = SQL_PREFIX + rel cursor = cnx.system_sql("SELECT %s FROM %s WHERE %s is NULL" % (eidcolumn, table, column)) @@ -403,9 +401,9 @@ if fix: cnx.commit() else: - print + print() if not fix: - print 'WARNING: Diagnostic run, nothing has been corrected' + print('WARNING: Diagnostic run, nothing has been corrected') if reindex: cnx.rollback() reindex_entities(repo.schema, cnx, withpb=withpb) diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/cwzmq.py --- a/server/cwzmq.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/cwzmq.py Thu Jun 16 14:19:20 2016 +0200 @@ -65,7 +65,7 @@ def add_subscriber(self, address): subscriber = Subscriber(self.ioloop, address) - for topic, callback in self._topics.iteritems(): + for topic, callback in self._topics.items(): subscriber.subscribe(topic, callback) self._subscribers.append(subscriber) diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/edition.py --- a/server/edition.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/edition.py Thu Jun 16 14:19:20 2016 +0200 @@ -38,7 +38,7 @@ class EditedEntity(dict): """encapsulate entities attributes being written by an RQL query""" def __init__(self, entity, **kwargs): - dict.__init__(self, **kwargs) + super(EditedEntity, self).__init__(**kwargs) self.entity = entity self.skip_security = set() self.querier_pending_relations = {} @@ -50,15 +50,18 @@ def __lt__(self, other): # we don't want comparison by value inherited from dict - return id(self) < id(other) + raise NotImplementedError def __eq__(self, other): - return id(self) == id(other) + return self is other + + def __ne__(self, other): + return not (self == other) def __setitem__(self, attr, value): assert attr != 'eid' # don't add attribute into skip_security if already in edited - # attributes, else we may accidentaly skip a desired security check + # attributes, else we may accidentally skip a desired security check if attr not in self: self.skip_security.add(attr) self.edited_attribute(attr, value) @@ -83,7 +86,7 @@ def setdefault(self, attr, default): assert attr != 'eid' # don't add attribute into skip_security if already in edited - # attributes, else we may accidentaly skip a desired security check + # attributes, else we may accidentally skip a desired security check if attr not in self: self[attr] = default return self[attr] @@ -93,7 +96,7 @@ setitem = self.__setitem__ else: setitem = self.edited_attribute - for attr, value in values.iteritems(): + for attr, value in values.items(): setitem(attr, value) def edited_attribute(self, attr, value): @@ -103,6 +106,8 @@ assert not self.saved, 'too late to modify edited attributes' super(EditedEntity, self).__setitem__(attr, value) self.entity.cw_attr_cache[attr] = value + if self.entity._cw.vreg.schema.rschema(attr).final: + self.entity._cw_dont_cache_attribute(attr) def oldnewvalue(self, attr): """returns the couple (old attr value, new attr value) diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/hook.py --- a/server/hook.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/hook.py Thu Jun 16 14:19:20 2016 +0200 @@ -248,6 +248,8 @@ .. 
autoclass:: cubicweb.server.hook.LateOperation .. autoclass:: cubicweb.server.hook.DataOperationMixIn """ +from __future__ import print_function + __docformat__ = "restructuredtext en" from warnings import warn @@ -331,7 +333,7 @@ with cnx.running_hooks_ops(): for hook in hooks: if debug: - print event, _kwargs, hook + print(event, _kwargs, hook) hook() def get_pruned_hooks(self, cnx, event, entities, eids_from_to, kwargs): @@ -370,7 +372,7 @@ pruned = set() cnx.pruned_hooks_cache[cache_key] = pruned if look_for_selector is not None: - for id, hooks in self.iteritems(): + for id, hooks in self.items(): for hook in hooks: enabled_cat, main_filter = hook.filterable_selectors() if enabled_cat is not None: @@ -382,14 +384,14 @@ (main_filter.frometypes is not None or \ main_filter.toetypes is not None): continue - first_kwargs = _iter_kwargs(entities, eids_from_to, kwargs).next() + first_kwargs = next(_iter_kwargs(entities, eids_from_to, kwargs)) if not main_filter(hook, cnx, **first_kwargs): pruned.add(hook) return pruned def filtered_possible_objects(self, pruned, *args, **kwargs): - for appobjects in self.itervalues(): + for appobjects in self.values(): if pruned: filtered_objects = [obj for obj in appobjects if obj not in pruned] if not filtered_objects: @@ -636,7 +638,7 @@ # to set in concrete class (mandatory) subject_relations = None object_relations = None - # to set in concrete class (optionaly) + # to set in concrete class (optionally) skip_subject_relations = () skip_object_relations = () @@ -713,7 +715,7 @@ the transaction has been either rolled back either: - * intentionaly + * intentionally * a 'precommit' event failed, in which case all operations are rolled back once 'revertprecommit'' has been called diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/hooksmanager.py --- a/server/hooksmanager.py Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,22 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
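The EditedEntity change in server/edition.py above replaces comparison by dict value with comparison by object identity. Python 2 does not derive __ne__ from __eq__, so both methods are overridden explicitly; on Python 3 the extra __ne__ is redundant but harmless. A minimal sketch of the pattern, with an illustrative class name:

    class IdentityComparable(object):
        # equality is object identity, not the value comparison that a
        # dict subclass would otherwise inherit
        def __eq__(self, other):
            return self is other

        def __ne__(self, other):
            # required on Python 2, which does not infer it from __eq__
            return not (self == other)

    # note: overriding __eq__ disables inherited hashing on Python 3;
    # EditedEntity subclasses dict, which is unhashable anyway.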
-from logilab.common.deprecation import class_renamed, class_moved -from cubicweb.server import hook - -SystemHook = class_renamed('SystemHook', hook.Hook) -Hook = class_moved(hook.Hook) diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/migractions.py --- a/server/migractions.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/migractions.py Thu Jun 16 14:19:20 2016 +0200 @@ -26,6 +26,8 @@ * add an entity * execute raw RQL queries """ +from __future__ import print_function + __docformat__ = "restructuredtext en" import sys @@ -40,9 +42,12 @@ from warnings import warn from contextlib import contextmanager +from six import PY2, text_type + from logilab.common.deprecation import deprecated from logilab.common.decorators import cached, clear_cache +from yams.buildobjs import EntityType from yams.constraints import SizeConstraint from yams.schema import RelationDefinitionSchema @@ -55,7 +60,7 @@ from cubicweb import repoapi from cubicweb.migration import MigrationHelper, yes from cubicweb.server import hook, schemaserial as ss -from cubicweb.server.schema2sql import eschema2sql, rschema2sql, unique_index_name +from cubicweb.server.schema2sql import eschema2sql, rschema2sql, unique_index_name, sql_type from cubicweb.server.utils import manager_userpasswd from cubicweb.server.sqlutils import sqlexec, SQL_PREFIX @@ -93,7 +98,7 @@ self.repo = repo self.session = cnx.session elif connect: - self.repo_connect() + self.repo = config.repository() self.set_cnx() else: self.session = None @@ -134,30 +139,24 @@ try: self.cnx = repoapi.connect(self.repo, login, password=pwd) if not 'managers' in self.cnx.user.groups: - print 'migration need an account in the managers group' + print('migration need an account in the managers group') else: break except AuthenticationError: - print 'wrong user/password' + print('wrong user/password') except (KeyboardInterrupt, EOFError): - print 'aborting...' + print('aborting...') sys.exit(0) try: login, pwd = manager_userpasswd() except (KeyboardInterrupt, EOFError): - print 'aborting...' + print('aborting...') sys.exit(0) self.session = self.repo._get_session(self.cnx.sessionid) - - @cached - def repo_connect(self): - self.repo = repoapi.get_repository(config=self.config) - return self.repo - def cube_upgraded(self, cube, version): self.cmd_set_property('system.version.%s' % cube.lower(), - unicode(version)) + text_type(version)) self.commit() def shutdown(self): @@ -191,7 +190,7 @@ def backup_database(self, backupfile=None, askconfirm=True, format='native'): config = self.config - repo = self.repo_connect() + repo = self.repo # paths timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S') instbkdir = osp.join(config.appdatahome, 'backup') @@ -202,13 +201,13 @@ # check backup has to be done if osp.exists(backupfile) and not \ self.confirm('Backup file %s exists, overwrite it?' % backupfile): - print '-> no backup done.' + print('-> no backup done.') return elif askconfirm and not self.confirm('Backup %s database?' % config.appid): - print '-> no backup done.' 
+ print('-> no backup done.') return open(backupfile,'w').close() # kinda lock - os.chmod(backupfile, 0600) + os.chmod(backupfile, 0o600) # backup source = repo.system_source tmpdir = tempfile.mkdtemp() @@ -217,7 +216,7 @@ try: source.backup(osp.join(tmpdir, source.uri), self.confirm, format=format) except Exception as ex: - print '-> error trying to backup %s [%s]' % (source.uri, ex) + print('-> error trying to backup %s [%s]' % (source.uri, ex)) if not self.confirm('Continue anyway?', default='n'): raise SystemExit(1) else: @@ -226,7 +225,7 @@ format_file.write('%s\n' % format) with open(osp.join(tmpdir, 'versions.txt'), 'w') as version_file: versions = repo.get_versions() - for cube, version in versions.iteritems(): + for cube, version in versions.items(): version_file.write('%s %s\n' % (cube, version)) if not failed: bkup = tarfile.open(backupfile, 'w|gz') @@ -236,7 +235,7 @@ # call hooks repo.hm.call_hooks('server_backup', repo=repo, timestamp=timestamp) # done - print '-> backup file', backupfile + print('-> backup file', backupfile) finally: shutil.rmtree(tmpdir) @@ -268,19 +267,19 @@ if written_format in ('portable', 'native'): format = written_format self.config.init_cnxset_pool = False - repo = self.repo_connect() + repo = self.repo = self.config.repository() source = repo.system_source try: source.restore(osp.join(tmpdir, source.uri), self.confirm, drop, format) except Exception as exc: - print '-> error trying to restore %s [%s]' % (source.uri, exc) + print('-> error trying to restore %s [%s]' % (source.uri, exc)) if not self.confirm('Continue anyway?', default='n'): raise SystemExit(1) shutil.rmtree(tmpdir) # call hooks repo.init_cnxset_pool() repo.hm.call_hooks('server_restore', repo=repo, timestamp=backupfile) - print '-> database restored.' + print('-> database restored.') def commit(self): self.cnx.commit() @@ -362,11 +361,11 @@ directory = osp.join(self.config.cube_dir(cube), 'schema') sql_scripts = glob(osp.join(directory, '*.%s.sql' % driver)) for fpath in sql_scripts: - print '-> installing', fpath + print('-> installing', fpath) failed = sqlexec(open(fpath).read(), self.cnx.system_sql, False, delimiter=';;') if failed: - print '-> ERROR, skipping', fpath + print('-> ERROR, skipping', fpath) # schema synchronization internals ######################################## @@ -424,7 +423,7 @@ {'x': expreid}, ask_confirm=False) else: newexprs.pop(expression) - for expression in newexprs.itervalues(): + for expression in newexprs.values(): expr = expression.expression if not confirm or self.confirm('Add %s expression for %s permission of %s?' 
% (expr, action, erschema)): @@ -460,7 +459,10 @@ assert reporschema.eid, reporschema self.rqlexecall(ss.updaterschema2rql(rschema, reporschema.eid), ask_confirm=self.verbosity>=2) - if syncrdefs: + if rschema.rule: + if syncperms: + self._synchronize_permissions(rschema, reporschema.eid) + elif syncrdefs: for subj, obj in rschema.rdefs: if (subj, obj) not in reporschema.rdefs: continue @@ -552,12 +554,12 @@ for name in cols: rschema = repoeschema.subjrels.get(name) if rschema is None: - print 'dont add %s unique constraint on %s, missing %s' % ( - ','.join(cols), eschema, name) + print('dont add %s unique constraint on %s, missing %s' % ( + ','.join(cols), eschema, name)) return False if not (rschema.final or rschema.inlined): - print 'dont add %s unique constraint on %s, %s is neither final nor inlined' % ( - ','.join(cols), eschema, name) + print('dont add %s unique constraint on %s, %s is neither final nor inlined' % ( + ','.join(cols), eschema, name)) return False return True @@ -574,6 +576,7 @@ against its current definition: * order and other properties * constraints + * permissions """ subjtype, objtype = str(subjtype), str(objtype) rschema = self.fs_schema.rschema(rtype) @@ -743,8 +746,8 @@ rschema = self.repo.schema.rschema(attrname) attrtype = rschema.objects(etype)[0] except KeyError: - print 'warning: attribute %s %s is not known, skip deletion' % ( - etype, attrname) + print('warning: attribute %s %s is not known, skip deletion' % ( + etype, attrname)) else: self.cmd_drop_relation_definition(etype, attrname, attrtype, commit=commit) @@ -781,13 +784,18 @@ instschema = self.repo.schema eschema = self.fs_schema.eschema(etype) if etype in instschema and not (eschema.final and eschema.eid is None): - print 'warning: %s already known, skip addition' % etype + print('warning: %s already known, skip addition' % etype) return confirm = self.verbosity >= 2 groupmap = self.group_mapping() cstrtypemap = self.cstrtype_mapping() # register the entity into CWEType execute = self.cnx.execute + if eschema.final and eschema not in instschema: + # final types are expected to be in the living schema by default, but they are not if + # the type is defined in a cube that is being added + edef = EntityType(eschema.type, __permissions__=eschema.permissions) + instschema.add_entity_type(edef) ss.execschemarql(execute, eschema, ss.eschema2rql(eschema, groupmap)) # add specializes relation if needed specialized = eschema.specializes() @@ -803,6 +811,8 @@ # ignore those meta relations, they will be automatically added if rschema.type in META_RTYPES: continue + if not attrschema.type in instschema: + self.cmd_add_entity_type(attrschema.type, False, False) if not rschema.type in instschema: # need to add the relation type and to commit to get it # actually in the schema @@ -899,9 +909,11 @@ self.commit() def cmd_drop_entity_type(self, etype, commit=True): - """unregister an existing entity type + """Drop an existing entity type. - This will trigger deletion of necessary relation types and definitions + This will trigger deletion of necessary relation types and definitions. + Note that existing entities of the given type will be deleted without + any hooks called. 
""" # XXX what if we delete an entity type which is specialized by other types # unregister the entity from CWEType @@ -918,7 +930,7 @@ """ schema = self.repo.schema if oldname not in schema: - print 'warning: entity type %s is unknown, skip renaming' % oldname + print('warning: entity type %s is unknown, skip renaming' % oldname) return # if merging two existing entity types if newname in schema: @@ -997,7 +1009,7 @@ # elif simply renaming an entity type else: self.rqlexec('SET ET name %(newname)s WHERE ET is CWEType, ET name %(on)s', - {'newname' : unicode(newname), 'on' : oldname}, + {'newname' : text_type(newname), 'on' : oldname}, ask_confirm=False) if commit: self.commit() @@ -1017,8 +1029,8 @@ rschema = self.fs_schema.rschema(rtype) execute = self.cnx.execute if rtype in reposchema: - print 'warning: relation type %s is already known, skip addition' % ( - rtype) + print('warning: relation type %s is already known, skip addition' % ( + rtype)) elif rschema.rule: gmap = self.group_mapping() ss.execschemarql(execute, rschema, ss.crschema2rql(rschema, gmap)) @@ -1060,7 +1072,11 @@ self.commit() def cmd_drop_relation_type(self, rtype, commit=True): - """unregister an existing relation type""" + """Drop an existing relation type. + + Note that existing relations of the given type will be deleted without + any hooks called. + """ self.rqlexec('DELETE CWRType X WHERE X name %r' % rtype, ask_confirm=self.verbosity>=2) self.rqlexec('DELETE CWComputedRType X WHERE X name %r' % rtype, @@ -1098,8 +1114,8 @@ if not rtype in self.repo.schema: self.cmd_add_relation_type(rtype, addrdef=False, commit=True) if (subjtype, objtype) in self.repo.schema.rschema(rtype).rdefs: - print 'warning: relation %s %s %s is already known, skip addition' % ( - subjtype, rtype, objtype) + print('warning: relation %s %s %s is already known, skip addition' % ( + subjtype, rtype, objtype)) return rdef = self._get_rdef(rschema, subjtype, objtype) ss.execschemarql(self.cnx.execute, rdef, @@ -1120,7 +1136,11 @@ return rdef def cmd_drop_relation_definition(self, subjtype, rtype, objtype, commit=True): - """unregister an existing relation definition""" + """Drop an existing relation definition. + + Note that existing relations of the given definition will be deleted + without any hooks called. 
+ """ rschema = self.repo.schema.rschema(rtype) if rschema.rule: raise ExecutionError('Cannot drop a relation definition for a ' @@ -1200,7 +1220,7 @@ values = [] for k, v in kwargs.items(): values.append('X %s %%(%s)s' % (k, k)) - if isinstance(v, str): + if PY2 and isinstance(v, str): kwargs[k] = unicode(v) rql = 'SET %s WHERE %s' % (','.join(values), ','.join(restriction)) self.rqlexec(rql, kwargs, ask_confirm=self.verbosity>=2) @@ -1233,7 +1253,7 @@ self.rqlexec('SET C value %%(v)s WHERE X from_entity S, X relation_type R,' 'X constrained_by C, C cstrtype CT, CT name "SizeConstraint",' 'S name "%s", R name "%s"' % (etype, rtype), - {'v': unicode(SizeConstraint(size).serialize())}, + {'v': text_type(SizeConstraint(size).serialize())}, ask_confirm=self.verbosity>=2) else: self.rqlexec('DELETE X constrained_by C WHERE X from_entity S, X relation_type R,' @@ -1270,7 +1290,7 @@ :rtype: `Workflow` """ - wf = self.cmd_create_entity('Workflow', name=unicode(name), + wf = self.cmd_create_entity('Workflow', name=text_type(name), **kwargs) if not isinstance(wfof, (list, tuple)): wfof = (wfof,) @@ -1278,19 +1298,19 @@ return 'missing workflow relations, see make_workflowable(%s)' % etype for etype in wfof: eschema = self.repo.schema[etype] - etype = unicode(etype) + etype = text_type(etype) if ensure_workflowable: assert 'in_state' in eschema.subjrels, _missing_wf_rel(etype) assert 'custom_workflow' in eschema.subjrels, _missing_wf_rel(etype) assert 'wf_info_for' in eschema.objrels, _missing_wf_rel(etype) rset = self.rqlexec( 'SET X workflow_of ET WHERE X eid %(x)s, ET name %(et)s', - {'x': wf.eid, 'et': unicode(etype)}, ask_confirm=False) + {'x': wf.eid, 'et': text_type(etype)}, ask_confirm=False) assert rset, 'unexistant entity type %s' % etype if default: self.rqlexec( 'SET ET default_workflow X WHERE X eid %(x)s, ET name %(et)s', - {'x': wf.eid, 'et': unicode(etype)}, ask_confirm=False) + {'x': wf.eid, 'et': text_type(etype)}, ask_confirm=False) if commit: self.commit() return wf @@ -1321,13 +1341,13 @@ To set a user specific property value, use appropriate method on CWUser instance. 
""" - value = unicode(value) + value = text_type(value) try: prop = self.rqlexec( 'CWProperty X WHERE X pkey %(k)s, NOT X for_user U', - {'k': unicode(pkey)}, ask_confirm=False).get_entity(0, 0) + {'k': text_type(pkey)}, ask_confirm=False).get_entity(0, 0) except Exception: - self.cmd_create_entity('CWProperty', pkey=unicode(pkey), value=value) + self.cmd_create_entity('CWProperty', pkey=text_type(pkey), value=value) else: prop.cw_set(value=value) @@ -1351,7 +1371,7 @@ # remove from entity cache to avoid memory exhaustion del entity.cw_attr_cache[attribute] pb.update() - print + print() source.set_storage(etype, attribute, storage) def cmd_create_entity(self, etype, commit=False, **kwargs): @@ -1488,8 +1508,10 @@ "WHERE cw_eid=%s") % (newtype, rdef.eid) self.sqlexec(sql, ask_confirm=False) dbhelper = self.repo.system_source.dbhelper - sqltype = dbhelper.TYPE_MAPPING[newtype] + newrdef = self.fs_schema.rschema(attr).rdef(etype, newtype) + sqltype = sql_type(dbhelper, newrdef) cursor = self.cnx.cnxset.cu + # consider former cardinality by design, since cardinality change is not handled here allownull = rdef.cardinality[0] != '1' dbhelper.change_col_type(cursor, 'cw_%s' % etype, 'cw_%s' % attr, sqltype, allownull) if commit: @@ -1564,12 +1586,14 @@ else: raise StopIteration - def next(self): + def __next__(self): if self._rsetit is not None: - return self._rsetit.next() + return next(self._rsetit) rset = self._get_rset() self._rsetit = iter(rset) - return self._rsetit.next() + return next(self._rsetit) + + next = __next__ def entities(self): try: diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/querier.py --- a/server/querier.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/querier.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,10 +18,15 @@ """Helper classes to execute RQL queries on a set of sources, performing security checking and data aggregation. 
""" +from __future__ import print_function + __docformat__ = "restructuredtext en" from itertools import repeat +from six import text_type, string_types, integer_types +from six.moves import range + from rql import RQLSyntaxError, CoercionError from rql.stmts import Union from rql.nodes import ETYPE_PYOBJ_MAP, etype_from_pyobj, Relation, Exists, Not @@ -61,7 +66,7 @@ def check_no_password_selected(rqlst): """check that Password entities are not selected""" for solution in rqlst.solutions: - for var, etype in solution.iteritems(): + for var, etype in solution.items(): if etype == 'Password': raise Unauthorized('Password selection is not allowed (%s)' % var) @@ -103,13 +108,13 @@ solution, args)) if not user.matching_groups(rdef.get_groups('read')): if DBG: - print ('check_read_access: %s %s does not match %s' % - (rdef, user.groups, rdef.get_groups('read'))) + print('check_read_access: %s %s does not match %s' % + (rdef, user.groups, rdef.get_groups('read'))) # XXX rqlexpr not allowed raise Unauthorized('read', rel.r_type) if DBG: - print ('check_read_access: %s %s matches %s' % - (rdef, user.groups, rdef.get_groups('read'))) + print('check_read_access: %s %s matches %s' % + (rdef, user.groups, rdef.get_groups('read'))) def get_local_checks(cnx, rqlst, solution): """Check that the given user has credentials to access data read by the @@ -138,8 +143,8 @@ ex = Unauthorized('read', solution[varname]) ex.var = varname if DBG: - print ('check_read_access: %s %s %s %s' % - (varname, eschema, user.groups, eschema.get_groups('read'))) + print('check_read_access: %s %s %s %s' % + (varname, eschema, user.groups, eschema.get_groups('read'))) raise ex # don't insert security on variable only referenced by 'NOT X relation Y' or # 'NOT EXISTS(X relation Y)' @@ -265,7 +270,7 @@ # which have a known eid varkwargs = {} if not cnx.transaction_data.get('security-rqlst-cache'): - for var in rqlst.defined_vars.itervalues(): + for var in rqlst.defined_vars.values(): if var.stinfo['constnode'] is not None: eid = var.stinfo['constnode'].eval(self.args) varkwargs[var.name] = int(eid) @@ -285,7 +290,7 @@ newsolutions.append(solution) # try to benefit of rqlexpr.check cache for entities which # are specified by eid in query'args - for varname, eid in varkwargs.iteritems(): + for varname, eid in varkwargs.items(): try: rqlexprs = localcheck.pop(varname) except KeyError: @@ -303,7 +308,7 @@ # mark variables protected by an rql expression restricted_vars.update(localcheck) # turn local check into a dict key - localcheck = tuple(sorted(localcheck.iteritems())) + localcheck = tuple(sorted(localcheck.items())) localchecks.setdefault(localcheck, []).append(solution) # raise Unautorized exception if the user can't access to any solution if not newsolutions: @@ -334,7 +339,7 @@ def __init__(self, querier, rqlst, args, cnx): ExecutionPlan.__init__(self, querier, rqlst, args, cnx) - # save originaly selected variable, we may modify this + # save originally selected variable, we may modify this # dictionary for substitution (query parameters) self.selected = rqlst.selection # list of rows of entities definition (ssplanner.EditedEntity) @@ -414,7 +419,7 @@ def relation_defs(self): """return the list for relation definitions to insert""" - for rdefs in self._expanded_r_defs.itervalues(): + for rdefs in self._expanded_r_defs.values(): for rdef in rdefs: yield rdef for rdef in self.r_defs: @@ -446,13 +451,13 @@ relations = {} for subj, rtype, obj in self.relation_defs(): # if a string is given into args instead of an int, we get it 
here - if isinstance(subj, basestring): + if isinstance(subj, string_types): subj = int(subj) - elif not isinstance(subj, (int, long)): + elif not isinstance(subj, integer_types): subj = subj.entity.eid - if isinstance(obj, basestring): + if isinstance(obj, string_types): obj = int(obj) - elif not isinstance(obj, (int, long)): + elif not isinstance(obj, integer_types): obj = obj.entity.eid if repo.schema.rschema(rtype).inlined: if subj not in edited_entities: @@ -468,7 +473,7 @@ else: relations[rtype] = [(subj, obj)] repo.glob_add_relations(cnx, relations) - for edited in edited_entities.itervalues(): + for edited in edited_entities.values(): repo.glob_update_entity(cnx, edited) @@ -507,7 +512,7 @@ def parse(self, rql, annotate=False): """return a rql syntax tree for the given rql""" try: - return self._parse(unicode(rql), annotate=annotate) + return self._parse(text_type(rql), annotate=annotate) except UnicodeError: raise RQLSyntaxError(rql) @@ -539,8 +544,8 @@ """ if server.DEBUG & (server.DBG_RQL | server.DBG_SQL): if server.DEBUG & (server.DBG_MORE | server.DBG_SQL): - print '*'*80 - print 'querier input', repr(rql), repr(args) + print('*'*80) + print('querier input', repr(rql), repr(args)) # parse the query and binds variables cachekey = (rql,) try: @@ -601,7 +606,7 @@ if args: # different SQL generated when some argument is None or not (IS # NULL). This should be considered when computing sql cache key - cachekey += tuple(sorted([k for k, v in args.iteritems() + cachekey += tuple(sorted([k for k, v in args.items() if v is None])) # make an execution plan plan = self.plan_factory(rqlst, args, cnx) @@ -641,7 +646,7 @@ # so compute description manually even if there is only # one solution basedescr = [None] * len(plan.selected) - todetermine = zip(xrange(len(plan.selected)), repeat(False)) + todetermine = list(zip(range(len(plan.selected)), repeat(False))) descr = _build_descr(cnx, results, basedescr, todetermine) # FIXME: get number of affected entities / relations on non # selection queries ? @@ -668,7 +673,7 @@ unstables = rqlst.get_variable_indices() basedescr = [] todetermine = [] - for i in xrange(len(rqlst.children[0].selection)): + for i in range(len(rqlst.children[0].selection)): ttype = _selection_idx_type(i, rqlst, args) if ttype is None or ttype == 'Any': ttype = None diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/repository.py --- a/server/repository.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/repository.py Thu Jun 16 14:19:20 2016 +0200 @@ -25,15 +25,18 @@ point to a cubicweb instance. * handles session management """ +from __future__ import print_function + __docformat__ = "restructuredtext en" import threading -import Queue from warnings import warn from itertools import chain from time import time, localtime, strftime from contextlib import contextmanager +from six.moves import range, queue + from logilab.common.decorators import cached, clear_cache from logilab.common.deprecation import deprecated @@ -186,18 +189,18 @@ # registry hook to fix user class on registry reload @onevent('after-registry-reload', self) def fix_user_classes(self): - # After registery reload the 'CWUser' class used for CWEtype - # changed. To any existing user object have a different class than + # After registry reload the 'CWUser' class used for CWEtype + # changed. So any existing user object have a different class than # the new loaded one. We are hot fixing this. 
usercls = self.vreg['etypes'].etype_class('CWUser') - for session in self._sessions.itervalues(): + for session in self._sessions.values(): if not isinstance(session.user, InternalManager): session.user.__class__ = usercls def init_cnxset_pool(self): """should be called bootstrap_repository, as this is what it does""" config = self.config - self._cnxsets_pool = Queue.Queue() + self._cnxsets_pool = queue.Queue() # 0. init a cnxset that will be used to fetch bootstrap information from # the database self._cnxsets_pool.put_nowait(self.system_source.wrapped_connection()) @@ -223,6 +226,11 @@ if not config.creating: self.info("set fs instance'schema") self.set_schema(config.load_schema(expand_cubes=True)) + if not config.creating: + # set eids on entities schema + with self.internal_cnx() as cnx: + for etype, eid in cnx.execute('Any XN,X WHERE X is CWEType, X name XN'): + self.schema.eschema(etype).eid = eid else: # normal start: load the instance schema from the database self.info('loading schema from the repository') @@ -240,7 +248,7 @@ # proper initialization self._get_cnxset().close(True) self.cnxsets = [] # list of available cnxsets (can't iterate on a Queue) - for i in xrange(config['connections-pool-size']): + for i in range(config['connections-pool-size']): self.cnxsets.append(self.system_source.wrapped_connection()) self._cnxsets_pool.put_nowait(self.cnxsets[-1]) @@ -308,7 +316,7 @@ else: self.vreg._set_schema(schema) self.querier.set_schema(schema) - for source in self.sources_by_uri.itervalues(): + for source in self.sources_by_uri.values(): source.set_schema(schema) self.schema = schema @@ -377,7 +385,7 @@ def _get_cnxset(self): try: return self._cnxsets_pool.get(True, timeout=5) - except Queue.Empty: + except queue.Empty: raise Exception('no connections set available after 5 secs, probably either a ' 'bug in code (too many uncommited/rolled back ' 'connections) or too much load on the server (in ' @@ -387,13 +395,6 @@ def _free_cnxset(self, cnxset): self._cnxsets_pool.put_nowait(cnxset) - def pinfo(self): - # XXX: session.cnxset is accessed from a local storage, would be interesting - # to see if there is a cnxset set in any thread specific data) - return '%s: %s (%s)' % (self._cnxsets_pool.qsize(), - ','.join(session.user.login for session in self._sessions.itervalues() - if session.cnxset), - threading.currentThread()) def shutdown(self): """called on server stop event to properly close opened sessions and connections @@ -441,7 +442,7 @@ """ # iter on sources_by_uri then check enabled source since sources doesn't # contain copy based sources - for source in self.sources_by_uri.itervalues(): + for source in self.sources_by_uri.values(): if self.config.source_enabled(source) and source.support_entity('CWUser'): try: return source.authenticate(cnx, login, **authinfo) @@ -575,7 +576,7 @@ """ sources = {} # remove sensitive information - for uri, source in self.sources_by_uri.iteritems(): + for uri, source in self.sources_by_uri.items(): sources[uri] = source.public_config return sources @@ -623,7 +624,7 @@ raise Exception('bad input for find_user') with self.internal_cnx() as cnx: varmaker = rqlvar_maker() - vars = [(attr, varmaker.next()) for attr in fetch_attrs] + vars = [(attr, next(varmaker)) for attr in fetch_attrs] rql = 'Any %s WHERE X is CWUser, ' % ','.join(var[1] for var in vars) rql += ','.join('X %s %s' % (var[0], var[1]) for var in vars) + ',' rset = cnx.execute(rql + ','.join('X %s %%(%s)s' % (attr, attr) @@ -658,12 +659,6 @@ """open a new session for a given user and 
return its sessionid """ return self.new_session(login, **kwargs).sessionid - def check_session(self, sessionid): - """raise `BadConnectionId` if the connection is no more valid, else - return its latest activity timestamp. - """ - return self._get_session(sessionid).timestamp - def close(self, sessionid, txid=None, checkshuttingdown=True): """close the session with the given id""" session = self._get_session(sessionid, txid=txid, @@ -779,6 +774,7 @@ args[key] = int(args[key]) return tuple(cachekey) + @deprecated('[3.22] use the new store API') def extid2eid(self, source, extid, etype, cnx, insert=True, sourceparams=None): """Return eid from a local id. If the eid is a negative integer, that @@ -919,7 +915,7 @@ # set caches asap extid = self.init_entity_caches(cnx, entity, source) if server.DEBUG & server.DBG_REPO: - print 'ADD entity', self, entity.cw_etype, entity.eid, edited + print('ADD entity', self, entity.cw_etype, entity.eid, edited) prefill_entity_caches(entity) self.hm.call_hooks('before_add_entity', cnx, entity=entity) relations = preprocess_inlined_relations(cnx, entity) @@ -950,8 +946,8 @@ """ entity = edited.entity if server.DEBUG & server.DBG_REPO: - print 'UPDATE entity', entity.cw_etype, entity.eid, \ - entity.cw_attr_cache, edited + print('UPDATE entity', entity.cw_etype, entity.eid, + entity.cw_attr_cache, edited) hm = self.hm eschema = entity.e_schema cnx.set_entity_cache(entity) @@ -1043,9 +1039,9 @@ except KeyError: data_by_etype[etype] = [entity] source = self.system_source - for etype, entities in data_by_etype.iteritems(): + for etype, entities in data_by_etype.items(): if server.DEBUG & server.DBG_REPO: - print 'DELETE entities', etype, [entity.eid for entity in entities] + print('DELETE entities', etype, [entity.eid for entity in entities]) self.hm.call_hooks('before_delete_entity', cnx, entities=entities) self._delete_cascade_multi(cnx, entities) source.delete_entities(cnx, entities) @@ -1067,10 +1063,10 @@ subjects_by_types = {} objects_by_types = {} activintegrity = cnx.is_hook_category_activated('activeintegrity') - for rtype, eids_subj_obj in relations.iteritems(): + for rtype, eids_subj_obj in relations.items(): if server.DEBUG & server.DBG_REPO: for subjeid, objeid in eids_subj_obj: - print 'ADD relation', subjeid, rtype, objeid + print('ADD relation', subjeid, rtype, objeid) for subjeid, objeid in eids_subj_obj: if rtype in relations_by_rtype: relations_by_rtype[rtype].append((subjeid, objeid)) @@ -1105,22 +1101,22 @@ objects[objeid] = len(relations_by_rtype[rtype]) continue objects[objeid] = len(relations_by_rtype[rtype]) - for rtype, source_relations in relations_by_rtype.iteritems(): + for rtype, source_relations in relations_by_rtype.items(): self.hm.call_hooks('before_add_relation', cnx, rtype=rtype, eids_from_to=source_relations) - for rtype, source_relations in relations_by_rtype.iteritems(): + for rtype, source_relations in relations_by_rtype.items(): source.add_relations(cnx, rtype, source_relations) rschema = self.schema.rschema(rtype) for subjeid, objeid in source_relations: cnx.update_rel_cache_add(subjeid, rtype, objeid, rschema.symmetric) - for rtype, source_relations in relations_by_rtype.iteritems(): + for rtype, source_relations in relations_by_rtype.items(): self.hm.call_hooks('after_add_relation', cnx, rtype=rtype, eids_from_to=source_relations) def glob_delete_relation(self, cnx, subject, rtype, object): """delete a relation from the repository""" if server.DEBUG & server.DBG_REPO: - print 'DELETE relation', subject, rtype, object + 
print('DELETE relation', subject, rtype, object) source = self.system_source self.hm.call_hooks('before_delete_relation', cnx, eidfrom=subject, rtype=rtype, eidto=object) diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/rqlannotation.py --- a/server/rqlannotation.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/rqlannotation.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,6 +18,7 @@ """Functions to add additional annotations on a rql syntax tree to ease later code generation. """ +from __future__ import print_function __docformat__ = "restructuredtext en" @@ -33,7 +34,7 @@ #if server.DEBUG: # print '-------- sql annotate', repr(rqlst) getrschema = annotator.schema.rschema - for var in rqlst.defined_vars.itervalues(): + for var in rqlst.defined_vars.values(): stinfo = var.stinfo if stinfo.get('ftirels'): has_text_query = True @@ -144,7 +145,7 @@ stinfo['invariant'] = False # see unittest_rqlannotation. test_has_text_security_cache_bug # XXX probably more to do, but yet that work without more... - for col_alias in rqlst.aliases.itervalues(): + for col_alias in rqlst.aliases.values(): if col_alias.stinfo.get('ftirels'): has_text_query = True return has_text_query @@ -194,7 +195,7 @@ # if DISTINCT query, can use variable from a different scope as principal # since introduced duplicates will be removed if scope.stmt.distinct and diffscope_rels: - return iter(_sort(diffscope_rels)).next() + return next(iter(_sort(diffscope_rels))) # XXX could use a relation from a different scope if it can't generate # duplicates, so we should have to check cardinality raise CantSelectPrincipal() @@ -231,7 +232,7 @@ for select in union.children: for subquery in select.with_: set_qdata(getrschema, subquery.query, noinvariant) - for var in select.defined_vars.itervalues(): + for var in select.defined_vars.values(): if var.stinfo['invariant']: if var in noinvariant and not var.stinfo['principal'].r_type == 'has_text': var._q_invariant = False @@ -317,7 +318,7 @@ def compute(self, rqlst): # set domains for each variable - for varname, var in rqlst.defined_vars.iteritems(): + for varname, var in rqlst.defined_vars.items(): if var.stinfo['uidrel'] is not None or \ self.eschema(rqlst.solutions[0][varname]).final: ptypes = var.stinfo['possibletypes'] @@ -354,9 +355,9 @@ continue def _debug_print(self): - print 'varsols', dict((x, sorted(str(v) for v in values)) - for x, values in self.varsols.iteritems()) - print 'ambiguous vars', sorted(self.ambiguousvars) + print('varsols', dict((x, sorted(str(v) for v in values)) + for x, values in self.varsols.items())) + print('ambiguous vars', sorted(self.ambiguousvars)) def set_rel_constraint(self, term, rel, etypes_func): if isinstance(term, VariableRef) and self.is_ambiguous(term.variable): diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/schema2sql.py --- a/server/schema2sql.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/schema2sql.py Thu Jun 16 14:19:20 2016 +0200 @@ -162,8 +162,8 @@ def check_constraint(eschema, aschema, attr, constraint, dbhelper, prefix=''): # XXX should find a better name - cstrname = 'cstr' + md5(eschema.type + attr + constraint.type() + - (constraint.serialize() or '')).hexdigest() + cstrname = 'cstr' + md5((eschema.type + attr + constraint.type() + + (constraint.serialize() or '')).encode('ascii')).hexdigest() if constraint.type() == 'BoundaryConstraint': value = as_sql(constraint.boundary, dbhelper, prefix) return cstrname, '%s%s %s %s' % (prefix, attr, constraint.operator, value) @@ -190,8 +190,7 @@ """write an attribute schema as SQL statements to stdout""" attr = 
rschema.type rdef = rschema.rdef(eschema.type, aschema.type) - sqltype = type_from_constraints(dbhelper, aschema.type, rdef.constraints, - creating) + sqltype = type_from_rdef(dbhelper, rdef, creating) if SET_DEFAULT: default = eschema.default(attr) if default is not None: @@ -215,25 +214,33 @@ return sqltype -def type_from_constraints(dbhelper, etype, constraints, creating=True): - """return a sql type string corresponding to the constraints""" - constraints = list(constraints) +def type_from_rdef(dbhelper, rdef, creating=True): + """return a sql type string corresponding to the relation definition""" + constraints = list(rdef.constraints) unique, sqltype = False, None - size_constrained_string = dbhelper.TYPE_MAPPING.get('SizeConstrainedString', 'varchar(%s)') - if etype == 'String': + if rdef.object.type == 'String': for constraint in constraints: if isinstance(constraint, SizeConstraint): if constraint.max is not None: + size_constrained_string = dbhelper.TYPE_MAPPING.get( + 'SizeConstrainedString', 'varchar(%s)') sqltype = size_constrained_string % constraint.max elif isinstance(constraint, UniqueConstraint): unique = True if sqltype is None: - sqltype = dbhelper.TYPE_MAPPING[etype] + sqltype = sql_type(dbhelper, rdef) if creating and unique: sqltype += ' UNIQUE' return sqltype +def sql_type(dbhelper, rdef): + sqltype = dbhelper.TYPE_MAPPING[rdef.object] + if callable(sqltype): + sqltype = sqltype(rdef) + return sqltype + + _SQL_SCHEMA = """ CREATE TABLE %(table)s ( eid_from INTEGER NOT NULL REFERENCES entities (eid), diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/schemaserial.py --- a/server/schemaserial.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/schemaserial.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,16 +16,20 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
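The check_constraint change in server/schema2sql.py above feeds md5() an encoded byte string: under Python 3, hashlib digests accept only bytes, while under Python 2 encoding an ASCII str is effectively a no-op. A minimal sketch of the rule, with illustrative function and parameter names:

    from hashlib import md5

    def constraint_name(eschema_type, attr, cstr_type, serialized):
        # concatenate the schema strings, then encode: md5() wants bytes,
        # not text, on Python 3
        parts = eschema_type + attr + cstr_type + (serialized or '')
        return 'cstr' + md5(parts.encode('ascii')).hexdigest()

The same name is produced on both Python versions, so constraint names stored before and after the port stay compatible.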
"""functions for schema / permissions (de)serialization using RQL""" +from __future__ import print_function __docformat__ = "restructuredtext en" import os import json import sys +import sqlite3 + +from six import PY2, text_type, string_types from logilab.common.shellutils import ProgressBar, DummyProgressBar -from yams import BadSchemaDefinition, schema as schemamod, buildobjs as ybo +from yams import BadSchemaDefinition, schema as schemamod, buildobjs as ybo, constraints from cubicweb import Binary from cubicweb.schema import (KNOWN_RPROPERTIES, CONSTRAINTS, ETYPE_NAME_MAP, @@ -49,11 +53,11 @@ return res missing = [g for g in ('owners', 'managers', 'users', 'guests') if not g in res] if missing: - print 'some native groups are missing but the following groups have been found:' - print '\n'.join('* %s (%s)' % (n, eid) for n, eid in res.items()) - print - print 'enter the eid of a to group to map to each missing native group' - print 'or just type enter to skip permissions granted to a group' + print('some native groups are missing but the following groups have been found:') + print('\n'.join('* %s (%s)' % (n, eid) for n, eid in res.items())) + print() + print('enter the eid of a to group to map to each missing native group') + print('or just type enter to skip permissions granted to a group') for group in missing: while True: value = raw_input('eid for group %s: ' % group).strip() @@ -62,13 +66,13 @@ try: eid = int(value) except ValueError: - print 'eid should be an integer' + print('eid should be an integer') continue for eid_ in res.values(): if eid == eid_: break else: - print 'eid is not a group eid' + print('eid is not a group eid') continue res[name] = eid break @@ -146,7 +150,7 @@ {'x': etype, 'n': netype}) cnx.commit(False) tocleanup = [eid] - tocleanup += (eid for eid, cached in repo._type_source_cache.iteritems() + tocleanup += (eid for eid, cached in repo._type_source_cache.items() if etype == cached[0]) repo.clear_caches(tocleanup) cnx.commit(False) @@ -240,7 +244,7 @@ 'order', 'description', 'indexed', 'fulltextindexed', 'internationalizable', 'default', 'formula'), values)) typeparams = extra_props.get(attrs['rdefeid']) - attrs.update(json.load(typeparams) if typeparams else {}) + attrs.update(json.loads(typeparams.getvalue().decode('ascii')) if typeparams else {}) default = attrs['default'] if default is not None: if isinstance(default, Binary): @@ -281,7 +285,7 @@ else: rtype = str(rel) relations[1].append(rtype) - for eschema, unique_together in unique_togethers.itervalues(): + for eschema, unique_together in unique_togethers.values(): eschema._unique_together.append(tuple(sorted(unique_together))) schema.infer_specialization_rules() cnx.commit() @@ -309,12 +313,20 @@ res.setdefault(eid, {}).setdefault(action, []).append( (expr, mainvars, expreid) ) return res + def deserialize_rdef_constraints(cnx): """return the list of relation definition's constraints as instances""" + if cnx.repo.system_source.dbdriver != 'sqlite' or sqlite3.sqlite_version_info >= (3, 7, 12): + # these are implemented as CHECK constraints in sql, don't do the work twice. 
Unless we + # are using too old version of sqlite which misses the constraint name in the integrity + # error so we've to check them by ourselves anyway + constraints.StaticVocabularyConstraint.check = lambda *args: True + constraints.IntervalBoundConstraint.check = lambda *args: True + constraints.BoundaryConstraint.check = lambda *args: True res = {} for rdefeid, ceid, ct, val in cnx.execute( - 'Any E, X,TN,V WHERE E constrained_by X, X is CWConstraint, ' - 'X cstrtype T, T name TN, X value V', build_descr=False): + 'Any E, X,TN,V WHERE E constrained_by X, X is CWConstraint, ' + 'X cstrtype T, T name TN, X value V', build_descr=False): cstr = CONSTRAINTS[ct].deserialize(val) cstr.eid = ceid res.setdefault(rdefeid, []).append(cstr) @@ -331,7 +343,7 @@ thispermsdict = permsidx[erschema.eid] except KeyError: return - for action, somethings in thispermsdict.iteritems(): + for action, somethings in thispermsdict.items(): erschema.permissions[action] = tuple( isinstance(p, tuple) and erschema.rql_expression(*p) or p for p in somethings) @@ -344,7 +356,7 @@ current schema """ _title = '-> storing the schema in the database ' - print _title, + print(_title, end=' ') execute = cnx.execute eschemas = schema.entities() pb_size = (len(eschemas + schema.relations()) @@ -366,7 +378,7 @@ cstrtypemap = {} rql = 'INSERT CWConstraintType X: X name %(ct)s' for cstrtype in CONSTRAINTS: - cstrtypemap[cstrtype] = execute(rql, {'ct': unicode(cstrtype)}, + cstrtypemap[cstrtype] = execute(rql, {'ct': text_type(cstrtype)}, build_descr=False)[0][0] pb.update() # serialize relations @@ -381,10 +393,10 @@ continue execschemarql(execute, rschema, rschema2rql(rschema, addrdef=False)) if rschema.symmetric: - rdefs = [rdef for k, rdef in rschema.rdefs.iteritems() + rdefs = [rdef for k, rdef in rschema.rdefs.items() if (rdef.subject, rdef.object) == k] else: - rdefs = rschema.rdefs.itervalues() + rdefs = rschema.rdefs.values() for rdef in rdefs: execschemarql(execute, rdef, rdef2rql(rdef, cstrtypemap, groupmap)) @@ -397,7 +409,7 @@ for rql, kwargs in specialize2rql(schema): execute(rql, kwargs, build_descr=False) pb.update() - print + print() # high level serialization functions @@ -455,8 +467,8 @@ columnset = set() for columns in eschema._unique_together: if columns in columnset: - print ('schemaserial: skipping duplicate unique together %r %r' % - (eschema.type, columns)) + print('schemaserial: skipping duplicate unique together %r %r' % + (eschema.type, columns)) continue columnset.add(columns) rql, args = _uniquetogether2rql(eschema, columns) @@ -471,7 +483,7 @@ for i, name in enumerate(unique_together): rschema = eschema.schema.rschema(name) rtype = 'T%d' % i - substs[rtype] = unicode(rschema.type) + substs[rtype] = text_type(rschema.type) relations.append('C relations %s' % rtype) restrictions.append('%(rtype)s name %%(%(rtype)s)s' % {'rtype': rtype}) relations = ', '.join(relations) @@ -483,11 +495,11 @@ def _ervalues(erschema): try: - type_ = unicode(erschema.type) + type_ = text_type(erschema.type) except UnicodeDecodeError as e: raise Exception("can't decode %s [was %s]" % (erschema.type, e)) try: - desc = unicode(erschema.description) or u'' + desc = text_type(erschema.description) or u'' except UnicodeDecodeError as e: raise Exception("can't decode %s [was %s]" % (erschema.description, e)) return { @@ -509,7 +521,7 @@ if addrdef: assert cstrtypemap # sort for testing purpose - for rdef in sorted(rschema.rdefs.itervalues(), + for rdef in sorted(rschema.rdefs.values(), key=lambda x: (x.subject, x.object)): for 
rql, values in rdef2rql(rdef, cstrtypemap, groupmap): yield rql, values @@ -519,7 +531,7 @@ values['final'] = rschema.final values['symmetric'] = rschema.symmetric values['inlined'] = rschema.inlined - if isinstance(rschema.fulltext_container, str): + if PY2 and isinstance(rschema.fulltext_container, str): values['fulltext_container'] = unicode(rschema.fulltext_container) else: values['fulltext_container'] = rschema.fulltext_container @@ -535,7 +547,7 @@ def crschema_relations_values(crschema): values = _ervalues(crschema) - values['rule'] = unicode(crschema.rule) + values['rule'] = text_type(crschema.rule) # XXX why oh why? del values['final'] relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)] @@ -581,20 +593,20 @@ value = bool(value) elif prop == 'ordernum': value = int(value) - elif isinstance(value, str): + elif PY2 and isinstance(value, str): value = unicode(value) if value is not None and prop == 'default': value = Binary.zpickle(value) values[amap.get(prop, prop)] = value if extra: - values['extra_props'] = Binary(json.dumps(extra)) + values['extra_props'] = Binary(json.dumps(extra).encode('ascii')) relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)] return relations, values def constraints2rql(cstrtypemap, constraints, rdefeid=None): for constraint in constraints: values = {'ct': cstrtypemap[constraint.type()], - 'value': unicode(constraint.serialize()), + 'value': text_type(constraint.serialize()), 'x': rdefeid} # when not specified, will have to be set by the caller yield 'INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE \ CT eid %(ct)s, EDEF eid %(x)s', values @@ -613,23 +625,23 @@ # may occurs when modifying persistent schema continue for group_or_rqlexpr in grantedto: - if isinstance(group_or_rqlexpr, basestring): + if isinstance(group_or_rqlexpr, string_types): # group try: yield ('SET X %s_permission Y WHERE Y eid %%(g)s, X eid %%(x)s' % action, {'g': groupmap[group_or_rqlexpr]}) except KeyError: - print ("WARNING: group %s used in permissions for %s was ignored because it doesn't exist." " You may want to add it into a precreate.py file" % (group_or_rqlexpr, erschema)) + print("WARNING: group %s used in permissions for %s was ignored because it doesn't exist." " You may want to add it into a precreate.py file" % (group_or_rqlexpr, erschema)) continue else: # rqlexpr rqlexpr = group_or_rqlexpr yield ('INSERT RQLExpression E: E expression %%(e)s, E exprtype %%(t)s, ' 'E mainvars %%(v)s, X %s_permission E WHERE X eid %%(x)s' % action, - {'e': unicode(rqlexpr.expression), - 'v': unicode(','.join(sorted(rqlexpr.mainvars))), - 't': unicode(rqlexpr.__class__.__name__)}) + {'e': text_type(rqlexpr.expression), - 'v': text_type(','.join(sorted(rqlexpr.mainvars))), - 't': text_type(rqlexpr.__class__.__name__)}) # update functions @@ -641,7 +653,7 @@ def updaterschema2rql(rschema, eid): if rschema.rule: yield ('SET X rule %(r)s WHERE X eid %(x)s', - {'x': eid, 'r': unicode(rschema.rule)}) + {'x': eid, 'r': text_type(rschema.rule)}) else: relations, values = rschema_relations_values(rschema) values['x'] = eid diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/serverconfig.py --- a/server/serverconfig.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/serverconfig.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,12 +16,14 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
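The extra_props handling in server/schemaserial.py above follows the same text/bytes discipline: json.dumps() produces text, the Binary storage column holds bytes, so the value is encoded on write and decoded back on read (the 'ascii' codec is safe here because json.dumps escapes non-ASCII characters by default). A rough sketch of the round trip, using io.BytesIO in place of cubicweb's Binary and illustrative function names:

    import json
    from io import BytesIO

    def dump_extra_props(extra):
        # text -> bytes before storage
        return BytesIO(json.dumps(extra).encode('ascii'))

    def load_extra_props(buf):
        # bytes -> text before parsing
        return json.loads(buf.getvalue().decode('ascii'))

For instance, dump_extra_props({'fr': u'titre'}) stores the serialized mapping as bytes and load_extra_props() returns it unchanged on either Python version.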
"""server.serverconfig definition""" +from __future__ import print_function __docformat__ = "restructuredtext en" import sys from os.path import join, exists -from StringIO import StringIO + +from six.moves import StringIO import logilab.common.configuration as lgconfig from logilab.common.decorators import cached @@ -234,18 +236,19 @@ def bootstrap_cubes(self): from logilab.common.textutils import splitstrip - for line in file(join(self.apphome, 'bootstrap_cubes')): - line = line.strip() - if not line or line.startswith('#'): - continue - self.init_cubes(self.expand_cubes(splitstrip(line))) - break - else: - # no cubes - self.init_cubes(()) + with open(join(self.apphome, 'bootstrap_cubes')) as f: + for line in f: + line = line.strip() + if not line or line.startswith('#'): + continue + self.init_cubes(self.expand_cubes(splitstrip(line))) + break + else: + # no cubes + self.init_cubes(()) def write_bootstrap_cubes_file(self, cubes): - stream = file(join(self.apphome, 'bootstrap_cubes'), 'w') + stream = open(join(self.apphome, 'bootstrap_cubes'), 'w') stream.write('# this is a generated file only used for bootstraping\n') stream.write('# you should not have to edit this\n') stream.write('%s\n' % ','.join(cubes)) @@ -276,7 +279,7 @@ assert len(self.sources_mode) == 1 if source.connect_for_migration: return True - print 'not connecting to source', source.uri, 'during migration' + print('not connecting to source', source.uri, 'during migration') return False if 'all' in self.sources_mode: assert len(self.sources_mode) == 1 diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/serverctl.py --- a/server/serverctl.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/serverctl.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,6 +16,7 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . """cubicweb-ctl commands and command handlers specific to the repository""" +from __future__ import print_function __docformat__ = 'restructuredtext en' @@ -28,6 +29,9 @@ import logging import subprocess +from six import string_types +from six.moves import input + from logilab.common import nullobject from logilab.common.configuration import Configuration, merge_options from logilab.common.shellutils import ASK, generate_password @@ -55,27 +59,27 @@ driver = source['db-driver'] dbhelper = get_db_helper(driver) if interactive: - print '-> connecting to %s database' % driver, + print('-> connecting to %s database' % driver, end=' ') if dbhost: - print '%s@%s' % (dbname, dbhost), + print('%s@%s' % (dbname, dbhost), end=' ') else: - print dbname, + print(dbname, end=' ') if dbhelper.users_support: if not interactive or (not special_privs and source.get('db-user')): user = source.get('db-user', os.environ.get('USER', '')) if interactive: - print 'as', user + print('as', user) password = source.get('db-password') else: - print + print() if special_privs: - print 'WARNING' + print('WARNING') print ('the user will need the following special access rights ' 'on the database:') - print special_privs - print + print(special_privs) + print() default_user = source.get('db-user', os.environ.get('USER', '')) - user = raw_input('Connect as user ? [%r]: ' % default_user) + user = input('Connect as user ? [%r]: ' % default_user) user = user.strip() or default_user if user == source.get('db-user'): password = source.get('db-password') @@ -146,7 +150,7 @@ cnx = repoapi.connect(repo, login, password=pwd) return repo, cnx except AuthenticationError: - print '-> Error: wrong user/password.' 
+ print('-> Error: wrong user/password.') # reset cubes else we'll have an assertion error on next retry config._cubes = None login, pwd = manager_userpasswd() @@ -164,9 +168,9 @@ """ config = self.config if not automatic: - print underline_title('Configuring the repository') + print(underline_title('Configuring the repository')) config.input_config('email', inputlevel) - print '\n'+underline_title('Configuring the sources') + print('\n'+underline_title('Configuring the sources')) sourcesfile = config.sources_file() # hack to make Method('default_instance_id') usable in db option defs # (in native.py) @@ -174,12 +178,12 @@ options=SOURCE_TYPES['native'].options) if not automatic: sconfig.input_config(inputlevel=inputlevel) - print + print() sourcescfg = {'system': sconfig} if automatic: # XXX modify a copy password = generate_password() - print '-> set administrator account to admin / %s' % password + print('-> set administrator account to admin / %s' % password) USER_OPTIONS[1][1]['default'] = password sconfig = Configuration(options=USER_OPTIONS) else: @@ -197,8 +201,8 @@ CWCTL.run(['db-create', '--config-level', str(inputlevel), self.config.appid]) else: - print ('-> nevermind, you can do it later with ' - '"cubicweb-ctl db-create %s".' % self.config.appid) + print('-> nevermind, you can do it later with ' + '"cubicweb-ctl db-create %s".' % self.config.appid) @contextmanager @@ -242,26 +246,26 @@ with db_transaction(source, privilege='DROP SCHEMA') as cursor: helper = get_db_helper(source['db-driver']) helper.drop_schema(cursor, db_namespace) - print '-> database schema %s dropped' % db_namespace + print('-> database schema %s dropped' % db_namespace) def _drop_database(self, source): dbname = source['db-name'] if source['db-driver'] == 'sqlite': - print 'deleting database file %(db-name)s' % source + print('deleting database file %(db-name)s' % source) os.unlink(source['db-name']) - print '-> database %(db-name)s dropped.' % source + print('-> database %(db-name)s dropped.' % source) else: helper = get_db_helper(source['db-driver']) with db_sys_transaction(source, privilege='DROP DATABASE') as cursor: - print 'dropping database %(db-name)s' % source + print('dropping database %(db-name)s' % source) cursor.execute('DROP DATABASE "%(db-name)s"' % source) - print '-> database %(db-name)s dropped.' % source + print('-> database %(db-name)s dropped.' % source) def _drop_user(self, source): user = source['db-user'] or None if user is not None: with db_sys_transaction(source, privilege='DROP USER') as cursor: - print 'dropping user %s' % user + print('dropping user %s' % user) cursor.execute('DROP USER %s' % user) def _cleanup_steps(self, source): @@ -288,7 +292,7 @@ try: step(source) except Exception as exc: - print 'ERROR', exc + print('ERROR', exc) if ASK.confirm('An error occurred. Continue anyway?', default_is_yes=False): continue @@ -357,7 +361,7 @@ ASK.confirm('Database %s already exists. Drop it?' % dbname)): os.unlink(dbname) elif self.config.create_db: - print '\n'+underline_title('Creating the system database') + print('\n'+underline_title('Creating the system database')) # connect on the dbms system base to create our base dbcnx = _db_sys_cnx(source, 'CREATE/DROP DATABASE and / or USER', interactive=not automatic) @@ -368,17 +372,17 @@ if not helper.user_exists(cursor, user) and (automatic or \ ASK.confirm('Create db user %s ?' % user, default_is_yes=False)): helper.create_user(source['db-user'], source.get('db-password')) - print '-> user %s created.' 
% user + print('-> user %s created.' % user) if dbname in helper.list_databases(cursor): if automatic or ASK.confirm('Database %s already exists -- do you want to drop it ?' % dbname): cursor.execute('DROP DATABASE "%s"' % dbname) else: - print ('you may want to run "cubicweb-ctl db-init ' - '--drop %s" manually to continue.' % config.appid) + print('you may want to run "cubicweb-ctl db-init ' + '--drop %s" manually to continue.' % config.appid) return createdb(helper, source, dbcnx, cursor) dbcnx.commit() - print '-> database %s created.' % dbname + print('-> database %s created.' % dbname) except BaseException: dbcnx.rollback() raise @@ -400,13 +404,13 @@ try: helper.create_language(cursor, extlang) except Exception as exc: - print '-> ERROR:', exc - print '-> could not create language %s, some stored procedures might be unusable' % extlang + print('-> ERROR:', exc) + print('-> could not create language %s, some stored procedures might be unusable' % extlang) cnx.rollback() else: cnx.commit() - print '-> database for instance %s created and necessary extensions installed.' % appid - print + print('-> database for instance %s created and necessary extensions installed.' % appid) + print() if automatic: CWCTL.run(['db-init', '--automatic', '--config-level', '0', config.appid]) @@ -414,8 +418,8 @@ CWCTL.run(['db-init', '--config-level', str(self.config.config_level), config.appid]) else: - print ('-> nevermind, you can do it later with ' - '"cubicweb-ctl db-init %s".' % config.appid) + print('-> nevermind, you can do it later with ' + '"cubicweb-ctl db-init %s".' % config.appid) class InitInstanceCommand(Command): @@ -452,7 +456,7 @@ def run(self, args): check_options_consistency(self.config) - print '\n'+underline_title('Initializing the system database') + print('\n'+underline_title('Initializing the system database')) from cubicweb.server import init_repository appid = args[0] config = ServerConfiguration.config_for(appid) @@ -503,10 +507,10 @@ used = set(n for n, in cnx.execute('Any SN WHERE S is CWSource, S name SN')) cubes = repo.get_cubes() while True: - type = raw_input('source type (%s): ' + type = input('source type (%s): ' % ', '.join(sorted(SOURCE_TYPES))) if type not in SOURCE_TYPES: - print '-> unknown source type, use one of the available types.' + print('-> unknown source type, use one of the available types.') continue sourcemodule = SOURCE_TYPES[type].module if not sourcemodule.startswith('cubicweb.'): @@ -520,23 +524,23 @@ continue break while True: - parser = raw_input('parser type (%s): ' + parser = input('parser type (%s): ' % ', '.join(sorted(repo.vreg['parsers']))) if parser in repo.vreg['parsers']: break - print '-> unknown parser identifier, use one of the available types.' + print('-> unknown parser identifier, use one of the available types.') while True: - sourceuri = raw_input('source identifier (a unique name used to ' + sourceuri = input('source identifier (a unique name used to ' 'tell sources apart): ').strip() if not sourceuri: - print '-> mandatory.' + print('-> mandatory.') else: sourceuri = unicode(sourceuri, sys.stdin.encoding) if sourceuri in used: - print '-> uri already used, choose another one.' 
+ print('-> uri already used, choose another one.') else: break - url = raw_input('source URL (leave empty for none): ').strip() + url = input('source URL (leave empty for none): ').strip() url = unicode(url) if url else None # XXX configurable inputlevel sconfig = ask_source_config(config, type, inputlevel=self.config.config_level) @@ -583,10 +587,10 @@ cnx.rollback() import traceback traceback.print_exc() - print '-> an error occurred:', ex + print('-> an error occurred:', ex) else: cnx.commit() - print '-> rights granted to %s on instance %s.' % (appid, user) + print('-> rights granted to %s on instance %s.' % (user, appid)) class ResetAdminPasswordCommand(Command): @@ -617,7 +621,7 @@ try: adminlogin = sourcescfg['admin']['login'] except KeyError: - print '-> Error: could not get cubicweb administrator login.' + print('-> Error: could not get cubicweb administrator login.') sys.exit(1) cnx = source_cnx(sourcescfg['system']) driver = sourcescfg['system']['db-driver'] @@ -627,9 +631,9 @@ cursor.execute("SELECT * FROM cw_CWUser WHERE cw_login=%(l)s", {'l': adminlogin}) if not cursor.fetchall(): - print ("-> error: admin user %r specified in sources doesn't exist " - "in the database" % adminlogin) - print " fix your sources file before running this command" + print("-> error: admin user %r specified in sources doesn't exist " + "in the database" % adminlogin) + print(" fix your sources file before running this command") cnx.close() sys.exit(1) if self.config.password is None: @@ -650,10 +654,10 @@ cnx.rollback() import traceback traceback.print_exc() - print '-> an error occurred:', ex + print('-> an error occurred:', ex) else: cnx.commit() - print '-> password reset, sources file regenerated.' + print('-> password reset, sources file regenerated.') cnx.close() @@ -666,17 +670,17 @@ if sudo: dmpcmd = 'sudo %s' % (dmpcmd) dmpcmd = 'ssh -t %s "%s"' % (host, dmpcmd) - print dmpcmd + print(dmpcmd) if os.system(dmpcmd): raise ExecutionError('Error while dumping the database') if output is None: output = filename cmd = 'scp %s:/tmp/%s %s' % (host, filename, output) - print cmd + print(cmd) if os.system(cmd): raise ExecutionError('Error while retrieving the dump at /tmp/%s' % filename) rmcmd = 'ssh -t %s "rm -f /tmp/%s"' % (host, filename) - print rmcmd + print(rmcmd) if os.system(rmcmd) and not ASK.confirm( 'An error occurred while deleting remote dump at /tmp/%s. ' 'Continue anyway?' % filename): @@ -686,7 +690,7 @@ def _local_dump(appid, output, format='native'): config = ServerConfiguration.config_for(appid) config.quick_start = True - mih = config.migration_handler(connect=False, verbosity=1) + mih = config.migration_handler(verbosity=1) mih.backup_database(output, askconfirm=False, format=format) mih.shutdown() @@ -696,28 +700,28 @@ config.quick_start = True mih = config.migration_handler(connect=False, verbosity=1) mih.restore_database(backupfile, drop, askconfirm=False, format=format) - repo = mih.repo_connect() + repo = mih.repo # version of the database dbversions = repo.get_versions() mih.shutdown() if not dbversions: - print "bad or missing version information in the database, don't upgrade file system" + print("bad or missing version information in the database, don't upgrade file system") return # version of installed software eversion = dbversions['cubicweb'] status = instance_status(config, eversion, dbversions) # * database version > installed software if status == 'needsoftupgrade': - print "** The database of %s is more recent than the installed software!"
% config.appid - print "** Upgrade your software, then migrate the database by running the command" - print "** 'cubicweb-ctl upgrade %s'" % config.appid + print("** The database of %s is more recent than the installed software!" % config.appid) + print("** Upgrade your software, then migrate the database by running the command") + print("** 'cubicweb-ctl upgrade %s'" % config.appid) return # * database version < installed software, an upgrade will be necessary # anyway, just rewrite vc.conf and warn user he has to upgrade elif status == 'needapplupgrade': - print "** The database of %s is older than the installed software." % config.appid - print "** Migrate the database by running the command" - print "** 'cubicweb-ctl upgrade %s'" % config.appid + print("** The database of %s is older than the installed software." % config.appid) + print("** Migrate the database by running the command") + print("** 'cubicweb-ctl upgrade %s'" % config.appid) return # * database version = installed software, database version = instance fs version # ok! @@ -732,12 +736,12 @@ try: softversion = config.cube_version(cube) except ConfigurationError: - print '-> Error: no cube version information for %s, please check that the cube is installed.' % cube + print('-> Error: no cube version information for %s, please check that the cube is installed.' % cube) continue try: applversion = vcconf[cube] except KeyError: - print '-> Error: no cube version information for %s in version configuration.' % cube + print('-> Error: no cube version information for %s in version configuration.' % cube) continue if softversion == applversion: continue @@ -883,7 +887,7 @@ _local_restore(destappid, output, not self.config.no_drop, self.config.format) if self.config.keep_dump: - print '-> you can get the dump file at', output + print('-> you can get the dump file at', output) else: os.remove(output) @@ -1001,9 +1005,9 @@ stats = source.pull_data(cnx, force=True, raise_on_error=True) finally: repo.shutdown() - for key, val in stats.iteritems(): + for key, val in stats.items(): if val: - print key, ':', val + print(key, ':', val) @@ -1019,7 +1023,7 @@ for p in ('read', 'add', 'update', 'delete'): rule = perms.get(p) if rule: - perms[p] = tuple(str(x) if isinstance(x, basestring) else x + perms[p] = tuple(str(x) if isinstance(x, string_types) else x for x in rule) return perms, perms in defaultrelperms or perms in defaulteperms @@ -1079,7 +1083,7 @@ if self.config.db is not None: appcfg = ServerConfiguration.config_for(appid) srccfg = appcfg.read_sources_file() - for key, value in self.config.db.iteritems(): + for key, value in self.config.db.items(): if '.' in key: section, key = key.split('.', 1) else: diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/session.py --- a/server/session.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/session.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,6 +16,8 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
"""Repository users' and internal' sessions.""" +from __future__ import print_function + __docformat__ = "restructuredtext en" import sys @@ -25,6 +27,8 @@ import functools from contextlib import contextmanager +from six import text_type + from logilab.common.deprecation import deprecated from logilab.common.textutils import unormalize from logilab.common.registry import objectify_predicate @@ -556,7 +560,7 @@ else: relations_dict[rtype] = eids self.repo.glob_add_relations(self, relations_dict) - for edited in edited_entities.itervalues(): + for edited in edited_entities.values(): self.repo.glob_update_entity(self, edited) @@ -769,7 +773,7 @@ def transaction_uuid(self, set=True): uuid = self.transaction_data.get('tx_uuid') if set and uuid is None: - self.transaction_data['tx_uuid'] = uuid = unicode(uuid4().hex) + self.transaction_data['tx_uuid'] = uuid = text_type(uuid4().hex) self.repo.system_source.start_undoable_transaction(self, uuid) return uuid @@ -874,7 +878,7 @@ processed = [] self.commit_state = 'precommit' if debug: - print self.commit_state, '*' * 20 + print(self.commit_state, '*' * 20) try: with self.running_hooks_ops(): while self.pending_operations: @@ -882,7 +886,7 @@ operation.processed = 'precommit' processed.append(operation) if debug: - print operation + print(operation) operation.handle_event('precommit_event') self.pending_operations[:] = processed self.debug('precommit transaction %s done', self.connectionid) @@ -899,11 +903,11 @@ # and revertcommit, that will be enough in mont case. operation.failed = True if debug: - print self.commit_state, '*' * 20 + print(self.commit_state, '*' * 20) with self.running_hooks_ops(): for operation in reversed(processed): if debug: - print operation + print(operation) try: operation.handle_event('revertprecommit_event') except BaseException: @@ -917,12 +921,12 @@ self.cnxset.commit() self.commit_state = 'postcommit' if debug: - print self.commit_state, '*' * 20 + print(self.commit_state, '*' * 20) with self.running_hooks_ops(): while self.pending_operations: operation = self.pending_operations.pop(0) if debug: - print operation + print(operation) operation.processed = 'postcommit' try: operation.handle_event('postcommit_event') @@ -1004,7 +1008,7 @@ """ def __init__(self, user, repo, cnxprops=None, _id=None): - self.sessionid = _id or make_uid(unormalize(user.login).encode('UTF8')) + self.sessionid = _id or make_uid(unormalize(user.login)) self.user = user # XXX repoapi: deprecated and store only a login. self.repo = repo self.vreg = repo.vreg diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/sources/__init__.py --- a/server/sources/__init__.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/sources/__init__.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,13 +16,18 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
"""cubicweb server sources support""" +from __future__ import print_function __docformat__ = "restructuredtext en" from time import time from logging import getLogger +from base64 import b64decode + +from six import text_type from logilab.common import configuration +from logilab.common.textutils import unormalize from logilab.common.deprecation import deprecated from yams.schema import role_name @@ -35,25 +40,25 @@ def dbg_st_search(uri, union, varmap, args, cachekey=None, prefix='rql for'): if server.DEBUG & server.DBG_RQL: global t - print ' %s %s source: %s' % (prefix, uri, repr(union.as_string())) + print(' %s %s source: %s' % (prefix, uri, repr(union.as_string()))) t = time() if varmap: - print ' using varmap', varmap + print(' using varmap', varmap) if server.DEBUG & server.DBG_MORE: - print ' args', repr(args) - print ' cache key', cachekey - print ' solutions', ','.join(str(s.solutions) - for s in union.children) + print(' args', repr(args)) + print(' cache key', cachekey) + print(' solutions', ','.join(str(s.solutions) + for s in union.children)) # return true so it can be used as assertion (and so be killed by python -O) return True def dbg_results(results): if server.DEBUG & server.DBG_RQL: if len(results) > 10: - print ' -->', results[:10], '...', len(results), + print(' -->', results[:10], '...', len(results), end=' ') else: - print ' -->', results, - print 'time: ', time() - t + print(' -->', results, end=' ') + print('time: ', time() - t) # return true so it can be used as assertion (and so be killed by python -O) return True @@ -104,7 +109,9 @@ self.public_config['use-cwuri-as-url'] = self.use_cwuri_as_url self.remove_sensitive_information(self.public_config) self.uri = source_config.pop('uri') - set_log_methods(self, getLogger('cubicweb.sources.'+self.uri)) + # unormalize to avoid non-ascii characters in logger's name, this will cause decoding error + # on logging + set_log_methods(self, getLogger('cubicweb.sources.' + unormalize(text_type(self.uri)))) source_config.pop('type') self.update_config(None, self.check_conf_dict(eid, source_config, fail_if_unknown=False)) @@ -140,7 +147,7 @@ pass @classmethod - def check_conf_dict(cls, eid, confdict, _=unicode, fail_if_unknown=True): + def check_conf_dict(cls, eid, confdict, _=text_type, fail_if_unknown=True): """check configuration of source entity. Return config dict properly typed with defaults set. 
""" @@ -157,7 +164,7 @@ try: value = configuration._validate(value, optdict, optname) except Exception as ex: - msg = unicode(ex) # XXX internationalization + msg = text_type(ex) # XXX internationalization raise ValidationError(eid, {role_name('config', 'subject'): msg}) processed[optname] = value # cw < 3.10 bw compat @@ -199,6 +206,12 @@ else: self.urls = [] + @staticmethod + def decode_extid(extid): + if extid is None: + return extid + return b64decode(extid) + # source initialization / finalization ##################################### def set_schema(self, schema): diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/sources/datafeed.py --- a/server/sources/datafeed.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/sources/datafeed.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,15 +19,21 @@ database """ -import urllib2 -import StringIO +from io import BytesIO from os.path import exists from datetime import datetime, timedelta -from base64 import b64decode -from cookielib import CookieJar -import urlparse + +from six import text_type +from six.moves.urllib.parse import urlparse +from six.moves.urllib.request import Request, build_opener, HTTPCookieProcessor +from six.moves.urllib.error import HTTPError +from six.moves.http_cookiejar import CookieJar + +from pytz import utc from lxml import etree +from logilab.common.deprecation import deprecated + from cubicweb import RegistryNotFound, ObjectNotFound, ValidationError, UnknownEid from cubicweb.server.repository import preprocess_inlined_relations from cubicweb.server.sources import AbstractSource @@ -157,10 +163,10 @@ def fresh(self): if self.latest_retrieval is None: return False - return datetime.utcnow() < (self.latest_retrieval + self.synchro_interval) + return datetime.now(tz=utc) < (self.latest_retrieval + self.synchro_interval) def update_latest_retrieval(self, cnx): - self.latest_retrieval = datetime.utcnow() + self.latest_retrieval = datetime.now(tz=utc) cnx.execute('SET X latest_retrieval %(date)s WHERE X eid %(x)s', {'x': self.eid, 'date': self.latest_retrieval}) cnx.commit() @@ -168,7 +174,7 @@ def acquire_synchronization_lock(self, cnx): # XXX race condition until WHERE of SET queries is executed using # 'SELECT FOR UPDATE' - now = datetime.utcnow() + now = datetime.now(tz=utc) if not cnx.execute( 'SET X in_synchronization %(now)s WHERE X eid %(x)s, ' 'X in_synchronization NULL OR X in_synchronization < %(maxdt)s', @@ -244,6 +250,7 @@ error = True return error + @deprecated('[3.21] use the new store API') def before_entity_insertion(self, cnx, lid, etype, eid, sourceparams): """called by the repository when an eid has been attributed for an entity stored here but the entity has not been inserted in the system @@ -259,6 +266,7 @@ sourceparams['parser'].before_entity_copy(entity, sourceparams) return entity + @deprecated('[3.21] use the new store API') def after_entity_insertion(self, cnx, lid, entity, sourceparams): """called by the repository after an entity stored here has been inserted in the system table. 
@@ -282,13 +290,13 @@ sql = ('SELECT extid, eid, type FROM entities, cw_source_relation ' 'WHERE entities.eid=cw_source_relation.eid_from ' 'AND cw_source_relation.eid_to=%s' % self.eid) - return dict((b64decode(uri), (eid, type)) + return dict((self.decode_extid(uri), (eid, type)) for uri, eid, type in cnx.system_sql(sql).fetchall()) def init_import_log(self, cnx, **kwargs): dataimport = cnx.create_entity('CWDataImport', cw_import_of=self, - start_timestamp=datetime.utcnow(), - **kwargs) + start_timestamp=datetime.now(tz=utc), + **kwargs) dataimport.init() return dataimport @@ -328,7 +336,7 @@ For http URLs, it will try to find a cwclientlib config entry (if available) and use it as requester. """ - purl = urlparse.urlparse(url) + purl = urlparse(url) if purl.scheme == 'file': return URLLibResponseAdapter(open(url[7:]), url) @@ -344,7 +352,7 @@ self.source.info('Using cwclientlib for %s' % url) resp = cnx.get(url) resp.raise_for_status() - return URLLibResponseAdapter(StringIO.StringIO(resp.text), url) + return URLLibResponseAdapter(BytesIO(resp.content), url) except (ImportError, ValueError, EnvironmentError) as exc: # ImportError: not available # ValueError: no config entry found @@ -354,11 +362,11 @@ # no chance with cwclientlib, fall back to former implementation if purl.scheme in ('http', 'https'): self.source.info('GET %s', url) - req = urllib2.Request(url) + req = Request(url) return _OPENER.open(req, timeout=self.source.http_timeout) # url is probably plain content - return URLLibResponseAdapter(StringIO.StringIO(url), url) + return URLLibResponseAdapter(BytesIO(url.encode('ascii')), url) def add_schema_config(self, schemacfg, checkonly=False): """added CWSourceSchemaConfig, modify mapping accordingly""" @@ -370,6 +378,7 @@ msg = schemacfg._cw._("this parser doesn't use a mapping") raise ValidationError(schemacfg.eid, {None: msg}) + @deprecated('[3.21] use the new store API') def extid2entity(self, uri, etype, **sourceparams): """Return an entity for the given uri. May return None if it should be skipped. 
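
A hedged round-trip sketch (names illustrative) of the extid convention the hunks above rely on: external ids are raw bytes, base64-encoded into the text extid column on write (see the native source below) and decoded back through the new decode_extid() helper on read.

from base64 import b64encode, b64decode

extid = u'http://example.org/user/42'.encode('utf-8')  # extids are raw bytes
stored = b64encode(extid).decode('ascii')              # text stored in the extid column
assert b64decode(stored) == extid                      # what decode_extid() recovers
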
@@ -388,11 +397,11 @@ else: source = self.source sourceparams['parser'] = self - if isinstance(uri, unicode): + if isinstance(uri, text_type): uri = uri.encode('utf-8') try: - eid = cnx.repo.extid2eid(source, str(uri), etype, cnx, - sourceparams=sourceparams) + eid = cnx.repo.extid2eid(source, uri, etype, cnx, + sourceparams=sourceparams) except ValidationError as ex: if raise_on_error: raise @@ -419,9 +428,11 @@ """main callback: process the url""" raise NotImplementedError + @deprecated('[3.21] use the new store API') def before_entity_copy(self, entity, sourceparams): raise NotImplementedError + @deprecated('[3.21] use the new store API') def after_entity_copy(self, entity, sourceparams): self.stats['created'].add(entity.eid) @@ -447,10 +458,10 @@ def handle_deletion(self, config, cnx, myuris): if config['delete-entities'] and myuris: byetype = {} - for extid, (eid, etype) in myuris.iteritems(): + for extid, (eid, etype) in myuris.items(): if self.is_deleted(extid, etype, eid): byetype.setdefault(etype, []).append(str(eid)) - for etype, eids in byetype.iteritems(): + for etype, eids in byetype.items(): self.warning('delete %s %s entities', len(eids), etype) cnx.execute('DELETE %s X WHERE X eid IN (%s)' % (etype, ','.join(eids))) @@ -463,7 +474,7 @@ self.notify_checked(entity) mdate = attrs.get('modification_date') if not mdate or mdate > entity.modification_date: - attrs = dict( (k, v) for k, v in attrs.iteritems() + attrs = dict( (k, v) for k, v in attrs.items() if v != getattr(entity, k)) if attrs: entity.cw_set(**attrs) @@ -472,6 +483,7 @@ class DataFeedXMLParser(DataFeedParser): + @deprecated() def process(self, url, raise_on_error=False): """IDataFeedParser main entry point""" try: @@ -481,23 +493,9 @@ raise self.import_log.record_error(str(ex)) return True - error = False - commit = self._cw.commit - rollback = self._cw.rollback for args in parsed: - try: - self.process_item(*args, raise_on_error=raise_on_error) - # commit+set_cnxset instead of commit(free_cnxset=False) to let - # other a chance to get our connections set - commit() - except ValidationError as exc: - if raise_on_error: - raise - self.source.error('Skipping %s because of validation error %s' - % (args, exc)) - rollback() - error = True - return error + self.process_item(*args, raise_on_error=raise_on_error) + return False def parse(self, url): stream = self.retrieve_url(url) @@ -530,10 +528,10 @@ self.source.debug(str(exc)) # no chance with cwclientlib, fall back to former implementation - if urlparse.urlparse(url).scheme in ('http', 'https'): + if urlparse(url).scheme in ('http', 'https'): try: _OPENER.open(url, timeout=self.source.http_timeout) - except urllib2.HTTPError as ex: + except HTTPError as ex: if ex.code == 404: return True return False @@ -555,15 +553,12 @@ def getcode(self): return self.code - def info(self): - from mimetools import Message - return Message(StringIO.StringIO()) # use a cookie enabled opener to use session cookie if any -_OPENER = urllib2.build_opener() +_OPENER = build_opener() try: from logilab.common import urllib2ext _OPENER.add_handler(urllib2ext.HTTPGssapiAuthHandler()) except ImportError: # python-kerberos not available pass -_OPENER.add_handler(urllib2.HTTPCookieProcessor(CookieJar())) +_OPENER.add_handler(HTTPCookieProcessor(CookieJar())) diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/sources/ldapfeed.py --- a/server/sources/ldapfeed.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/sources/ldapfeed.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2013 LOGILAB 
S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -17,14 +17,13 @@ # with CubicWeb. If not, see . """cubicweb ldap feed source""" -from __future__ import division # XXX why? +from __future__ import division # XXX why? from datetime import datetime -import ldap -from ldap.ldapobject import ReconnectLDAPObject -from ldap.filter import filter_format -from ldapurl import LDAPUrl +from six import PY2, string_types + +import ldap3 from logilab.common.configuration import merge_options @@ -32,15 +31,15 @@ from cubicweb.server import utils from cubicweb.server.sources import datafeed -_ = unicode +from cubicweb import _ # search scopes -BASE = ldap.SCOPE_BASE -ONELEVEL = ldap.SCOPE_ONELEVEL -SUBTREE = ldap.SCOPE_SUBTREE -LDAP_SCOPES = {'BASE': ldap.SCOPE_BASE, - 'ONELEVEL': ldap.SCOPE_ONELEVEL, - 'SUBTREE': ldap.SCOPE_SUBTREE} +BASE = ldap3.SEARCH_SCOPE_BASE_OBJECT +ONELEVEL = ldap3.SEARCH_SCOPE_SINGLE_LEVEL +SUBTREE = ldap3.SEARCH_SCOPE_WHOLE_SUBTREE +LDAP_SCOPES = {'BASE': BASE, + 'ONELEVEL': ONELEVEL, + 'SUBTREE': SUBTREE} # map ldap protocol to their standard port PROTO_PORT = {'ldap': 389, @@ -49,6 +48,17 @@ } +def replace_filter(s): + # escape the backslash first (RFC 4515): escaping it last would re-escape + # the '\\2A', '\\28' and '\\29' sequences inserted by the lines below + s = s.replace('\\', '\\5c') + s = s.replace('*', '\\2A') + s = s.replace('(', '\\28') + s = s.replace(')', '\\29') + s = s.replace('\0', '\\00') + return s + + class LDAPFeedSource(datafeed.DataFeedSource): """LDAP feed source: unlike ldapuser source, this source is copy based and will import ldap content (beside passwords for authentication) into the @@ -61,7 +69,7 @@ ('auth-mode', {'type' : 'choice', 'default': 'simple', - 'choices': ('simple', 'cram_md5', 'digest_md5', 'gssapi'), + 'choices': ('simple', 'digest_md5', 'gssapi'), 'help': 'authentication mode used to authenticate user to the ldap.', 'group': 'ldap-source', 'level': 3, }), @@ -183,8 +191,8 @@ self.user_default_groups = typedconfig['user-default-group'] self.user_attrs = {'dn': 'eid', 'modifyTimestamp': 'modification_date'} self.user_attrs.update(typedconfig['user-attrs-map']) - self.user_rev_attrs = dict((v, k) for k, v in self.user_attrs.iteritems()) - self.base_filters = [filter_format('(%s=%s)', ('objectClass', o)) + self.user_rev_attrs = dict((v, k) for k, v in self.user_attrs.items()) + self.base_filters = ['(objectClass=%s)' % replace_filter(o) for o in typedconfig['user-classes']] if typedconfig['user-filter']: self.base_filters.append(typedconfig['user-filter']) @@ -193,8 +201,8 @@ self.group_attrs = typedconfig['group-attrs-map'] self.group_attrs = {'dn': 'eid', 'modifyTimestamp': 'modification_date'} self.group_attrs.update(typedconfig['group-attrs-map']) - self.group_rev_attrs = dict((v, k) for k, v in self.group_attrs.iteritems()) - self.group_base_filters = [filter_format('(%s=%s)', ('objectClass', o)) + self.group_rev_attrs = dict((v, k) for k, v in self.group_attrs.items()) + self.group_base_filters = ['(objectClass=%s)' % replace_filter(o) for o in typedconfig['group-classes']] if typedconfig['group-filter']: self.group_base_filters.append(typedconfig['group-filter']) @@ -215,9 +223,11 @@ def connection_info(self): assert len(self.urls) == 1, self.urls protocol, hostport = self.urls[0].split('://') - if protocol != 'ldapi' and not ':' in hostport: - hostport = '%s:%s' % (hostport, PROTO_PORT[protocol]) - return protocol, hostport + if protocol != 'ldapi' and ':' in hostport: + host, port = hostport.rsplit(':', 1) +
else: + host, port = hostport, PROTO_PORT[protocol] + return protocol, host, port def authenticate(self, cnx, login, password=None, **kwargs): """return CWUser eid for the given login/password if this account is @@ -232,87 +242,69 @@ # You get Authenticated as: 'NT AUTHORITY\ANONYMOUS LOGON'. # we really really don't want that raise AuthenticationError() - searchfilter = [filter_format('(%s=%s)', (self.user_login_attr, login))] + searchfilter = ['(%s=%s)' % (replace_filter(self.user_login_attr), replace_filter(login))] searchfilter.extend(self.base_filters) searchstr = '(&%s)' % ''.join(searchfilter) # first search the user try: user = self._search(cnx, self.user_base_dn, self.user_base_scope, searchstr)[0] - except (IndexError, ldap.SERVER_DOWN): + except IndexError: # no such user raise AuthenticationError() # check password by establishing a (unused) connection try: self._connect(user, password) - except ldap.LDAPError as ex: + except ldap3.LDAPException as ex: # Something went wrong, most likely bad credentials self.info('while trying to authenticate %s: %s', user, ex) raise AuthenticationError() except Exception: self.error('while trying to authenticate %s', user, exc_info=True) raise AuthenticationError() - eid = self.repo.extid2eid(self, user['dn'], 'CWUser', cnx, insert=False) - if eid < 0: - # user has been moved away from this source + eid = self.repo.system_source.extid2eid(cnx, user['dn'].encode('ascii')) + if eid is None or eid < 0: + # user is not known or has been moved away from this source raise AuthenticationError() return eid def _connect(self, user=None, userpwd=None): - protocol, hostport = self.connection_info() - self.info('connecting %s://%s as %s', protocol, hostport, + protocol, host, port = self.connection_info() + self.info('connecting %s://%s:%s as %s', protocol, host, port, user and user['dn'] or 'anonymous') - # don't require server certificate when using ldaps (will - # enable self signed certs) - ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER) - url = LDAPUrl(urlscheme=protocol, hostport=hostport) - conn = ReconnectLDAPObject(url.initializeUrl()) - # Set the protocol version - version 3 is preferred - try: - conn.set_option(ldap.OPT_PROTOCOL_VERSION, ldap.VERSION3) - except ldap.LDAPError: # Invalid protocol version, fall back safely - conn.set_option(ldap.OPT_PROTOCOL_VERSION, ldap.VERSION2) - # Deny auto-chasing of referrals to be safe, we handle them instead - # Required for AD - try: - conn.set_option(ldap.OPT_REFERRALS, 0) - except ldap.LDAPError: # Cannot set referrals, so do nothing - pass - #conn.set_option(ldap.OPT_NETWORK_TIMEOUT, conn_timeout) - #conn.timeout = op_timeout + server = ldap3.Server(host, port=int(port)) + conn = ldap3.Connection(server, user=user and user['dn'], client_strategy=ldap3.STRATEGY_SYNC_RESTARTABLE, auto_referrals=False) # Now bind with the credentials given. Let exceptions propagate out. 
if user is None: # XXX always use simple bind for data connection if not self.cnx_dn: - conn.simple_bind_s(self.cnx_dn, self.cnx_pwd) + conn.bind() else: self._authenticate(conn, {'dn': self.cnx_dn}, self.cnx_pwd) else: # user specified, we want to check user/password, no need to return # the connection which will be thrown out - self._authenticate(conn, user, userpwd) + if not self._authenticate(conn, user, userpwd): + raise AuthenticationError() return conn def _auth_simple(self, conn, user, userpwd): - conn.simple_bind_s(user['dn'], userpwd) - - def _auth_cram_md5(self, conn, user, userpwd): - from ldap import sasl - auth_token = sasl.cram_md5(user['dn'], userpwd) - conn.sasl_interactive_bind_s('', auth_token) + conn.authentication = ldap3.AUTH_SIMPLE + conn.user = user['dn'] + conn.password = userpwd + return conn.bind() def _auth_digest_md5(self, conn, user, userpwd): - from ldap import sasl - auth_token = sasl.digest_md5(user['dn'], userpwd) - conn.sasl_interactive_bind_s('', auth_token) + conn.authentication = ldap3.AUTH_SASL + conn.sasl_mechanism = 'DIGEST-MD5' + # realm, user, password, authz-id + conn.sasl_credentials = (None, user['dn'], userpwd, None) + return conn.bind() def _auth_gssapi(self, conn, user, userpwd): - # print XXX not proper sasl/gssapi - import kerberos - if not kerberos.checkPassword(user[self.user_login_attr], userpwd): - raise Exception('BAD login / mdp') - #from ldap import sasl - #conn.sasl_interactive_bind_s('', sasl.gssapi()) + conn.authentication = ldap3.AUTH_SASL + conn.sasl_mechanism = 'GSSAPI' + return conn.bind() def _search(self, cnx, base, scope, searchstr='(objectClass=*)', attrs=()): @@ -322,37 +314,15 @@ if self._conn is None: self._conn = self._connect() ldapcnx = self._conn - try: - res = ldapcnx.search_s(base, scope, searchstr, attrs) - except ldap.PARTIAL_RESULTS: - res = ldapcnx.result(all=0)[1] - except ldap.NO_SUCH_OBJECT: - self.info('ldap NO SUCH OBJECT %s %s %s', base, scope, searchstr) - self._process_no_such_object(cnx, base) + if not ldapcnx.search(base, searchstr, search_scope=scope, attributes=attrs): return [] - # except ldap.REFERRAL as e: - # ldapcnx = self.handle_referral(e) - # try: - # res = ldapcnx.search_s(base, scope, searchstr, attrs) - # except ldap.PARTIAL_RESULTS: - # res_type, res = ldapcnx.result(all=0) result = [] - for rec_dn, rec_dict in res: - # When used against Active Directory, "rec_dict" may not be - # be a dictionary in some cases (instead, it can be a list) - # - # An example of a useless "res" entry that can be ignored - # from AD is - # (None, ['ldap://ForestDnsZones.PORTAL.LOCAL/DC=ForestDnsZones,DC=PORTAL,DC=LOCAL']) - # This appears to be some sort of internal referral, but - # we can't handle it, so we need to skip over it. 
- try: - items = rec_dict.iteritems() - except AttributeError: + for rec in ldapcnx.response: + if rec['type'] != 'searchResEntry': continue - else: - itemdict = self._process_ldap_item(rec_dn, items) - result.append(itemdict) + items = rec['attributes'].items() + itemdict = self._process_ldap_item(rec['dn'], items) + result.append(itemdict) self.debug('ldap built results %s', len(result)) return result @@ -363,20 +333,21 @@ if self.user_attrs.get(key) == 'upassword': # XXx better password detection value = value[0].encode('utf-8') # we only support ldap_salted_sha1 for ldap sources, see: server/utils.py - if not value.startswith('{SSHA}'): + if not value.startswith(b'{SSHA}'): value = utils.crypt_password(value) itemdict[key] = Binary(value) elif self.user_attrs.get(key) == 'modification_date': itemdict[key] = datetime.strptime(value[0], '%Y%m%d%H%M%SZ') else: - value = [unicode(val, 'utf-8', 'replace') for val in value] + if PY2 and value and isinstance(value[0], str): + value = [unicode(val, 'utf-8', 'replace') for val in value] if len(value) == 1: itemdict[key] = value = value[0] else: itemdict[key] = value # we expect memberUid to be a list of user ids, make sure of it member = self.group_rev_attrs['member'] - if isinstance(itemdict.get(member), basestring): + if isinstance(itemdict.get(member), string_types): itemdict[member] = [itemdict[member]] return itemdict diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/sources/native.py --- a/server/sources/native.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/sources/native.py Thu Jun 16 14:19:20 2016 +0200 @@ -23,13 +23,13 @@ string. This is because it should actually be Bytes but we want an index on it for fast querying. """ +from __future__ import print_function + __docformat__ = "restructuredtext en" -from cPickle import loads, dumps -import cPickle as pickle from threading import Lock from datetime import datetime -from base64 import b64decode, b64encode +from base64 import b64encode from contextlib import contextmanager from os.path import basename import re @@ -38,6 +38,9 @@ import logging import sys +from six import PY2, text_type, binary_type, string_types +from six.moves import range, cPickle as pickle + from logilab.common.decorators import cached, clear_cache from logilab.common.configuration import Method from logilab.common.shellutils import getlogin, ASK @@ -53,7 +56,7 @@ from cubicweb.cwconfig import CubicWebNoAppConfiguration from cubicweb.server import hook from cubicweb.server import schema2sql as y2sql -from cubicweb.server.utils import crypt_password, eschema_eid, verify_and_update +from cubicweb.server.utils import crypt_password, verify_and_update from cubicweb.server.sqlutils import SQL_PREFIX, SQLAdapterMixIn from cubicweb.server.rqlannotation import set_qdata from cubicweb.server.hook import CleanupDeletedEidsCacheOp @@ -76,12 +79,12 @@ it's a function just so that it shows up in profiling """ if server.DEBUG & server.DBG_SQL: - print 'exec', query, args + print('exec', query, args) try: self.cu.execute(str(query), args) except Exception as ex: - print "sql: %r\n args: %s\ndbms message: %r" % ( - query, args, ex.args[0]) + print("sql: %r\n args: %s\ndbms message: %r" % ( + query, args, ex.args[0])) raise def fetchall(self): @@ -116,12 +119,9 @@ """return backend type and a boolean flag if NULL values should be allowed for a given relation definition """ - if rdef.object.final: - ttype = rdef.object - else: - ttype = 'Int' # eid type - coltype = y2sql.type_from_constraints(dbhelper, ttype, - rdef.constraints, 
creating=False) + if not rdef.object.final: + return dbhelper.TYPE_MAPPING['Int'] + coltype = y2sql.type_from_rdef(dbhelper, rdef, creating=False) allownull = rdef.cardinality[0] != '1' return coltype, allownull @@ -134,7 +134,7 @@ Type of _UndoException message must be `unicode` by design in CubicWeb. """ - assert isinstance(self.args[0], unicode) + assert isinstance(self.args[0], text_type) return self.args[0] @@ -556,7 +556,7 @@ sql, qargs, cbs = self._rql_sqlgen.generate(union, args, varmap) self._cache[cachekey] = sql, qargs, cbs args = self.merge_args(args, qargs) - assert isinstance(sql, basestring), repr(sql) + assert isinstance(sql, string_types), repr(sql) cursor = self.doexec(cnx, sql, args) results = self.process_result(cursor, cnx, cbs) assert dbg_results(results) @@ -611,7 +611,7 @@ self.doexec(cnx, sql, attrs) if cnx.ertype_supports_undo(entity.cw_etype): self._record_tx_action(cnx, 'tx_entity_actions', u'C', - etype=unicode(entity.cw_etype), eid=entity.eid) + etype=text_type(entity.cw_etype), eid=entity.eid) def update_entity(self, cnx, entity): """replace an entity in the source""" @@ -620,8 +620,8 @@ if cnx.ertype_supports_undo(entity.cw_etype): changes = self._save_attrs(cnx, entity, attrs) self._record_tx_action(cnx, 'tx_entity_actions', u'U', - etype=unicode(entity.cw_etype), eid=entity.eid, - changes=self._binary(dumps(changes))) + etype=text_type(entity.cw_etype), eid=entity.eid, + changes=self._binary(pickle.dumps(changes))) sql = self.sqlgen.update(SQL_PREFIX + entity.cw_etype, attrs, ['cw_eid']) self.doexec(cnx, sql, attrs) @@ -635,8 +635,8 @@ if (r.final or r.inlined) and not r in VIRTUAL_RTYPES] changes = self._save_attrs(cnx, entity, attrs) self._record_tx_action(cnx, 'tx_entity_actions', u'D', - etype=unicode(entity.cw_etype), eid=entity.eid, - changes=self._binary(dumps(changes))) + etype=text_type(entity.cw_etype), eid=entity.eid, + changes=self._binary(pickle.dumps(changes))) attrs = {'cw_eid': entity.eid} sql = self.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs) self.doexec(cnx, sql, attrs) @@ -646,7 +646,7 @@ self._add_relations(cnx, rtype, [(subject, object)], inlined) if cnx.ertype_supports_undo(rtype): self._record_tx_action(cnx, 'tx_relation_actions', u'A', - eid_from=subject, rtype=unicode(rtype), eid_to=object) + eid_from=subject, rtype=text_type(rtype), eid_to=object) def add_relations(self, cnx, rtype, subj_obj_list, inlined=False): """add a relations to the source""" @@ -654,7 +654,7 @@ if cnx.ertype_supports_undo(rtype): for subject, object in subj_obj_list: self._record_tx_action(cnx, 'tx_relation_actions', u'A', - eid_from=subject, rtype=unicode(rtype), eid_to=object) + eid_from=subject, rtype=text_type(rtype), eid_to=object) def _add_relations(self, cnx, rtype, subj_obj_list, inlined=False): """add a relation to the source""" @@ -671,7 +671,7 @@ etypes[etype].append((subject, object)) else: etypes[etype] = [(subject, object)] - for subj_etype, subj_obj_list in etypes.iteritems(): + for subj_etype, subj_obj_list in etypes.items(): attrs = [{'cw_eid': subject, SQL_PREFIX + rtype: object} for subject, object in subj_obj_list] sql.append((self.sqlgen.update(SQL_PREFIX + etype, attrs[0], @@ -686,7 +686,7 @@ self._delete_relation(cnx, subject, rtype, object, rschema.inlined) if cnx.ertype_supports_undo(rtype): self._record_tx_action(cnx, 'tx_relation_actions', u'R', - eid_from=subject, rtype=unicode(rtype), eid_to=object) + eid_from=subject, rtype=text_type(rtype), eid_to=object) def _delete_relation(self, cnx, subject, rtype, object, 
inlined=False): """delete a relation from the source""" @@ -708,7 +708,7 @@ """ cursor = cnx.cnxset.cu if server.DEBUG & server.DBG_SQL: - print 'exec', query, args, cnx.cnxset.cnx + print('exec', query, args, cnx.cnxset.cnx) try: # str(query) to avoid error if it's a unicode string cursor.execute(str(query), args) @@ -767,7 +767,7 @@ it's a function just so that it shows up in profiling """ if server.DEBUG & server.DBG_SQL: - print 'execmany', query, 'with', len(args), 'arguments', cnx.cnxset.cnx + print('execmany', query, 'with', len(args), 'arguments', cnx.cnxset.cnx) cursor = cnx.cnxset.cu try: # str(query) to avoid error if it's a unicode string @@ -852,10 +852,9 @@ """return a tuple (type, extid, source) for the entity with id """ sql = 'SELECT type, extid, asource FROM entities WHERE eid=%s' % eid res = self._eid_type_source(cnx, eid, sql) - if res[-2] is not None: - if not isinstance(res, list): - res = list(res) - res[-2] = b64decode(res[-2]) + if not isinstance(res, list): + res = list(res) + res[-2] = self.decode_extid(res[-2]) return res def eid_type_source_pre_131(self, cnx, eid): @@ -864,15 +863,14 @@ res = self._eid_type_source(cnx, eid, sql) if not isinstance(res, list): res = list(res) - if res[-1] is not None: - res[-1] = b64decode(res[-1]) + res[-1] = self.decode_extid(res[-1]) res.append("system") return res def extid2eid(self, cnx, extid): """get eid from an external id. Return None if no record found.""" - assert isinstance(extid, str) - args = {'x': b64encode(extid)} + assert isinstance(extid, binary_type) + args = {'x': b64encode(extid).decode('ascii')} cursor = self.doexec(cnx, 'SELECT eid FROM entities WHERE extid=%(x)s', args) @@ -911,23 +909,20 @@ assert cnx.cnxset is not None # begin by inserting eid/type/source/extid into the entities table if extid is not None: - assert isinstance(extid, str) - extid = b64encode(extid) - attrs = {'type': unicode(entity.cw_etype), 'eid': entity.eid, 'extid': extid and unicode(extid), - 'asource': unicode(source.uri)} + assert isinstance(extid, binary_type) + extid = b64encode(extid).decode('ascii') + attrs = {'type': text_type(entity.cw_etype), 'eid': entity.eid, 'extid': extid, + 'asource': text_type(source.uri)} self._handle_insert_entity_sql(cnx, self.sqlgen.insert('entities', attrs), attrs) # insert core relations: is, is_instance_of and cw_source - try: + + if entity.e_schema.eid is not None: # else schema has not yet been serialized self._handle_is_relation_sql(cnx, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)', - (entity.eid, eschema_eid(cnx, entity.e_schema))) - except IndexError: - # during schema serialization, skip - pass - else: + (entity.eid, entity.e_schema.eid)) for eschema in entity.e_schema.ancestors() + [entity.e_schema]: self._handle_is_relation_sql(cnx, 'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)', - (entity.eid, eschema_eid(cnx, eschema))) + (entity.eid, eschema.eid)) if 'CWSource' in self.schema and source.eid is not None: # else, cw < 3.10 self._handle_is_relation_sql(cnx, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)', (entity.eid, source.eid)) @@ -975,13 +970,13 @@ if actionfilters.pop('public', True): genrestr['txa_public'] = True # put additional filters in trarestr and/or tearestr - for key, val in actionfilters.iteritems(): + for key, val in actionfilters.items(): if key == 'etype': # filtering on etype implies filtering on entity actions # only, and with no eid specified assert actionfilters.get('action', 'C') in 'CUD' assert not 'eid' in 
actionfilters - tearestr['etype'] = unicode(val) + tearestr['etype'] = text_type(val) elif key == 'eid': # eid filter may apply to 'eid' of tx_entity_actions or to # 'eid_from' OR 'eid_to' of tx_relation_actions @@ -992,10 +987,10 @@ trarestr['eid_to'] = val elif key == 'action': if val in 'CUD': - tearestr['txa_action'] = unicode(val) + tearestr['txa_action'] = text_type(val) else: assert val in 'AR' - trarestr['txa_action'] = unicode(val) + trarestr['txa_action'] = text_type(val) else: raise AssertionError('unknown filter %s' % key) assert trarestr or tearestr, "can't only filter on 'public'" @@ -1029,11 +1024,11 @@ def tx_info(self, cnx, txuuid): """See :class:`cubicweb.repoapi.Connection.transaction_info`""" - return tx.Transaction(cnx, txuuid, *self._tx_info(cnx, unicode(txuuid))) + return tx.Transaction(cnx, txuuid, *self._tx_info(cnx, text_type(txuuid))) def tx_actions(self, cnx, txuuid, public): """See :class:`cubicweb.repoapi.Connection.transaction_actions`""" - txuuid = unicode(txuuid) + txuuid = text_type(txuuid) self._tx_info(cnx, txuuid) restr = {'tx_uuid': txuuid} if public: @@ -1044,7 +1039,7 @@ 'etype', 'eid', 'changes')) with cnx.ensure_cnx_set: cu = self.doexec(cnx, sql, restr) - actions = [tx.EntityAction(a,p,o,et,e,c and loads(self.binary_to_str(c))) + actions = [tx.EntityAction(a,p,o,et,e,c and pickle.loads(self.binary_to_str(c))) for a,p,o,et,e,c in cu.fetchall()] sql = self.sqlgen.select('tx_relation_actions', restr, ('txa_action', 'txa_public', 'txa_order', @@ -1168,8 +1163,8 @@ elif eschema.destination(rtype) in ('Bytes', 'Password'): changes[column] = self._binary(value) edited[rtype] = Binary(value) - elif isinstance(value, str): - edited[rtype] = unicode(value, cnx.encoding, 'replace') + elif PY2 and isinstance(value, str): + edited[rtype] = text_type(value, cnx.encoding, 'replace') else: edited[rtype] = value # This must only be done after init_entity_caches: deferred in calling functions @@ -1210,14 +1205,14 @@ try: sentity, oentity, rdef = _undo_rel_info(cnx, subj, rtype, obj) except _UndoException as ex: - errors.append(unicode(ex)) + errors.append(text_type(ex)) else: for role, entity in (('subject', sentity), ('object', oentity)): try: _undo_check_relation_target(entity, rdef, role) except _UndoException as ex: - errors.append(unicode(ex)) + errors.append(text_type(ex)) continue if not errors: self.repo.hm.call_hooks('before_add_relation', cnx, @@ -1293,7 +1288,7 @@ try: sentity, oentity, rdef = _undo_rel_info(cnx, subj, rtype, obj) except _UndoException as ex: - errors.append(unicode(ex)) + errors.append(text_type(ex)) else: rschema = rdef.rtype if rschema.inlined: @@ -1507,12 +1502,7 @@ if 'CWUser' in schema: # probably an empty schema if not true...
# rql syntax trees used to authenticate users self._passwd_rqlst = self.source.compile_rql(self.passwd_rql, self._sols) - if 'CWSource' in schema: - self._auth_rqlst = self.source.compile_rql(self.auth_rql, self._sols) - else: - self._auth_rqlst = self.source.compile_rql( - u'Any X WHERE X is CWUser, X login %(login)s, X upassword %(pwd)s', - ({'X': 'CWUser', 'P': 'Password'},)) + self._auth_rqlst = self.source.compile_rql(self.auth_rql, self._sols) def authenticate(self, cnx, login, password=None, **kwargs): """return CWUser eid for the given login/password if this account is @@ -1549,7 +1539,7 @@ SQL_PREFIX + 'CWUser', SQL_PREFIX + 'upassword', SQL_PREFIX + 'login'), - {'newhash': self.source._binary(newhash), + {'newhash': self.source._binary(newhash.encode('ascii')), 'login': login}) cnx.commit() return user @@ -1697,7 +1687,7 @@ self.logger.info('number of rows: %d', rowcount) blocksize = self.blocksize if rowcount > 0: - for i, start in enumerate(xrange(0, rowcount, blocksize)): + for i, start in enumerate(range(0, rowcount, blocksize)): rows = list(itertools.islice(rows_iterator, blocksize)) serialized = self._serialize(table, columns, rows) archive.writestr('tables/%s.%04d' % (table, i), serialized) @@ -1718,7 +1708,7 @@ return tuple(columns), rows def _serialize(self, name, columns, rows): - return dumps((name, columns, rows), pickle.HIGHEST_PROTOCOL) + return pickle.dumps((name, columns, rows), pickle.HIGHEST_PROTOCOL) def restore(self, backupfile): archive = zipfile.ZipFile(backupfile, 'r', allowZip64=True) @@ -1771,7 +1761,7 @@ return sequences, numranges, tables, table_chunks def read_sequence(self, archive, seq): - seqname, columns, rows = loads(archive.read('sequences/%s' % seq)) + seqname, columns, rows = pickle.loads(archive.read('sequences/%s' % seq)) assert seqname == seq assert len(rows) == 1 assert len(rows[0]) == 1 @@ -1781,7 +1771,7 @@ self.cnx.commit() def read_numrange(self, archive, numrange): - rangename, columns, rows = loads(archive.read('numrange/%s' % numrange)) + rangename, columns, rows = pickle.loads(archive.read('numrange/%s' % numrange)) assert rangename == numrange assert len(rows) == 1 assert len(rows[0]) == 1 @@ -1796,7 +1786,7 @@ self.cnx.commit() row_count = 0 for filename in filenames: - tablename, columns, rows = loads(archive.read(filename)) + tablename, columns, rows = pickle.loads(archive.read(filename)) assert tablename == table if not rows: continue diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/sources/rql2sql.py --- a/server/sources/rql2sql.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/sources/rql2sql.py Thu Jun 16 14:19:20 2016 +0200 @@ -51,6 +51,9 @@ import threading +from six import PY2 +from six.moves import range + from logilab.database import FunctionDescr, SQL_FUNCTIONS_REGISTRY from rql import BadRQLQuery, CoercionError @@ -172,7 +175,7 @@ existssols = {} unstable = set() invariants = {} - for vname, var in rqlst.defined_vars.iteritems(): + for vname, var in rqlst.defined_vars.items(): vtype = newsols[0][vname] if var._q_invariant or vname in varmap: # remove invariant variable from solutions to remove duplicates @@ -187,13 +190,13 @@ thisexistssols = [newsols[0]] thisexistsvars = set() existssols[var.scope] = thisexistssols, thisexistsvars - for i in xrange(len(newsols)-1, 0, -1): + for i in range(len(newsols)-1, 0, -1): if vtype != newsols[i][vname]: thisexistssols.append(newsols.pop(i)) thisexistsvars.add(vname) else: # remember unstable variables - for i in xrange(1, len(newsols)): + for i in range(1, len(newsols)): if vtype != 
newsols[i][vname]: unstable.add(vname) if invariants: @@ -205,11 +208,11 @@ newsols = newsols_ # reinsert solutions for invariants for sol in newsols: - for invvar, vartype in invariants[id(sol)].iteritems(): + for invvar, vartype in invariants[id(sol)].items(): sol[invvar] = vartype for sol in existssols: try: - for invvar, vartype in invariants[id(sol)].iteritems(): + for invvar, vartype in invariants[id(sol)].items(): sol[invvar] = vartype except KeyError: continue @@ -257,7 +260,7 @@ append(term) if groups: for vref in term.iget_nodes(VariableRef): - if not vref in groups: + if not any(vref.is_equivalent(g) for g in groups): groups.append(vref) def fix_selection_and_group(rqlst, needwrap, selectsortterms, @@ -273,7 +276,7 @@ (isinstance(term, Function) and get_func_descr(term.name).aggregat)): for vref in term.iget_nodes(VariableRef): - if not vref in groupvrefs: + if not any(vref.is_equivalent(group) for group in groupvrefs): groups.append(vref) groupvrefs.append(vref) if needwrap and (groups or having): @@ -364,7 +367,7 @@ self.done = set() self.tables = self.subtables.copy() self.actual_tables = [[]] - for _, tsql in self.tables.itervalues(): + for _, tsql in self.tables.values(): self.actual_tables[-1].append(tsql) self.outer_chains = [] self.outer_tables = {} @@ -398,7 +401,7 @@ notdone_outside_vars = set() # when iterating other solutions inner to an EXISTS subquery, we should # reset variables which have this exists node as scope at each iteration - for var in exists.stmt.defined_vars.itervalues(): + for var in exists.stmt.defined_vars.values(): if var.scope is exists: thisexistsvars.add(var.name) elif var.name not in self.done: @@ -600,7 +603,7 @@ self.outer_chains.remove(lchain) rchain += lchain self.mark_as_used_in_outer_join(leftalias) - for alias, (aouter, aconditions, achain) in outer_tables.iteritems(): + for alias, (aouter, aconditions, achain) in outer_tables.items(): if achain is lchain: outer_tables[alias] = (aouter, aconditions, rchain) else: @@ -1475,7 +1478,7 @@ """generate SQL name for a function""" if func.name == 'FTIRANK': try: - rel = iter(func.children[0].variable.stinfo['ftirels']).next() + rel = next(iter(func.children[0].variable.stinfo['ftirels'])) except KeyError: raise BadRQLQuery("can't use FTIRANK on variable not used in an" " 'has_text' relation (eg full-text search)") @@ -1512,7 +1515,7 @@ return self._mapped_term(constant, '%%(%s)s' % value)[0] except KeyError: _id = value - if isinstance(_id, unicode): + if PY2 and isinstance(_id, unicode): _id = _id.encode() else: _id = str(id(constant)).replace('-', '', 1) @@ -1561,7 +1564,7 @@ # add additional restriction on entities.type column pts = variable.stinfo['possibletypes'] if len(pts) == 1: - etype = iter(variable.stinfo['possibletypes']).next() + etype = next(iter(variable.stinfo['possibletypes'])) restr = "%s.type='%s'" % (vtablename, etype) else: etypes = ','.join("'%s'" % et for et in pts) @@ -1609,7 +1612,7 @@ def _temp_table_scope(self, select, table): scope = 9999 - for var, sql in self._varmap.iteritems(): + for var, sql in self._varmap.items(): # skip "attribute variable" in varmap (such 'T.login') if not '.' 
in var and table == sql.split('.', 1)[0]: try: @@ -1668,7 +1671,7 @@ except KeyError: pass rel = (variable.stinfo.get('principal') or - iter(variable.stinfo['rhsrelations']).next()) + next(iter(variable.stinfo['rhsrelations']))) linkedvar = rel.children[0].variable if rel.r_type == 'eid': return linkedvar.accept(self) diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/sources/storages.py --- a/server/sources/storages.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/sources/storages.py Thu Jun 16 14:19:20 2016 +0200 @@ -23,6 +23,10 @@ from contextlib import contextmanager import tempfile +from six import PY2, PY3, text_type, binary_type + +from logilab.common import nullobject + from yams.schema import role_name from cubicweb import Binary, ValidationError @@ -44,7 +48,7 @@ query result process of fetched attribute's value and should have the following prototype:: - callback(self, source, session, value) + callback(self, source, cnx, value) where `value` is the value actually stored in the backend. None values will be skipped (eg callback won't be called). @@ -92,24 +96,33 @@ return tempfile.mkstemp(prefix=base, suffix=ext, dir=dirpath) @contextmanager -def fsimport(session): - present = 'fs_importing' in session.transaction_data - old_value = session.transaction_data.get('fs_importing') - session.transaction_data['fs_importing'] = True +def fsimport(cnx): + present = 'fs_importing' in cnx.transaction_data + old_value = cnx.transaction_data.get('fs_importing') + cnx.transaction_data['fs_importing'] = True yield if present: - session.transaction_data['fs_importing'] = old_value + cnx.transaction_data['fs_importing'] = old_value else: - del session.transaction_data['fs_importing'] + del cnx.transaction_data['fs_importing'] + + +_marker = nullobject() class BytesFileSystemStorage(Storage): """store Bytes attribute value on the file system""" - def __init__(self, defaultdir, fsencoding='utf-8', wmode=0444): - if type(defaultdir) is unicode: - defaultdir = defaultdir.encode(fsencoding) + def __init__(self, defaultdir, fsencoding=_marker, wmode=0o444): + if PY3: + if not isinstance(defaultdir, text_type): + raise TypeError('defaultdir must be a unicode object in python 3') + if fsencoding is not _marker: + raise ValueError('fsencoding is no longer supported in python 3') + else: + self.fsencoding = fsencoding or 'utf-8' + if isinstance(defaultdir, text_type): + defaultdir = defaultdir.encode(self.fsencoding) self.default_directory = defaultdir - self.fsencoding = fsencoding # extra umask to use when creating file # 0444 as in "only allow read bit in permission" self._wmode = wmode @@ -126,7 +139,7 @@ fileobj.close() - def callback(self, source, session, value): + def callback(self, source, cnx, value): """sql generator callback when some attribute with a custom storage is accessed """ @@ -141,11 +154,13 @@ """an entity using this storage for attr has been added""" if entity._cw.transaction_data.get('fs_importing'): binary = Binary.from_file(entity.cw_edited[attr].getvalue()) + entity._cw_dont_cache_attribute(attr, repo_side=True) else: binary = entity.cw_edited.pop(attr) fd, fpath = self.new_fs_path(entity, attr) # bytes storage used to store file's path - entity.cw_edited.edited_attribute(attr, Binary(fpath)) + binary_obj = Binary(fpath if PY2 else fpath.encode('utf-8')) + entity.cw_edited.edited_attribute(attr, binary_obj) self._writecontent(fd, binary) AddFileOp.get_instance(entity._cw).add_data(fpath) return binary @@ -159,6 +174,7 @@ # We do not need to create it but we need to fetch the content of # the
file as the actual content of the attribute fpath = entity.cw_edited[attr].getvalue() + entity._cw_dont_cache_attribute(attr, repo_side=True) assert fpath is not None binary = Binary.from_file(fpath) else: @@ -187,7 +203,8 @@ entity.cw_edited.edited_attribute(attr, None) else: # register the new location for the file. - entity.cw_edited.edited_attribute(attr, Binary(fpath)) + binary_obj = Binary(fpath if PY2 else fpath.encode('utf-8')) + entity.cw_edited.edited_attribute(attr, binary_obj) if oldpath is not None and oldpath != fpath: # Mark the old file as useless so the file will be removed at # commit. @@ -206,16 +223,19 @@ # available. Keeping the extension is useful for example in the case of # PIL processing that use filename extension to detect content-type, as # well as providing more understandable file names on the fs. + if PY2: + attr = attr.encode('ascii') basename = [str(entity.eid), attr] name = entity.cw_attr_metadata(attr, 'name') if name is not None: - basename.append(name.encode(self.fsencoding)) + basename.append(name.encode(self.fsencoding) if PY2 else name) fd, fspath = uniquify_path(self.default_directory, '_'.join(basename)) if fspath is None: msg = entity._cw._('failed to uniquify path (%s, %s)') % ( self.default_directory, '_'.join(basename)) raise ValidationError(entity.eid, {role_name(attr, 'subject'): msg}) + assert isinstance(fspath, str) # bytes on py2, unicode on py3 return fd, fspath def current_fs_path(self, entity, attr): @@ -229,34 +249,40 @@ rawvalue = cu.fetchone()[0] if rawvalue is None: # no previous value return None - return sysource._process_value(rawvalue, cu.description[0], - binarywrap=str) + fspath = sysource._process_value(rawvalue, cu.description[0], + binarywrap=binary_type) + if PY3: + fspath = fspath.decode('utf-8') + assert isinstance(fspath, str) # bytes on py2, unicode on py3 + return fspath def migrate_entity(self, entity, attribute): """migrate an entity attribute to the storage""" entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache) self.entity_added(entity, attribute) - session = entity._cw - source = session.repo.system_source + cnx = entity._cw + source = cnx.repo.system_source attrs = source.preprocess_entity(entity) sql = source.sqlgen.update('cw_' + entity.cw_etype, attrs, ['cw_eid']) - source.doexec(session, sql, attrs) + source.doexec(cnx, sql, attrs) entity.cw_edited = None class AddFileOp(hook.DataOperationMixIn, hook.Operation): def rollback_event(self): for filepath in self.get_data(): + assert isinstance(filepath, str) # bytes on py2, unicode on py3 try: unlink(filepath) except Exception as ex: - self.error('cant remove %s: %s' % (filepath, ex)) + self.error("can't remove %s: %s" % (filepath, ex)) class DeleteFileOp(hook.DataOperationMixIn, hook.Operation): def postcommit_event(self): for filepath in self.get_data(): + assert isinstance(filepath, str) # bytes on py2, unicode on py3 try: unlink(filepath) except Exception as ex: - self.error('cant remove %s: %s' % (filepath, ex)) + self.error("can't remove %s: %s" % (filepath, ex)) diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/sqlutils.py --- a/server/sqlutils.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/sqlutils.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
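
A minimal sketch of the sentinel pattern behind _marker = nullobject() in the storages.py hunks above (assumption: logilab.common.nullobject is a falsy singleton, as its use there implies): a private marker distinguishes "argument not passed" from every real value, including None.

from logilab.common import nullobject

_marker = nullobject()

def configure(fsencoding=_marker):
    # complain only when the caller explicitly passed a value,
    # even if that value was None or ''
    if fsencoding is not _marker:
        raise ValueError('fsencoding is no longer supported in python 3')

configure()          # fine: the argument stayed at the sentinel default
# configure(None)    # raises: None counts as an explicitly passed value
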
@@ -16,6 +16,7 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . """SQL utilities functions and classes.""" +from __future__ import print_function __docformat__ = "restructuredtext en" @@ -23,15 +24,19 @@ import re import subprocess from os.path import abspath -from itertools import ifilter from logging import getLogger -from datetime import time, datetime +from datetime import time, datetime, timedelta + +from six import string_types, text_type +from six.moves import filter + +from pytz import utc from logilab import database as db, common as lgc from logilab.common.shellutils import ProgressBar, DummyProgressBar from logilab.common.deprecation import deprecated from logilab.common.logging_ext import set_log_methods -from logilab.common.date import utctime, utcdatetime +from logilab.common.date import utctime, utcdatetime, strptime from logilab.database.sqlgen import SQLGenerator from cubicweb import Binary, ConfigurationError @@ -43,9 +48,14 @@ lgc.USE_MX_DATETIME = False SQL_PREFIX = 'cw_' + def _run_command(cmd): - print ' '.join(cmd) - return subprocess.call(cmd) + if isinstance(cmd, string_types): + print(cmd) + return subprocess.call(cmd, shell=True) + else: + print(' '.join(cmd)) + return subprocess.call(cmd) def sqlexec(sqlstmts, cursor_or_execute, withpb=True, @@ -69,7 +79,7 @@ else: execute = cursor_or_execute sqlstmts_as_string = False - if isinstance(sqlstmts, basestring): + if isinstance(sqlstmts, string_types): sqlstmts_as_string = True sqlstmts = sqlstmts.split(delimiter) if withpb: @@ -87,7 +97,7 @@ try: # some dbapi modules don't accept unicode for sql strings execute(str(sql)) - except Exception, err: + except Exception: if cnx: cnx.rollback() failed.append(sql) @@ -95,7 +105,7 @@ if cnx: cnx.commit() if withpb: - print + print() if sqlstmts_as_string: failed = delimiter.join(failed) return failed @@ -178,9 +188,9 @@ # for mssql, we need to drop views before tables if hasattr(dbhelper, 'list_views'): cmds += ['DROP VIEW %s;' % name - for name in ifilter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_views(sqlcursor))] + for name in filter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_views(sqlcursor))] cmds += ['DROP TABLE %s;' % name - for name in ifilter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_tables(sqlcursor))] + for name in filter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_tables(sqlcursor))] return '\n'.join(cmds) @@ -370,7 +380,7 @@ def merge_args(self, args, query_args): if args is not None: newargs = {} - for key, val in args.iteritems(): + for key, val in args.items(): # convert cubicweb binary into db binary if isinstance(val, Binary): val = self._binary(val.getvalue()) @@ -441,7 +451,7 @@ attrs = {} eschema = entity.e_schema converters = getattr(self.dbhelper, 'TYPE_CONVERTERS', {}) - for attr, value in entity.cw_edited.iteritems(): + for attr, value in entity.cw_edited.items(): if value is not None and eschema.subjrels[attr].final: atype = str(entity.e_schema.destination(attr)) if atype in converters: @@ -472,7 +482,55 @@ # connection initialization functions ########################################## -def init_sqlite_connexion(cnx): +def _install_sqlite_querier_patch(): + """This monkey-patch hotfixes a sqlite bug causing some dates to be returned as strings rather than + date objects (http://www.sqlite.org/cvstrac/tktview?tn=1327,33) + """ + from cubicweb.server.querier import QuerierHelper + + if hasattr(QuerierHelper, '_sqlite_patched'):
+ return # already monkey patched + + def wrap_execute(base_execute): + def new_execute(*args, **kwargs): + rset = base_execute(*args, **kwargs) + if rset.description: + found_date = False + for row, rowdesc in zip(rset, rset.description): + for cellindex, (value, vtype) in enumerate(zip(row, rowdesc)): + if vtype in ('TZDatetime', 'Date', 'Datetime') \ + and isinstance(value, text_type): + found_date = True + value = value.rsplit('.', 1)[0] + try: + row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S') + except Exception: + row[cellindex] = strptime(value, '%Y-%m-%d') + if vtype == 'TZDatetime': + row[cellindex] = row[cellindex].replace(tzinfo=utc) + if vtype == 'Time' and isinstance(value, text_type): + found_date = True + try: + row[cellindex] = strptime(value, '%H:%M:%S') + except Exception: + # DateTime used as Time? + row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S') + if vtype == 'Interval' and isinstance(value, int): + found_date = True + # XXX value is in number of seconds? + row[cellindex] = timedelta(0, value, 0) + if not found_date: + break + return rset + return new_execute + + QuerierHelper.execute = wrap_execute(QuerierHelper.execute) + QuerierHelper._sqlite_patched = True + + +def _init_sqlite_connection(cnx): + """Internal function that will be called to init a sqlite connection""" + _install_sqlite_querier_patch() class group_concat(object): def __init__(self): @@ -481,7 +539,7 @@ if value is not None: self.values.add(value) def finalize(self): - return ', '.join(unicode(v) for v in self.values) + return ', '.join(text_type(v) for v in self.values) cnx.create_aggregate("GROUP_CONCAT", 1, group_concat) @@ -519,14 +577,15 @@ yams.constraints.patch_sqlite_decimal() sqlite_hooks = SQL_CONNECT_HOOKS.setdefault('sqlite', []) -sqlite_hooks.append(init_sqlite_connexion) +sqlite_hooks.append(_init_sqlite_connection) -def init_postgres_connexion(cnx): +def _init_postgres_connection(cnx): + """Internal function that will be called to init a postgresql connection""" cnx.cursor().execute('SET TIME ZONE UTC') # commit is needed, else setting are lost if the connection is first # rolled back cnx.commit() postgres_hooks = SQL_CONNECT_HOOKS.setdefault('postgres', []) -postgres_hooks.append(init_postgres_connexion) +postgres_hooks.append(_init_postgres_connection) diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/ssplanner.py --- a/server/ssplanner.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/ssplanner.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,6 +19,8 @@ __docformat__ = "restructuredtext en" +from six import text_type + from rql.stmts import Union, Select from rql.nodes import Constant, Relation @@ -54,7 +56,7 @@ value = rhs.eval(plan.args) eschema = edef.entity.e_schema attrtype = eschema.subjrels[rtype].objects(eschema)[0] - if attrtype == 'Password' and isinstance(value, unicode): + if attrtype == 'Password' and isinstance(value, text_type): value = value.encode('UTF8') edef.edited_attribute(rtype, value) elif str(rhs) in to_build: @@ -306,7 +308,7 @@ if varmap is None: return varmap maprepr = {} - for var, sql in varmap.iteritems(): + for var, sql in varmap.items(): table, col = sql.split('.') maprepr[var] = '%s.%s' % (tablesinorder[table], col) return maprepr @@ -527,7 +529,7 @@ result[i] = newrow # update entities repo.glob_add_relations(cnx, relations) - for eid, edited in edefs.iteritems(): + for eid, edited in edefs.items(): repo.glob_update_entity(cnx, edited) return result diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/data-cwep002/schema.py --- 
a/server/test/data-cwep002/schema.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/data-cwep002/schema.py Thu Jun 16 14:19:20 2016 +0200 @@ -33,4 +33,3 @@ class has_employee(ComputedRelation): rule = 'O works_for S' __permissions__ = {'read': ('managers',)} - diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/data-migractions/cubes/fakecustomtype/__init__.py diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/data-migractions/cubes/fakecustomtype/__pkginfo__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/data-migractions/cubes/fakecustomtype/__pkginfo__.py Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,50 @@ +# pylint: disable-msg=W0622 +"""cubicweb-fakecustomtype packaging information""" + +modname = 'fakecustomtype' +distname = "cubicweb-%s" % modname + +numversion = (1, 0, 0) +version = '.'.join(str(num) for num in numversion) + +license = 'LGPL' +author = "Logilab" +author_email = "contact@logilab.fr" +web = 'http://www.cubicweb.org/project/%s' % distname +description = "whatever" +classifiers = [ + 'Environment :: Web Environment', + 'Framework :: CubicWeb', + 'Programming Language :: Python', + 'Programming Language :: JavaScript', +] + +# used packages +__depends__ = {'cubicweb': '>= 3.19.0', + } + + +# packaging ### + +from os import listdir as _listdir +from os.path import join, isdir +from glob import glob + +THIS_CUBE_DIR = join('share', 'cubicweb', 'cubes', modname) + +def listdir(dirpath): + return [join(dirpath, fname) for fname in _listdir(dirpath) + if fname[0] != '.' and not fname.endswith('.pyc') + and not fname.endswith('~') + and not isdir(join(dirpath, fname))] + +data_files = [ + # common files + [THIS_CUBE_DIR, [fname for fname in glob('*.py') if fname != 'setup.py']], + ] +# check for possible extended cube layout +for dirname in ('entities', 'views', 'sobjects', 'hooks', 'schema', 'data', 'i18n', 'migration', 'wdoc'): + if isdir(dirname): + data_files.append([join(THIS_CUBE_DIR, dirname), listdir(dirname)]) +# Note: here, you'll need to add subdirectories if you want +# them to be included in the debian package diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/data-migractions/cubes/fakecustomtype/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/data-migractions/cubes/fakecustomtype/schema.py Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,7 @@ + +from yams.buildobjs import EntityType, make_type + +Numeric = make_type('Numeric') + +class Location(EntityType): + num = Numeric(scale=10, precision=18) diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/data-migractions/cubes/fakecustomtype/site_cubicweb.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/data-migractions/cubes/fakecustomtype/site_cubicweb.py Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,17 @@ +from yams import register_base_type +from logilab.database import get_db_helper +from logilab.database.sqlgen import SQLExpression + +_NUMERIC_PARAMETERS = {'scale': 0, 'precision': None} +register_base_type('Numeric', _NUMERIC_PARAMETERS) + +# Add the datatype to the helper mapping +pghelper = get_db_helper('postgres') + + +def pg_numeric_sqltype(rdef): + """Return a PostgreSQL column type corresponding to rdef + """ + return 'numeric(%s, %s)' % (rdef.precision, rdef.scale) + +pghelper.TYPE_MAPPING['Numeric'] = pg_numeric_sqltype diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/data-migractions/cubes/fakeemail/schema.py --- a/server/test/data-migractions/cubes/fakeemail/schema.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/data-migractions/cubes/fakeemail/schema.py Thu Jun 16
14:19:20 2016 +0200 @@ -5,7 +5,7 @@ :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ # pylint: disable-msg=E0611,F0401 from yams.buildobjs import (SubjectRelation, RelationType, EntityType, @@ -84,5 +84,3 @@ subject = 'Comment' name = 'generated_by' object = 'Email' - - diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/data-migractions/migratedapp/schema.py --- a/server/test/data-migractions/migratedapp/schema.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/data-migractions/migratedapp/schema.py Thu Jun 16 14:19:20 2016 +0200 @@ -21,6 +21,7 @@ SubjectRelation, Bytes, RichString, String, Int, Boolean, Datetime, Date, Float) from yams.constraints import SizeConstraint, UniqueConstraint +from cubicweb import _ from cubicweb.schema import (WorkflowableEntityType, RQLConstraint, RQLVocabularyConstraint, ERQLExpression, RRQLExpression) @@ -210,3 +211,8 @@ class same_as(RelationDefinition): subject = ('Societe',) object = 'ExternalUri' + +class inlined_rel(RelationDefinition): + subject = object = 'Folder2' + inlined = True + cardinality = '??' diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/data-migractions/schema.py --- a/server/test/data-migractions/schema.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/data-migractions/schema.py Thu Jun 16 14:19:20 2016 +0200 @@ -24,6 +24,8 @@ RQLConstraint, RQLUniqueConstraint, RQLVocabularyConstraint, ERQLExpression, RRQLExpression) +from cubicweb import _ + class Affaire(WorkflowableEntityType): __permissions__ = { @@ -85,7 +87,7 @@ object = 'SubDivision' from cubicweb.schemas.base import CWUser -CWUser.get_relations('login').next().fulltextindexed = True +next(CWUser.get_relations('login')).fulltextindexed = True class Note(WorkflowableEntityType): date = String(maxsize=10) @@ -223,13 +225,13 @@ class ecrit_par_1(RelationDefinition): name = 'ecrit_par' subject = 'Note' - object ='Personne' + object = 'Personne' cardinality = '?*' class ecrit_par_2(RelationDefinition): name = 'ecrit_par' subject = 'Note' - object ='CWUser' + object = 'CWUser' cardinality='?*' diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/data-schema2sql/schema/Dates.py --- a/server/test/data-schema2sql/schema/Dates.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/data-schema2sql/schema/Dates.py Thu Jun 16 14:19:20 2016 +0200 @@ -26,4 +26,3 @@ d2 = Date(default=date(2007, 12, 11)) t1 = Time(default=time(8, 40)) t2 = Time(default=time(9, 45)) - diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/data-schema2sql/schema/State.py --- a/server/test/data-schema2sql/schema/State.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/data-schema2sql/schema/State.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,7 +19,7 @@ SubjectRelation, Int, String, Boolean) from yams.constraints import SizeConstraint, UniqueConstraint -from __init__ import RESTRICTED_RTYPE_PERMS +from . 
import RESTRICTED_RTYPE_PERMS class State(EntityType): """used to associate simple states to an entity diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/data-schema2sql/schema/schema.py --- a/server/test/data-schema2sql/schema/schema.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/data-schema2sql/schema/schema.py Thu Jun 16 14:19:20 2016 +0200 @@ -110,4 +110,3 @@ 'add': ('managers',), 'delete': ('managers',), } - diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/data-schemaserial/schema.py --- a/server/test/data-schemaserial/schema.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/data-schemaserial/schema.py Thu Jun 16 14:19:20 2016 +0200 @@ -29,4 +29,3 @@ inline2 = SubjectRelation('Affaire', inlined=True, cardinality='?*') custom_field_of_jungle = BabarTestType(jungle_speed=42) - diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/data-schemaserial/site_cubicweb.py --- a/server/test/data-schemaserial/site_cubicweb.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/data-schemaserial/site_cubicweb.py Thu Jun 16 14:19:20 2016 +0200 @@ -27,4 +27,3 @@ def dumb_sort(something): return something register_sqlite_pyfunc(dumb_sort) - diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/data/migration/postcreate.py --- a/server/test/data/migration/postcreate.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/data/migration/postcreate.py Thu Jun 16 14:19:20 2016 +0200 @@ -35,4 +35,3 @@ wf.add_transition(u'start', pitetre, encours) wf.add_transition(u'end', encours, finie) commit() - diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/data/schema.py --- a/server/test/data/schema.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/data/schema.py Thu Jun 16 14:19:20 2016 +0200 @@ -24,6 +24,7 @@ RQLConstraint, RQLUniqueConstraint, RQLVocabularyConstraint, ERQLExpression, RRQLExpression) +from cubicweb import _ class Affaire(WorkflowableEntityType): __permissions__ = { @@ -85,7 +86,7 @@ object = 'SubDivision' from cubicweb.schemas.base import CWUser -CWUser.get_relations('login').next().fulltextindexed = True +next(CWUser.get_relations('login')).fulltextindexed = True class Note(WorkflowableEntityType): date = String(maxsize=10) diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/datacomputed/migratedapp/schema.py --- a/server/test/datacomputed/migratedapp/schema.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/datacomputed/migratedapp/schema.py Thu Jun 16 14:19:20 2016 +0200 @@ -59,3 +59,8 @@ class renamed(ComputedRelation): rule = 'S employees E, O concerns E' + + +class perm_changes(ComputedRelation): + __permissions__ = {'read': ('managers',)} + rule = 'S employees E, O concerns E' diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/datacomputed/schema.py --- a/server/test/datacomputed/schema.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/datacomputed/schema.py Thu Jun 16 14:19:20 2016 +0200 @@ -58,3 +58,8 @@ class to_be_renamed(ComputedRelation): rule = 'S employees E, O concerns E' + + +class perm_changes(ComputedRelation): + __permissions__ = {'read': ('managers', 'users')} + rule = 'S employees E, O concerns E' diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/requirements.txt --- a/server/test/requirements.txt Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/requirements.txt Thu Jun 16 14:19:20 2016 +0200 @@ -1,4 +1,5 @@ psycopg2 +ldap3 cubicweb-basket cubicweb-card cubicweb-comment diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/unittest_checkintegrity.py --- a/server/test/unittest_checkintegrity.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/unittest_checkintegrity.py Thu Jun 16 14:19:20 
2016 +0200 @@ -17,7 +17,13 @@ # with CubicWeb. If not, see . import sys -from StringIO import StringIO + +from six import PY2 +if PY2: + from StringIO import StringIO +else: + from io import StringIO + from logilab.common.testlib import TestCase, unittest_main from cubicweb.devtools import get_test_db_handler, TestServerConfiguration diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/unittest_datafeed.py --- a/server/test/unittest_datafeed.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/unittest_datafeed.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,3 +1,4 @@ +# coding: utf-8 # copyright 2011-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # @@ -16,7 +17,6 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -import mimetools from datetime import timedelta from contextlib import contextmanager @@ -28,7 +28,7 @@ def setup_database(self): with self.admin_access.repo_cnx() as cnx: with self.base_parser(cnx): - cnx.create_entity('CWSource', name=u'myfeed', type=u'datafeed', + cnx.create_entity('CWSource', name=u'ô myfeed', type=u'datafeed', parser=u'testparser', url=u'ignored', config=u'synchronization-interval=1min') cnx.commit() @@ -48,21 +48,23 @@ entity.cw_edited.update(sourceparams['item']) with self.temporary_appobjects(AParser): - if 'myfeed' in self.repo.sources_by_uri: - yield self.repo.sources_by_uri['myfeed']._get_parser(session) + if u'ô myfeed' in self.repo.sources_by_uri: + yield self.repo.sources_by_uri[u'ô myfeed']._get_parser(session) else: yield def test(self): - self.assertIn('myfeed', self.repo.sources_by_uri) - dfsource = self.repo.sources_by_uri['myfeed'] + self.assertIn(u'ô myfeed', self.repo.sources_by_uri) + dfsource = self.repo.sources_by_uri[u'ô myfeed'] self.assertNotIn('use_cwuri_as_url', dfsource.__dict__) - self.assertEqual({'type': u'datafeed', 'uri': u'myfeed', 'use-cwuri-as-url': True}, + self.assertEqual({'type': u'datafeed', 'uri': u'ô myfeed', 'use-cwuri-as-url': True}, dfsource.public_config) self.assertEqual(dfsource.use_cwuri_as_url, True) self.assertEqual(dfsource.latest_retrieval, None) self.assertEqual(dfsource.synchro_interval, timedelta(seconds=60)) self.assertFalse(dfsource.fresh()) + # ensure source's logger name has been unormalized + self.assertEqual(dfsource.info.__self__.name, 'cubicweb.sources.o myfeed') with self.repo.internal_cnx() as cnx: with self.base_parser(cnx): @@ -78,17 +80,17 @@ self.assertEqual(entity.title, 'cubicweb.org') self.assertEqual(entity.content, 'the cw web site') self.assertEqual(entity.cwuri, 'http://www.cubicweb.org/') - self.assertEqual(entity.cw_source[0].name, 'myfeed') + self.assertEqual(entity.cw_source[0].name, u'ô myfeed') self.assertEqual(entity.cw_metainformation(), {'type': 'Card', - 'source': {'uri': 'myfeed', 'type': 'datafeed', 'use-cwuri-as-url': True}, - 'extid': 'http://www.cubicweb.org/'} + 'source': {'uri': u'ô myfeed', 'type': 'datafeed', 'use-cwuri-as-url': True}, + 'extid': b'http://www.cubicweb.org/'} ) self.assertEqual(entity.absolute_url(), 'http://www.cubicweb.org/') # test repo cache keys self.assertEqual(self.repo._type_source_cache[entity.eid], - ('Card', 'http://www.cubicweb.org/', 'myfeed')) - self.assertEqual(self.repo._extid_cache['http://www.cubicweb.org/'], + ('Card', b'http://www.cubicweb.org/', u'ô myfeed')) + self.assertEqual(self.repo._extid_cache[b'http://www.cubicweb.org/'], entity.eid) # test repull stats = dfsource.pull_data(cnx, force=True) @@ 
-101,19 +103,18 @@ self.assertEqual(stats['created'], set()) self.assertEqual(stats['updated'], set((entity.eid,))) self.assertEqual(self.repo._type_source_cache[entity.eid], - ('Card', 'http://www.cubicweb.org/', 'myfeed')) - self.assertEqual(self.repo._extid_cache['http://www.cubicweb.org/'], + ('Card', b'http://www.cubicweb.org/', u'ô myfeed')) + self.assertEqual(self.repo._extid_cache[b'http://www.cubicweb.org/'], entity.eid) self.assertEqual(dfsource.source_cwuris(cnx), - {'http://www.cubicweb.org/': (entity.eid, 'Card')} - ) + {b'http://www.cubicweb.org/': (entity.eid, 'Card')}) self.assertTrue(dfsource.latest_retrieval) self.assertTrue(dfsource.fresh()) # test_rename_source with self.admin_access.repo_cnx() as cnx: - cnx.execute('SET S name "myrenamedfeed" WHERE S is CWSource, S name "myfeed"') + cnx.entity_from_eid(dfsource.eid).cw_set(name=u"myrenamedfeed") cnx.commit() entity = cnx.execute('Card X').get_entity(0, 0) self.assertEqual(entity.cwuri, 'http://www.cubicweb.org/') @@ -121,11 +122,11 @@ self.assertEqual(entity.cw_metainformation(), {'type': 'Card', 'source': {'uri': 'myrenamedfeed', 'type': 'datafeed', 'use-cwuri-as-url': True}, - 'extid': 'http://www.cubicweb.org/'} + 'extid': b'http://www.cubicweb.org/'} ) self.assertEqual(self.repo._type_source_cache[entity.eid], - ('Card', 'http://www.cubicweb.org/', 'myrenamedfeed')) - self.assertEqual(self.repo._extid_cache['http://www.cubicweb.org/'], + ('Card', b'http://www.cubicweb.org/', 'myrenamedfeed')) + self.assertEqual(self.repo._extid_cache[b'http://www.cubicweb.org/'], entity.eid) # test_delete_source @@ -140,7 +141,14 @@ value = parser.retrieve_url('a string') self.assertEqual(200, value.getcode()) self.assertEqual('a string', value.geturl()) - self.assertIsInstance(value.info(), mimetools.Message) + + def test_update_url(self): + dfsource = self.repo.sources_by_uri[u'ô myfeed'] + with self.admin_access.repo_cnx() as cnx: + cnx.entity_from_eid(dfsource.eid).cw_set(url=u"http://pouet.com\nhttp://pouet.org") + self.assertEqual(dfsource.urls, [u'ignored']) + cnx.commit() + self.assertEqual(dfsource.urls, [u"http://pouet.com", u"http://pouet.org"]) class DataFeedConfigTC(CubicWebTC): diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/unittest_ldapsource.py --- a/server/test/unittest_ldapsource.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/unittest_ldapsource.py Thu Jun 16 14:19:20 2016 +0200 @@ -15,19 +15,26 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""cubicweb.server.sources.ldapusers unit and functional tests""" +"""cubicweb.server.sources.ldapfeed unit and functional tests + +These tests expect the slapd, python-ldap3 and ldapscripts packages to be installed.
+""" +from __future__ import print_function import os import sys import shutil import time -from os.path import join, exists import subprocess import tempfile +import unittest +from os.path import join + +from six import string_types +from six.moves import range from cubicweb import AuthenticationError from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.devtools.repotest import RQLGeneratorTC from cubicweb.devtools.httptest import get_available_port @@ -44,13 +51,14 @@ URL = None + def create_slapd_configuration(cls): global URL slapddir = tempfile.mkdtemp('cw-unittest-ldap') config = cls.config slapdconf = join(config.apphome, "slapd.conf") - confin = file(join(config.apphome, "slapd.conf.in")).read() - confstream = file(slapdconf, 'w') + confin = open(join(config.apphome, "slapd.conf.in")).read() + confstream = open(slapdconf, 'w') confstream.write(confin % {'apphome': config.apphome, 'testdir': slapddir}) confstream.close() # fill ldap server with some data @@ -61,16 +69,16 @@ slapproc = subprocess.Popen(cmdline, stdout=PIPE, stderr=PIPE) stdout, stderr = slapproc.communicate() if slapproc.returncode: - print >> sys.stderr, ('slapadd returned with status: %s' - % slapproc.returncode) + print('slapadd returned with status: %s' + % slapproc.returncode, file=sys.stderr) sys.stdout.write(stdout) sys.stderr.write(stderr) - #ldapuri = 'ldapi://' + join(basedir, "ldapi").replace('/', '%2f') - port = get_available_port(xrange(9000, 9100)) + # ldapuri = 'ldapi://' + join(basedir, "ldapi").replace('/', '%2f') + port = get_available_port(range(9000, 9100)) host = 'localhost:%s' % port ldapuri = 'ldap://%s' % host - cmdline = ["/usr/sbin/slapd", "-f", slapdconf, "-h", ldapuri, "-d", "0"] + cmdline = ["/usr/sbin/slapd", "-f", slapdconf, "-h", ldapuri, "-d", "0"] config.info('Starting slapd:', ' '.join(cmdline)) PIPE = subprocess.PIPE cls.slapd_process = subprocess.Popen(cmdline, stdout=PIPE, stderr=PIPE) @@ -83,6 +91,7 @@ URL = u'ldap://%s' % host return slapddir + def terminate_slapd(cls): config = cls.config if cls.slapd_process and cls.slapd_process.returncode is None: @@ -90,12 +99,12 @@ if hasattr(cls.slapd_process, 'terminate'): cls.slapd_process.terminate() else: - import os, signal + import signal os.kill(cls.slapd_process.pid, signal.SIGTERM) stdout, stderr = cls.slapd_process.communicate() if cls.slapd_process.returncode: - print >> sys.stderr, ('slapd returned with status: %s' - % cls.slapd_process.returncode) + print('slapd returned with status: %s' + % cls.slapd_process.returncode, file=sys.stderr) sys.stdout.write(stdout) sys.stderr.write(stderr) config.info('DONE') @@ -107,6 +116,8 @@ @classmethod def setUpClass(cls): + if not os.path.exists('/usr/sbin/slapd'): + raise unittest.SkipTest('slapd not found') from cubicweb.cwctl import init_cmdline_log_threshold init_cmdline_log_threshold(cls.config, cls.loglevel) cls._tmpdir = create_slapd_configuration(cls) @@ -139,7 +150,7 @@ cnx.execute('DELETE Any E WHERE E cw_source S, S name "ldap"') cnx.execute('SET S config %(conf)s, S url %(url)s ' 'WHERE S is CWSource, S name "ldap"', - {"conf": CONFIG_LDAPFEED, 'url': URL} ) + {"conf": CONFIG_LDAPFEED, 'url': URL}) cnx.commit() with self.repo.internal_cnx() as cnx: self.pull(cnx) @@ -148,32 +159,32 @@ """ add an LDAP entity """ - modcmd = ['dn: %s'%dn, 'changetype: add'] - for key, values in mods.iteritems(): - if isinstance(values, basestring): + modcmd = ['dn: %s' % dn, 'changetype: add'] + for key, values in mods.items(): + if isinstance(values, string_types): values = [values] 
for value in values: - modcmd.append('%s: %s'%(key, value)) + modcmd.append('%s: %s' % (key, value)) self._ldapmodify(modcmd) def delete_ldap_entry(self, dn): """ delete an LDAP entity """ - modcmd = ['dn: %s'%dn, 'changetype: delete'] + modcmd = ['dn: %s' % dn, 'changetype: delete'] self._ldapmodify(modcmd) def update_ldap_entry(self, dn, mods): """ modify one or more attributes of an LDAP entity """ - modcmd = ['dn: %s'%dn, 'changetype: modify'] - for (kind, key), values in mods.iteritems(): + modcmd = ['dn: %s' % dn, 'changetype: modify'] + for (kind, key), values in mods.items(): modcmd.append('%s: %s' % (kind, key)) - if isinstance(values, basestring): + if isinstance(values, string_types): values = [values] for value in values: - modcmd.append('%s: %s'%(key, value)) + modcmd.append('%s: %s' % (key, value)) modcmd.append('-') self._ldapmodify(modcmd) @@ -183,10 +194,11 @@ 'cn=admin,dc=cubicweb,dc=test', '-w', 'cw'] PIPE = subprocess.PIPE p = subprocess.Popen(updatecmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) - p.stdin.write('\n'.join(modcmd)) + p.stdin.write('\n'.join(modcmd).encode('ascii')) p.stdin.close() if p.wait(): - raise RuntimeError("ldap update failed: %s"%('\n'.join(p.stderr.readlines()))) + raise RuntimeError("ldap update failed: %s" % ('\n'.join(p.stderr.readlines()))) + class CheckWrongGroup(LDAPFeedTestBase): """ @@ -196,18 +208,17 @@ def test_wrong_group(self): with self.admin_access.repo_cnx() as cnx: - source = cnx.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0,0) + source = cnx.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0, 0) config = source.repo_source.check_config(source) # inject a bogus group here, along with at least a valid one - config['user-default-group'] = ('thisgroupdoesnotexists','users') + config['user-default-group'] = ('thisgroupdoesnotexists', 'users') source.repo_source.update_config(source, config) cnx.commit() # here we emitted an error log entry - stats = source.repo_source.pull_data(cnx, force=True, raise_on_error=True) + source.repo_source.pull_data(cnx, force=True, raise_on_error=True) cnx.commit() - class LDAPFeedUserTC(LDAPFeedTestBase): """ A testcase for CWUser support in ldapfeed (basic tests and authentication). 
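For context on the LDIF plumbing above: add_ldap_entry(), delete_ldap_entry() and update_ldap_entry() build LDIF change records which _ldapmodify() pipes to the ldapmodify binary as ASCII bytes on stdin. Below is a minimal sketch of the record produced by update_ldap_entry(), reconstructed from the helper code using the dn and attribute that appear in the group-membership tests further down; it is illustrative only, not captured from a test run.

# Sketch: the LDIF 'changetype: modify' record as built by
# update_ldap_entry('cn=logilab,ou=Group,dc=cubicweb,dc=test',
#                   {('add', 'memberUid'): ['syt']})
mods = {('add', 'memberUid'): ['syt']}
modcmd = ['dn: cn=logilab,ou=Group,dc=cubicweb,dc=test', 'changetype: modify']
for (kind, key), values in mods.items():
    modcmd.append('%s: %s' % (kind, key))       # 'add: memberUid'
    for value in values:
        modcmd.append('%s: %s' % (key, value))  # 'memberUid: syt'
    modcmd.append('-')                          # terminates each change
print('\n'.join(modcmd))
# dn: cn=logilab,ou=Group,dc=cubicweb,dc=test
# changetype: modify
# add: memberUid
# memberUid: syt
# -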
@@ -223,6 +234,8 @@ # ensure we won't be logged against self.assertRaises(AuthenticationError, source.authenticate, cnx, 'toto', 'toto') + self.assertRaises(AuthenticationError, + source.authenticate, cnx, 'syt', 'toto') self.assertTrue(source.authenticate(cnx, 'syt', 'syt')) sessionid = self.repo.connect('syt', password='syt') self.assertTrue(sessionid) @@ -284,12 +297,12 @@ # and that the password stored in the system source is not empty or so user = cnx.execute('CWUser U WHERE U login "syt"').get_entity(0, 0) user.cw_clear_all_caches() - pwd = cnx.system_sql("SELECT cw_upassword FROM cw_cwuser WHERE cw_login='syt';").fetchall()[0][0] + cu = cnx.system_sql("SELECT cw_upassword FROM cw_cwuser WHERE cw_login='syt';") + pwd = cu.fetchall()[0][0] self.assertIsNotNone(pwd) self.assertTrue(str(pwd)) - class LDAPFeedUserDeletionTC(LDAPFeedTestBase): """ A testcase for situations where users are deleted from or @@ -299,7 +312,7 @@ def test_a_filter_inactivate(self): """ filtered out people should be deactivated, unable to authenticate """ with self.admin_access.repo_cnx() as cnx: - source = cnx.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0,0) + source = cnx.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0, 0) config = source.repo_source.check_config(source) # filter with adim's phone number config['user-filter'] = u'(%s=%s)' % ('telephoneNumber', '109') @@ -346,21 +359,22 @@ self.pull(cnx) # reinsert syt self.add_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test', - { 'objectClass': ['OpenLDAPperson','posixAccount','top','shadowAccount'], - 'cn': 'Sylvain Thenault', - 'sn': 'Thenault', - 'gidNumber': '1004', - 'uid': 'syt', - 'homeDirectory': '/home/syt', - 'shadowFlag': '134538764', - 'uidNumber': '1004', - 'givenName': 'Sylvain', - 'telephoneNumber': '106', - 'displayName': 'sthenault', - 'gecos': 'Sylvain Thenault', - 'mail': ['sylvain.thenault@logilab.fr','syt@logilab.fr'], - 'userPassword': 'syt', - }) + {'objectClass': ['OpenLDAPperson', 'posixAccount', 'top', + 'shadowAccount'], + 'cn': 'Sylvain Thenault', + 'sn': 'Thenault', + 'gidNumber': '1004', + 'uid': 'syt', + 'homeDirectory': '/home/syt', + 'shadowFlag': '134538764', + 'uidNumber': '1004', + 'givenName': 'Sylvain', + 'telephoneNumber': '106', + 'displayName': 'sthenault', + 'gecos': 'Sylvain Thenault', + 'mail': ['sylvain.thenault@logilab.fr', 'syt@logilab.fr'], + 'userPassword': 'syt', + }) with self.repo.internal_cnx() as cnx: self.pull(cnx) with self.admin_access.repo_cnx() as cnx: @@ -433,8 +447,7 @@ try: self.update_ldap_entry('cn=logilab,ou=Group,dc=cubicweb,dc=test', - {('add', 'memberUid'): ['syt']}) - time.sleep(1.1) # timestamps precision is 1s + {('add', 'memberUid'): ['syt']}) with self.repo.internal_cnx() as cnx: self.pull(cnx) @@ -452,7 +465,7 @@ def test_group_member_deleted(self): with self.repo.internal_cnx() as cnx: - self.pull(cnx) # ensure we are sync'ed + self.pull(cnx) # ensure we are sync'ed with self.admin_access.repo_cnx() as cnx: rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L', {'name': 'logilab'}) @@ -462,21 +475,19 @@ try: self.update_ldap_entry('cn=logilab,ou=Group,dc=cubicweb,dc=test', {('delete', 'memberUid'): ['adim']}) - time.sleep(1.1) # timestamps precision is 1s with self.repo.internal_cnx() as cnx: self.pull(cnx) with self.admin_access.repo_cnx() as cnx: rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L', {'name': 'logilab'}) - self.assertEqual(len(rset), 0) + self.assertEqual(len(rset), 0, rset.rows) finally: # back to 
normal ldap setup self.tearDownClass() self.setUpClass() - if __name__ == '__main__': from logilab.common.testlib import unittest_main unittest_main() diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/unittest_migractions.py --- a/server/test/unittest_migractions.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/unittest_migractions.py Thu Jun 16 14:19:20 2016 +0200 @@ -22,6 +22,7 @@ from contextlib import contextmanager from logilab.common.testlib import unittest_main, Tags, tag +from logilab.common import tempattr from yams.constraints import UniqueConstraint @@ -54,7 +55,8 @@ class MigrationConfig(cubicweb.devtools.TestServerConfiguration): default_sources = cubicweb.devtools.DEFAULT_PSQL_SOURCES - CUBES_PATH = [osp.join(HERE, 'data-migractions', 'cubes')] + CUBES_PATH = cubicweb.devtools.TestServerConfiguration.CUBES_PATH + [ + osp.join(HERE, 'data-migractions', 'cubes')] class MigrationTC(CubicWebTC): @@ -151,7 +153,7 @@ orderdict2 = dict(mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, ' 'RDEF relation_type RT, RDEF ordernum O, RT name RTN')) whateverorder = migrschema['whatever'].rdef('Note', 'Int').order - for k, v in orderdict.iteritems(): + for k, v in orderdict.items(): if v >= whateverorder: orderdict[k] = v+1 orderdict['whatever'] = whateverorder @@ -274,10 +276,10 @@ 'description', 'description_format', 'eid', 'filed_under2', 'has_text', - 'identity', 'in_basket', 'is', 'is_instance_of', + 'identity', 'in_basket', 'inlined_rel', 'is', 'is_instance_of', 'modification_date', 'name', 'owned_by']) self.assertCountEqual([str(rs) for rs in self.schema['Folder2'].object_relations()], - ['filed_under2', 'identity']) + ['filed_under2', 'identity', 'inlined_rel']) # Old will be missing as it has been renamed into 'New' in the migrated # schema while New hasn't been added here. 
self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()), @@ -287,6 +289,20 @@ for cstr in eschema.rdef('name').constraints: self.assertTrue(hasattr(cstr, 'eid')) + def test_add_cube_with_custom_final_type(self): + with self.mh() as (cnx, mh): + try: + mh.cmd_add_cube('fakecustomtype') + self.assertIn('Numeric', self.schema) + self.assertTrue(self.schema['Numeric'].final) + rdef = self.schema['num'].rdefs[('Location', 'Numeric')] + self.assertEqual(rdef.scale, 10) + self.assertEqual(rdef.precision, 18) + fields = self.table_schema(mh, '%sLocation' % SQL_PREFIX) + self.assertEqual(fields['%snum' % SQL_PREFIX], ('numeric', None)) # XXX + finally: + mh.cmd_drop_cube('fakecustomtype') + def test_add_drop_entity_type(self): with self.mh() as (cnx, mh): mh.cmd_add_entity_type('Folder2') @@ -524,12 +540,12 @@ # remaining orphan rql expr which should be deleted at commit (composite relation) # unattached expressions -> pending deletion on commit self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "ERQLExpression",' - 'NOT ET1 read_permission X, NOT ET2 add_permission X, ' - 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0], + 'NOT ET1 read_permission X, NOT ET2 add_permission X, ' + 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0], 7) self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "RRQLExpression",' - 'NOT ET1 read_permission X, NOT ET2 add_permission X, ' - 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0], + 'NOT ET1 read_permission X, NOT ET2 add_permission X, ' + 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0], 2) # finally self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0], @@ -579,7 +595,7 @@ def test_add_drop_cube_and_deps(self): with self.mh() as (cnx, mh): schema = self.repo.schema - self.assertEqual(sorted((str(s), str(o)) for s, o in schema['see_also'].rdefs.iterkeys()), + self.assertEqual(sorted((str(s), str(o)) for s, o in schema['see_also'].rdefs), sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'), ('Bookmark', 'Bookmark'), ('Bookmark', 'Note'), ('Note', 'Note'), ('Note', 'Bookmark')])) @@ -593,7 +609,7 @@ for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'sender', 'in_thread', 'reply_to', 'data_format'): self.assertNotIn(ertype, schema) - self.assertEqual(sorted(schema['see_also'].rdefs.iterkeys()), + self.assertEqual(sorted(schema['see_also'].rdefs), sorted([('Folder', 'Folder'), ('Bookmark', 'Bookmark'), ('Bookmark', 'Note'), @@ -612,12 +628,12 @@ for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'sender', 'in_thread', 'reply_to', 'data_format'): self.assertIn(ertype, schema) - self.assertEqual(sorted(schema['see_also'].rdefs.iterkeys()), - sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'), - ('Bookmark', 'Bookmark'), - ('Bookmark', 'Note'), - ('Note', 'Note'), - ('Note', 'Bookmark')])) + self.assertEqual(sorted(schema['see_also'].rdefs), + sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'), + ('Bookmark', 'Bookmark'), + ('Bookmark', 'Note'), + ('Note', 'Note'), + ('Note', 'Bookmark')])) self.assertEqual(sorted(schema['see_also'].subjects()), ['Bookmark', 'EmailThread', 'Folder', 'Note']) self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'EmailThread', 'Folder', 'Note']) from cubes.fakeemail.__pkginfo__ import version as email_version @@ -719,6 +735,24 @@ self.assertEqual(tel, 1.0) self.assertIsInstance(tel, float) + def 
test_drop_required_inlined_relation(self): + with self.mh() as (cnx, mh): + bob = mh.cmd_create_entity('Personne', nom=u'bob') + note = mh.cmd_create_entity('Note', ecrit_par=bob) + mh.commit() + rdef = mh.fs_schema.rschema('ecrit_par').rdefs[('Note', 'Personne')] + with tempattr(rdef, 'cardinality', '1*'): + mh.sync_schema_props_perms('ecrit_par', syncperms=False) + mh.cmd_drop_relation_type('ecrit_par') + self.assertNotIn('%secrit_par' % SQL_PREFIX, + self.table_schema(mh, '%sPersonne' % SQL_PREFIX)) + + def test_drop_inlined_rdef_delete_data(self): + with self.mh() as (cnx, mh): + note = mh.cmd_create_entity('Note', ecrit_par=cnx.user.eid) + mh.commit() + mh.drop_relation_definition('Note', 'ecrit_par', 'CWUser') + self.assertFalse(mh.sqlexec('SELECT * FROM cw_Note WHERE cw_ecrit_par IS NOT NULL')) class MigrationCommandsComputedTC(MigrationTC): """ Unit tests for computed relations and attributes @@ -784,6 +818,20 @@ self.assertEqual(self.schema['whatever'].subjects(), ('Company',)) self.assertFalse(self.table_sql(mh, 'whatever_relation')) + def test_computed_relation_sync_schema_props_perms_security(self): + with self.mh() as (cnx, mh): + rdef = next(iter(self.schema['perm_changes'].rdefs.values())) + self.assertEqual(rdef.permissions, + {'add': (), 'delete': (), + 'read': ('managers', 'users')}) + mh.cmd_sync_schema_props_perms('perm_changes') + self.assertEqual(self.schema['perm_changes'].permissions, + {'read': ('managers',)}) + rdef = next(iter(self.schema['perm_changes'].rdefs.values())) + self.assertEqual(rdef.permissions, + {'add': (), 'delete': (), + 'read': ('managers',)}) + def test_computed_relation_sync_schema_props_perms_on_rdef(self): self.assertIn('whatever', self.schema) with self.mh() as (cnx, mh): @@ -866,8 +914,10 @@ self.assertIn('note100', self.schema) with self.mh() as (cnx, mh): mh.cmd_sync_schema_props_perms('note100') - self.assertEqual(self.schema['note100'].rdefs['Note', 'Int'].formula, - 'Any N*100 WHERE X note N') + rdef = self.schema['note100'].rdefs['Note', 'Int'] + self.assertEqual(rdef.formula_select.as_string(), + 'Any (N * 100) WHERE X note N, X is Note') + self.assertEqual(rdef.formula, 'Any N*100 WHERE X note N') def test_computed_attribute_sync_schema_props_perms_rdef(self): self.setup_add_score() diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/unittest_postgres.py --- a/server/test/unittest_postgres.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/unittest_postgres.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,8 +19,11 @@ from datetime import datetime from threading import Thread +from six.moves import range + from logilab.common.testlib import SkipTest +import logilab.database as lgdb from cubicweb import ValidationError from cubicweb.devtools import PostgresApptestConfiguration, startpgcluster, stoppgcluster from cubicweb.devtools.testlib import CubicWebTC @@ -49,13 +52,21 @@ class PostgresFTITC(CubicWebTC): configcls = PostgresTimeoutConfiguration + @classmethod + def setUpClass(cls): + cls.orig_connect_hooks = lgdb.SQL_CONNECT_HOOKS['postgres'][:] + + @classmethod + def tearDownClass(cls): + lgdb.SQL_CONNECT_HOOKS['postgres'] = cls.orig_connect_hooks + def test_eid_range(self): # concurrent allocation of eid ranges source = self.session.repo.sources_by_uri['system'] range1 = [] range2 = [] def allocate_eid_ranges(session, target): - for x in xrange(1, 10): + for x in range(1, 10): eid = source.create_eid(session, count=x) target.extend(range(eid-x, eid)) @@ -116,20 +127,22 @@ 'WHERE X has_text "cubicweb"').rows, [[c1.eid,], [c3.eid,], 
[c2.eid,]]) - def test_tz_datetime(self): with self.admin_access.repo_cnx() as cnx: - cnx.execute("INSERT Personne X: X nom 'bob', X tzdatenaiss %(date)s", - {'date': datetime(1977, 6, 7, 2, 0, tzinfo=FixedOffset(1))}) + bob = cnx.create_entity('Personne', nom=u'bob', + tzdatenaiss=datetime(1977, 6, 7, 2, 0, tzinfo=FixedOffset(1))) datenaiss = cnx.execute("Any XD WHERE X nom 'bob', X tzdatenaiss XD")[0][0] - self.assertEqual(datenaiss.tzinfo, None) + self.assertIsNotNone(datenaiss.tzinfo) self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 1, 0)) cnx.commit() - cnx.execute("INSERT Personne X: X nom 'boby', X tzdatenaiss %(date)s", - {'date': datetime(1977, 6, 7, 2, 0)}) + cnx.create_entity('Personne', nom=u'boby', + tzdatenaiss=datetime(1977, 6, 7, 2, 0)) datenaiss = cnx.execute("Any XD WHERE X nom 'boby', X tzdatenaiss XD")[0][0] - self.assertEqual(datenaiss.tzinfo, None) + self.assertIsNotNone(datenaiss.tzinfo) self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 2, 0)) + rset = cnx.execute("Any X WHERE X tzdatenaiss %(d)s", + {'d': datetime(1977, 6, 7, 2, 0, tzinfo=FixedOffset(1))}) + self.assertEqual(rset.rows, [[bob.eid]]) def test_constraint_validationerror(self): with self.admin_access.repo_cnx() as cnx: diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/unittest_querier.py --- a/server/test/unittest_querier.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/unittest_querier.py Thu Jun 16 14:19:20 2016 +0200 @@ -21,8 +21,12 @@ from datetime import date, datetime, timedelta, tzinfo +import pytz + +from six import PY2, integer_types, binary_type, text_type + from logilab.common.testlib import TestCase, unittest_main -from rql import BadRQLQuery, RQLSyntaxError +from rql import BadRQLQuery from cubicweb import QueryError, Unauthorized, Binary from cubicweb.server.sqlutils import SQL_PREFIX @@ -32,6 +36,7 @@ from cubicweb.devtools.testlib import CubicWebTC from cubicweb.devtools.repotest import tuplify, BaseQuerierTC + class FixedOffset(tzinfo): def __init__(self, hours=0): self.hours = hours @@ -129,8 +134,8 @@ def assertRQLEqual(self, expected, got): from rql import parse - self.assertMultiLineEqual(unicode(parse(expected)), - unicode(parse(got))) + self.assertMultiLineEqual(text_type(parse(expected)), + text_type(parse(got))) def test_preprocess_security(self): s = self.user_groups_session('users') @@ -178,46 +183,46 @@ ' Comment, Division, Email, EmailPart, EmailThread, ExternalUri, File, Folder, ' ' Frozable, Note, Old, Personne, RQLExpression, Societe, State, SubDivision, ' ' SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)') - self.assertListEqual(sorted(solutions), - sorted([{'X': 'BaseTransition', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Bookmark', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Card', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Comment', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Division', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWCache', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWComputedRType', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWConstraint', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWConstraintType', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWEType', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWAttribute', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWGroup', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWRelation', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWPermission', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWProperty', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWRType', 'ETN': 
'String', 'ET': 'CWEType'}, - {'X': 'CWSource', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWUniqueTogetherConstraint', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWUser', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Email', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'EmailPart', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'EmailThread', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'ExternalUri', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'File', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Folder', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Frozable', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Note', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Old', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Personne', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'RQLExpression', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Societe', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'State', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'SubDivision', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'SubWorkflowExitPoint', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Tag', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Transition', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'TrInfo', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Workflow', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'WorkflowTransition', 'ETN': 'String', 'ET': 'CWEType'}])) + self.assertCountEqual(solutions, + [{'X': 'BaseTransition', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Bookmark', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Card', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Comment', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Division', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWCache', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWComputedRType', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWConstraint', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWConstraintType', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWEType', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWAttribute', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWGroup', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWRelation', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWPermission', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWProperty', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWRType', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWSource', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWUniqueTogetherConstraint', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWUser', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Email', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'EmailPart', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'EmailThread', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'ExternalUri', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'File', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Folder', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Frozable', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Note', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Old', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Personne', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'RQLExpression', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Societe', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'State', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'SubDivision', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'SubWorkflowExitPoint', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Tag', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Transition', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'TrInfo', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'Workflow', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'WorkflowTransition', 'ETN': 'String', 'ET': 
'CWEType'}]) rql, solutions = partrqls[2] self.assertEqual(rql, 'Any ETN,X WHERE X is ET, ET name ETN, EXISTS(%(D)s use_email X), ' @@ -263,8 +268,9 @@ self.assertEqual(rset.description[0][0], 'Datetime') rset = self.qexecute('Any %(x)s', {'x': 1}) self.assertEqual(rset.description[0][0], 'Int') - rset = self.qexecute('Any %(x)s', {'x': 1L}) - self.assertEqual(rset.description[0][0], 'Int') + if PY2: + rset = self.qexecute('Any %(x)s', {'x': long(1)}) + self.assertEqual(rset.description[0][0], 'Int') rset = self.qexecute('Any %(x)s', {'x': True}) self.assertEqual(rset.description[0][0], 'Boolean') rset = self.qexecute('Any %(x)s', {'x': 1.0}) @@ -307,10 +313,6 @@ setUpClass = classmethod(setUpClass) tearDownClass = classmethod(tearDownClass) - def test_encoding_pb(self): - self.assertRaises(RQLSyntaxError, self.qexecute, - 'Any X WHERE X is CWRType, X name "öwned_by"') - def test_unknown_eid(self): # should return an empty result set self.assertFalse(self.qexecute('Any X WHERE X eid 99999999')) @@ -318,15 +320,15 @@ def test_typed_eid(self): # should return an empty result set rset = self.qexecute('Any X WHERE X eid %(x)s', {'x': '1'}) - self.assertIsInstance(rset[0][0], (int, long)) + self.assertIsInstance(rset[0][0], integer_types) def test_bytes_storage(self): feid = self.qexecute('INSERT File X: X data_name "foo.pdf", ' 'X data_format "text/plain", X data %(data)s', - {'data': Binary("xxx")})[0][0] + {'data': Binary(b"xxx")})[0][0] fdata = self.qexecute('Any D WHERE X data D, X eid %(x)s', {'x': feid})[0][0] self.assertIsInstance(fdata, Binary) - self.assertEqual(fdata.getvalue(), 'xxx') + self.assertEqual(fdata.getvalue(), b'xxx') # selection queries tests ################################################# @@ -849,8 +851,15 @@ self.assertIsInstance(rset.rows[0][0], datetime) rset = self.qexecute('Tag X WHERE X creation_date TODAY') self.assertEqual(len(rset.rows), 2) - rset = self.qexecute('Any MAX(D) WHERE X is Tag, X creation_date D') + + def test_sqlite_patch(self): + """this tests the monkey-patch done by sqlutils._install_sqlite_querier_patch""" + self.qexecute("INSERT Personne X: X nom 'bidule', X datenaiss NOW, X tzdatenaiss NOW") + rset = self.qexecute('Any MAX(D) WHERE X is Personne, X datenaiss D') self.assertIsInstance(rset[0][0], datetime) + rset = self.qexecute('Any MAX(D) WHERE X is Personne, X tzdatenaiss D') + self.assertIsInstance(rset[0][0], datetime) + self.assertEqual(rset[0][0].tzinfo, pytz.utc) def test_today(self): self.qexecute("INSERT Tag X: X name 'bidule', X creation_date TODAY") @@ -886,18 +895,18 @@ def test_select_constant(self): rset = self.qexecute('Any X, "toto" ORDERBY X WHERE X is CWGroup') self.assertEqual(rset.rows, - map(list, zip((2,3,4,5), ('toto','toto','toto','toto',)))) - self.assertIsInstance(rset[0][1], unicode) + [list(x) for x in zip((2,3,4,5), ('toto','toto','toto','toto',))]) + self.assertIsInstance(rset[0][1], text_type) self.assertEqual(rset.description, - zip(('CWGroup',
'CWGroup', 'CWGroup', 'CWGroup'), - ('String', 'String', 'String', 'String',))) + list(zip(('CWGroup', 'CWGroup', 'CWGroup', 'CWGroup'), + ('String', 'String', 'String', 'String',)))) rset = self.qexecute('Any X,GN WHERE X is CWUser, G is CWGroup, X login "syt", ' 'X in_group G, G name GN') @@ -1015,7 +1024,7 @@ self.assertEqual(len(rset.rows), 1) self.assertEqual(rset.description, [('Personne',)]) rset = self.qexecute('Personne X WHERE X nom "bidule"') - self.assert_(rset.rows) + self.assertTrue(rset.rows) self.assertEqual(rset.description, [('Personne',)]) def test_insert_1_multiple(self): @@ -1029,20 +1038,20 @@ rset = self.qexecute("INSERT Personne X, Personne Y: X nom 'bidule', Y nom 'tutu'") self.assertEqual(rset.description, [('Personne', 'Personne')]) rset = self.qexecute('Personne X WHERE X nom "bidule" or X nom "tutu"') - self.assert_(rset.rows) + self.assertTrue(rset.rows) self.assertEqual(rset.description, [('Personne',), ('Personne',)]) def test_insert_3(self): self.qexecute("INSERT Personne X: X nom Y WHERE U login 'admin', U login Y") rset = self.qexecute('Personne X WHERE X nom "admin"') - self.assert_(rset.rows) + self.assertTrue(rset.rows) self.assertEqual(rset.description, [('Personne',)]) def test_insert_4(self): self.qexecute("INSERT Societe Y: Y nom 'toto'") self.qexecute("INSERT Personne X: X nom 'bidule', X travaille Y WHERE Y nom 'toto'") rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') - self.assert_(rset.rows) + self.assertTrue(rset.rows) self.assertEqual(rset.description, [('Personne', 'Societe',)]) def test_insert_4bis(self): @@ -1057,17 +1066,17 @@ def test_insert_4ter(self): peid = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0] seid = self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s", - {'x': unicode(peid)})[0][0] + {'x': text_type(peid)})[0][0] self.assertEqual(len(self.qexecute('Any X, Y WHERE X travaille Y')), 1) self.qexecute("INSERT Personne X: X nom 'chouette', X travaille Y WHERE Y eid %(x)s", - {'x': unicode(seid)}) + {'x': text_type(seid)}) self.assertEqual(len(self.qexecute('Any X, Y WHERE X travaille Y')), 2) def test_insert_5(self): self.qexecute("INSERT Personne X: X nom 'bidule'") self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X nom 'bidule'") rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') - self.assert_(rset.rows) + self.assertTrue(rset.rows) self.assertEqual(rset.description, [('Personne', 'Societe',)]) def test_insert_5bis(self): @@ -1075,20 +1084,20 @@ self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s", {'x': peid}) rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') - self.assert_(rset.rows) + self.assertTrue(rset.rows) self.assertEqual(rset.description, [('Personne', 'Societe',)]) def test_insert_6(self): self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto', X travaille Y") rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') - self.assert_(rset.rows) + self.assertTrue(rset.rows) self.assertEqual(rset.description, [('Personne', 'Societe',)]) def test_insert_7(self): self.qexecute("INSERT Personne X, Societe Y: X nom N, Y nom 'toto', " "X travaille Y WHERE U login 'admin', U login N") rset = self.qexecute('Any X, Y WHERE X nom "admin", Y nom "toto", X travaille Y') - self.assert_(rset.rows) + self.assertTrue(rset.rows) self.assertEqual(rset.description, [('Personne', 'Societe',)]) def 
test_insert_7_2(self): @@ -1103,7 +1112,7 @@ self.qexecute("INSERT Societe Y, Personne X: Y nom N, X nom 'toto', X travaille Y " "WHERE U login 'admin', U login N") rset = self.qexecute('Any X, Y WHERE X nom "toto", Y nom "admin", X travaille Y') - self.assert_(rset.rows) + self.assertTrue(rset.rows) self.assertEqual(rset.description, [('Personne', 'Societe',)]) def test_insert_9(self): @@ -1267,7 +1276,7 @@ rset = self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'") eid1, eid2 = rset[0][0], rset[0][1] self.qexecute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s", - {'x': unicode(eid1), 'y': unicode(eid2)}) + {'x': text_type(eid1), 'y': text_type(eid2)}) rset = self.qexecute('Any X, Y WHERE X travaille Y') self.assertEqual(len(rset.rows), 1) @@ -1317,7 +1326,7 @@ eid1, eid2 = rset[0][0], rset[0][1] rset = self.qexecute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s, " "NOT EXISTS(Z ecrit_par X)", - {'x': unicode(eid1), 'y': unicode(eid2)}) + {'x': text_type(eid1), 'y': text_type(eid2)}) self.assertEqual(tuplify(rset.rows), [(eid1, eid2)]) def test_update_query_error(self): @@ -1364,7 +1373,7 @@ cursor = cnx.cnxset.cu cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'" % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX)) - passwd = str(cursor.fetchone()[0]) + passwd = binary_type(cursor.fetchone()[0]) self.assertEqual(passwd, crypt_password('toto', passwd)) rset = self.qexecute("Any X WHERE X is CWUser, X login 'bob', X upassword %(pwd)s", {'pwd': Binary(passwd)}) @@ -1377,11 +1386,11 @@ {'pwd': 'toto'}) self.assertEqual(rset.description[0][0], 'CWUser') rset = cnx.execute("SET X upassword %(pwd)s WHERE X is CWUser, X login 'bob'", - {'pwd': 'tutu'}) + {'pwd': b'tutu'}) cursor = cnx.cnxset.cu cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'" % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX)) - passwd = str(cursor.fetchone()[0]) + passwd = binary_type(cursor.fetchone()[0]) self.assertEqual(passwd, crypt_password('tutu', passwd)) rset = cnx.execute("Any X WHERE X is CWUser, X login 'bob', X upassword %(pwd)s", {'pwd': Binary(passwd)}) @@ -1394,7 +1403,7 @@ self.qexecute("INSERT Personne X: X nom 'bob', X tzdatenaiss %(date)s", {'date': datetime(1977, 6, 7, 2, 0, tzinfo=FixedOffset(1))}) datenaiss = self.qexecute("Any XD WHERE X nom 'bob', X tzdatenaiss XD")[0][0] - self.assertEqual(datenaiss.tzinfo, None) + self.assertIsNotNone(datenaiss.tzinfo) self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 1, 0)) def test_tz_datetime_cache_nonregr(self): diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/unittest_repository.py --- a/server/test/unittest_repository.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/unittest_repository.py Thu Jun 16 14:19:20 2016 +0200 @@ -22,6 +22,8 @@ import time import logging +from six.moves import range + from yams.constraints import UniqueConstraint from yams import register_base_type, unregister_base_type @@ -77,7 +79,7 @@ def test_connect(self): cnxid = self.repo.connect(self.admlogin, password=self.admpassword) - self.assert_(cnxid) + self.assertTrue(cnxid) self.repo.close(cnxid) self.assertRaises(AuthenticationError, self.repo.connect, self.admlogin, password='nimportnawak') @@ -100,7 +102,7 @@ cnx.commit() repo = self.repo cnxid = repo.connect(u"barnabé", password=u"héhéhé".encode('UTF8')) - self.assert_(cnxid) + self.assertTrue(cnxid) repo.close(cnxid) def test_rollback_on_execute_validation_error(self): @@ -145,15 +147,9 @@ def test_close(self): repo = self.repo cnxid = repo.connect(self.admlogin, 
password=self.admpassword) - self.assert_(cnxid) + self.assertTrue(cnxid) repo.close(cnxid) - def test_check_session(self): - repo = self.repo - cnxid = repo.connect(self.admlogin, password=self.admpassword) - self.assertIsInstance(repo.check_session(cnxid), float) - repo.close(cnxid) - self.assertRaises(BadConnectionId, repo.check_session, cnxid) def test_initial_schema(self): schema = self.repo.schema @@ -192,7 +188,7 @@ constraints = schema.rschema('relation_type').rdef('CWAttribute', 'CWRType').constraints self.assertEqual(len(constraints), 1) cstr = constraints[0] - self.assert_(isinstance(cstr, RQLConstraint)) + self.assertIsInstance(cstr, RQLConstraint) self.assertEqual(cstr.expression, 'O final TRUE') ownedby = schema.rschema('owned_by') @@ -337,6 +333,15 @@ self.assertEqual(cm.exception.errors, {'hip': 'hop'}) cnx.rollback() + def test_attribute_cache(self): + with self.admin_access.repo_cnx() as cnx: + bk = cnx.create_entity('Bookmark', title=u'index', path=u'/') + cnx.commit() + self.assertEqual(bk.title, 'index') + bk.cw_set(title=u'root') + self.assertEqual(bk.title, 'root') + cnx.commit() + self.assertEqual(bk.title, 'root') class SchemaDeserialTC(CubicWebTC): @@ -589,11 +594,11 @@ with self.admin_access.repo_cnx() as cnx: personnes = [] t0 = time.time() - for i in xrange(2000): + for i in range(2000): p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') personnes.append(p) abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M') - for j in xrange(0, 2000, 100): + for j in range(0, 2000, 100): abraham.cw_set(personne_composite=personnes[j:j+100]) t1 = time.time() self.info('creation: %.2gs', (t1 - t0)) @@ -610,7 +615,7 @@ def test_add_relation_non_inlined(self): with self.admin_access.repo_cnx() as cnx: personnes = [] - for i in xrange(2000): + for i in range(2000): p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') personnes.append(p) cnx.commit() @@ -619,7 +624,7 @@ personne_composite=personnes[:100]) t1 = time.time() self.info('creation: %.2gs', (t1 - t0)) - for j in xrange(100, 2000, 100): + for j in range(100, 2000, 100): abraham.cw_set(personne_composite=personnes[j:j+100]) t2 = time.time() self.info('more relations: %.2gs', (t2-t1)) @@ -630,7 +635,7 @@ def test_add_relation_inlined(self): with self.admin_access.repo_cnx() as cnx: personnes = [] - for i in xrange(2000): + for i in range(2000): p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') personnes.append(p) cnx.commit() @@ -639,7 +644,7 @@ personne_inlined=personnes[:100]) t1 = time.time() self.info('creation: %.2gs', (t1 - t0)) - for j in xrange(100, 2000, 100): + for j in range(100, 2000, 100): abraham.cw_set(personne_inlined=personnes[j:j+100]) t2 = time.time() self.info('more relations: %.2gs', (t2-t1)) @@ -652,7 +657,7 @@ """ to be compared with test_session_add_relations""" with self.admin_access.repo_cnx() as cnx: personnes = [] - for i in xrange(2000): + for i in range(2000): p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') personnes.append(p) abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M') @@ -669,7 +674,7 @@ """ to be compared with test_session_add_relation""" with self.admin_access.repo_cnx() as cnx: personnes = [] - for i in xrange(2000): + for i in range(2000): p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') personnes.append(p) abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', 
sexe=u'M') @@ -686,7 +691,7 @@ """ to be compared with test_session_add_relations""" with self.admin_access.repo_cnx() as cnx: personnes = [] - for i in xrange(2000): + for i in range(2000): p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') personnes.append(p) abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M') @@ -703,7 +708,7 @@ """ to be compared with test_session_add_relation""" with self.admin_access.repo_cnx() as cnx: personnes = [] - for i in xrange(2000): + for i in range(2000): p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M') personnes.append(p) abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M') diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/unittest_rql2sql.py --- a/server/test/unittest_rql2sql.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/unittest_rql2sql.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,6 +16,7 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . """unit tests for module cubicweb.server.sources.rql2sql""" +from __future__ import print_function import sys import os @@ -1026,10 +1027,10 @@ FROM cw_Societe AS _S, travaille_relation AS rel_travaille0 WHERE rel_travaille0.eid_to=_S.cw_eid AND _S.cw_tel=_S.cw_fax'''), - ("Personne P where X eid 0, X creation_date D, P datenaiss < D, X is Affaire", + ("Personne P where X eid 0, X creation_date D, P tzdatenaiss < D, X is Affaire", '''SELECT _P.cw_eid FROM cw_Affaire AS _X, cw_Personne AS _P -WHERE _X.cw_eid=0 AND _P.cw_datenaiss<_X.cw_creation_date'''), +WHERE _X.cw_eid=0 AND _P.cw_tzdatenaiss<_X.cw_creation_date'''), ("Any N,T WHERE N is Note, N type T;", '''SELECT _N.cw_eid, _N.cw_type @@ -1246,13 +1247,13 @@ except Exception as ex: if 'r' in locals(): try: - print (r%args).strip() + print((r%args).strip()) except KeyError: - print 'strange, missing substitution' - print r, nargs - print '!=' - print sql.strip() - print 'RQL:', rql + print('strange, missing substitution') + print(r, nargs) + print('!=') + print(sql.strip()) + print('RQL:', rql) raise def _parse(self, rqls): @@ -1269,11 +1270,11 @@ r, args, cbs = self.o.generate(rqlst, args) self.assertEqual((r.strip(), args), sql) except Exception as ex: - print rql + print(rql) if 'r' in locals(): - print r.strip() - print '!=' - print sql[0].strip() + print(r.strip()) + print('!=') + print(sql[0].strip()) raise return diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/unittest_rqlannotation.py --- a/server/test/unittest_rqlannotation.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/unittest_rqlannotation.py Thu Jun 16 14:19:20 2016 +0200 @@ -64,7 +64,7 @@ rqlst = self._prepare(cnx, 'Any A,B,C WHERE A eid 12,A comment B, ' 'A ?wf_info_for C') self.assertEqual(rqlst.defined_vars['A']._q_invariant, False) - self.assert_(rqlst.defined_vars['B'].stinfo['attrvar']) + self.assertTrue(rqlst.defined_vars['B'].stinfo['attrvar']) self.assertEqual(rqlst.defined_vars['C']._q_invariant, False) self.assertEqual(rqlst.solutions, [{'A': 'TrInfo', 'B': 'String', 'C': 'Affaire'}, {'A': 'TrInfo', 'B': 'String', 'C': 'CWUser'}, @@ -87,7 +87,7 @@ 'Y nom NX, X eid XE, not Y eid XE') self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) - self.assert_(rqlst.defined_vars['XE'].stinfo['attrvar']) + self.assertTrue(rqlst.defined_vars['XE'].stinfo['attrvar']) def test_0_8(self): with self.session.new_cnx() as cnx: diff -r a4fcee1e9789 -r 
19fcce6dc6d1 server/test/unittest_schemaserial.py --- a/server/test/unittest_schemaserial.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/unittest_schemaserial.py Thu Jun 16 14:19:20 2016 +0200 @@ -292,7 +292,7 @@ {'cardinality': u'?1', 'defaultval': None, 'description': u'', - 'extra_props': '{"jungle_speed": 42}', + 'extra_props': b'{"jungle_speed": 42}', 'formula': None, 'indexed': False, 'oe': None, diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/unittest_security.py --- a/server/test/unittest_security.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/unittest_security.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,6 +17,8 @@ # with CubicWeb. If not, see . """functional tests for server'security""" +from six.moves import range + from logilab.common.testlib import unittest_main from cubicweb.devtools.testlib import CubicWebTC @@ -33,7 +35,7 @@ with self.admin_access.client_cnx() as cnx: self.create_user(cnx, u'iaminusersgrouponly') hash = _CRYPTO_CTX.encrypt('oldpassword', scheme='des_crypt') - self.create_user(cnx, u'oldpassword', password=Binary(hash)) + self.create_user(cnx, u'oldpassword', password=Binary(hash.encode('ascii'))) class LowLevelSecurityFunctionTC(BaseSecurityTC): @@ -79,17 +81,20 @@ it will be updated on next login """ with self.repo.internal_cnx() as cnx: - oldhash = str(cnx.system_sql("SELECT cw_upassword FROM cw_CWUser " - "WHERE cw_login = 'oldpassword'").fetchone()[0]) + oldhash = cnx.system_sql("SELECT cw_upassword FROM cw_CWUser " + "WHERE cw_login = 'oldpassword'").fetchone()[0] + oldhash = self.repo.system_source.binary_to_str(oldhash) self.repo.close(self.repo.connect('oldpassword', password='oldpassword')) - newhash = str(cnx.system_sql("SELECT cw_upassword FROM cw_CWUser " - "WHERE cw_login = 'oldpassword'").fetchone()[0]) + newhash = cnx.system_sql("SELECT cw_upassword FROM cw_CWUser " + "WHERE cw_login = 'oldpassword'").fetchone()[0] + newhash = self.repo.system_source.binary_to_str(newhash) self.assertNotEqual(oldhash, newhash) - self.assertTrue(newhash.startswith('$6$')) + self.assertTrue(newhash.startswith(b'$6$')) self.repo.close(self.repo.connect('oldpassword', password='oldpassword')) - self.assertEqual(newhash, - str(cnx.system_sql("SELECT cw_upassword FROM cw_CWUser WHERE " - "cw_login = 'oldpassword'").fetchone()[0])) + newnewhash = cnx.system_sql("SELECT cw_upassword FROM cw_CWUser WHERE " + "cw_login = 'oldpassword'").fetchone()[0] + newnewhash = self.repo.system_source.binary_to_str(newnewhash) + self.assertEqual(newhash, newnewhash) class SecurityRewritingTC(BaseSecurityTC): @@ -293,7 +298,7 @@ ueid = self.create_user(cnx, u'user').eid with self.new_access(u'user').repo_cnx() as cnx: cnx.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', - {'x': ueid, 'passwd': 'newpwd'}) + {'x': ueid, 'passwd': b'newpwd'}) cnx.commit() self.repo.close(self.repo.connect('user', password='newpwd')) @@ -302,7 +307,7 @@ ueid = self.create_user(cnx, u'otheruser').eid with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: cnx.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', - {'x': ueid, 'passwd': 'newpwd'}) + {'x': ueid, 'passwd': b'newpwd'}) self.assertRaises(Unauthorized, cnx.commit) # read security test @@ -559,7 +564,7 @@ rset = cnx.execute('CWUser X') self.assertEqual([[anon.eid]], rset.rows) # anonymous user can read groups (necessary to check allowed transitions for instance) - self.assert_(cnx.execute('CWGroup X')) + self.assertTrue(cnx.execute('CWGroup X')) # should only be able to read the anonymous user, not another one 
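Porting note: the unittest_security.py hunks above make the bytes-in/bytes-out convention for password hashes explicit. The passlib hash, which is text, is encoded to ASCII before being wrapped in a Binary, values read back from SQL go through binary_to_str, and prefix checks use byte literals such as b'$6$'. A minimal, self-contained sketch of the same convention, with hashlib standing in for the passlib-based helper (make_hash is illustrative, not CubicWeb API):

    import hashlib

    def make_hash(passwd):
        """Return a password hash as bytes, the type now stored in the database."""
        if not isinstance(passwd, bytes):
            passwd = passwd.encode('utf-8')
        return hashlib.sha512(passwd).hexdigest().encode('ascii')

    stored = make_hash(u'tutu')
    assert isinstance(stored, bytes)
    # comparisons and prefix checks must use byte literals under Python 3
    assert stored == make_hash(b'tutu')
    assert not stored.startswith(b'$6$')  # plain sha512 here, not SHA-512 crypt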
self.assertRaises(Unauthorized, cnx.execute, 'CWUser X WHERE X eid %(x)s', {'x': admineid}) @@ -666,7 +671,7 @@ rset = cnx.execute('Any X, U WHERE X is EmailAddress, U use_email X') msg = ['Preexisting email readable by anon found!'] tmpl = ' - "%s" used by user "%s"' - for i in xrange(len(rset)): + for i in range(len(rset)): email, user = rset.get_entity(i, 0), rset.get_entity(i, 1) msg.append(tmpl % (email.dc_title(), user.dc_title())) raise RuntimeError('\n'.join(msg)) diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/unittest_serverctl.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/unittest_serverctl.py Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,26 @@ +import os.path as osp +import shutil + +from cubicweb.devtools import testlib, ApptestConfiguration +from cubicweb.server.serverctl import _local_dump, DBDumpCommand +from cubicweb.server.serverconfig import ServerConfiguration + +class ServerCTLTC(testlib.CubicWebTC): + def setUp(self): + super(ServerCTLTC, self).setUp() + self.orig_config_for = ServerConfiguration.config_for + config_for = lambda appid: ApptestConfiguration(appid, apphome=self.datadir) + ServerConfiguration.config_for = staticmethod(config_for) + + def tearDown(self): + ServerConfiguration.config_for = self.orig_config_for + super(ServerCTLTC, self).tearDown() + + def test_dump(self): + DBDumpCommand(None).run([self.appid]) + shutil.rmtree(osp.join(self.config.apphome, 'backup')) + + +if __name__ == '__main__': + from unittest import main + main() diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/unittest_storage.py --- a/server/test/unittest_storage.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/unittest_storage.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,12 +17,15 @@ # with CubicWeb. If not, see . """unit tests for module cubicweb.server.sources.storages""" +from six import PY2 + from logilab.common.testlib import unittest_main, tag, Tags from cubicweb.devtools.testlib import CubicWebTC from glob import glob import os import os.path as osp +import sys import shutil import tempfile @@ -57,24 +60,26 @@ def setup_database(self): self.tempdir = tempfile.mkdtemp() bfs_storage = storages.BytesFileSystemStorage(self.tempdir) + self.bfs_storage = bfs_storage storages.set_attribute_storage(self.repo, 'File', 'data', bfs_storage) storages.set_attribute_storage(self.repo, 'BFSSTestable', 'opt_attr', bfs_storage) def tearDown(self): super(StorageTC, self).tearDown() storages.unset_attribute_storage(self.repo, 'File', 'data') + del self.bfs_storage shutil.rmtree(self.tempdir) - def create_file(self, cnx, content='the-data'): + def create_file(self, cnx, content=b'the-data'): return cnx.create_entity('File', data=Binary(content), data_format=u'text/plain', data_name=u'foo.pdf') def fspath(self, cnx, entity): fspath = cnx.execute('Any fspath(D) WHERE F eid %(f)s, F data D', - {'f': entity.eid})[0][0] - return fspath.getvalue() + {'f': entity.eid})[0][0].getvalue() + return fspath if PY2 else fspath.decode('utf-8') def test_bfss_wrong_fspath_usage(self): with self.admin_access.repo_cnx() as cnx: @@ -109,7 +114,7 @@ # add f1 back to the entity cache with req as _cw f1 = req.entity_from_eid(f1.eid) - f1.cw_set(data=Binary('the new data')) + f1.cw_set(data=Binary(b'the new data')) cnx.rollback() self.assertEqual(open(expected_filepath).read(), 'the-data') f1.cw_delete() @@ -132,7 +137,7 @@ with self.admin_access.repo_cnx() as cnx: cnx.transaction_data['fs_importing'] = True filepath = osp.abspath(__file__) - f1 = cnx.create_entity('File', data=Binary(filepath), + f1 = 
cnx.create_entity('File', data=Binary(filepath.encode(sys.getfilesystemencoding())), data_format=u'text/plain', data_name=u'foo') self.assertEqual(self.fspath(cnx, f1), filepath) @@ -185,8 +190,8 @@ self.assertEqual(len(rset), 2) self.assertEqual(rset[0][0], f1.eid) self.assertEqual(rset[1][0], f1.eid) - self.assertEqual(rset[0][1].getvalue(), 'the-data') - self.assertEqual(rset[1][1].getvalue(), 'the-data') + self.assertEqual(rset[0][1].getvalue(), b'the-data') + self.assertEqual(rset[1][1].getvalue(), b'the-data') rset = cnx.execute('Any X,LENGTH(D) WHERE X eid %(x)s, X data D', {'x': f1.eid}) self.assertEqual(len(rset), 1) @@ -212,31 +217,31 @@ with self.admin_access.repo_cnx() as cnx: cnx.transaction_data['fs_importing'] = True filepath = osp.abspath(__file__) - f1 = cnx.create_entity('File', data=Binary(filepath), + f1 = cnx.create_entity('File', data=Binary(filepath.encode(sys.getfilesystemencoding())), data_format=u'text/plain', data_name=u'foo') cw_value = f1.data.getvalue() - fs_value = file(filepath).read() + fs_value = open(filepath, 'rb').read() if cw_value != fs_value: self.fail('cw value %r is different from file content' % cw_value) @tag('update') def test_bfss_update_with_existing_data(self): with self.admin_access.repo_cnx() as cnx: - f1 = cnx.create_entity('File', data=Binary('some data'), + f1 = cnx.create_entity('File', data=Binary(b'some data'), data_format=u'text/plain', data_name=u'foo') # NOTE: do not use cw_set() which would automatically # update f1's local dict. We want the pure rql version to work cnx.execute('SET F data %(d)s WHERE F eid %(f)s', - {'d': Binary('some other data'), 'f': f1.eid}) - self.assertEqual(f1.data.getvalue(), 'some other data') + {'d': Binary(b'some other data'), 'f': f1.eid}) + self.assertEqual(f1.data.getvalue(), b'some other data') cnx.commit() f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0) - self.assertEqual(f2.data.getvalue(), 'some other data') + self.assertEqual(f2.data.getvalue(), b'some other data') @tag('update', 'extension', 'commit') def test_bfss_update_with_different_extension_commited(self): with self.admin_access.repo_cnx() as cnx: - f1 = cnx.create_entity('File', data=Binary('some data'), + f1 = cnx.create_entity('File', data=Binary(b'some data'), data_format=u'text/plain', data_name=u'foo.txt') # NOTE: do not use cw_set() which would automatically # update f1's local dict. We want the pure rql version to work @@ -246,7 +251,7 @@ self.assertEqual(osp.splitext(old_path)[1], '.txt') cnx.execute('SET F data %(d)s, F data_name %(dn)s, ' 'F data_format %(df)s WHERE F eid %(f)s', - {'d': Binary('some other data'), 'f': f1.eid, + {'d': Binary(b'some other data'), 'f': f1.eid, 'dn': u'bar.jpg', 'df': u'image/jpeg'}) cnx.commit() # the new file exists with correct extension @@ -260,7 +265,7 @@ @tag('update', 'extension', 'rollback') def test_bfss_update_with_different_extension_rolled_back(self): with self.admin_access.repo_cnx() as cnx: - f1 = cnx.create_entity('File', data=Binary('some data'), + f1 = cnx.create_entity('File', data=Binary(b'some data'), data_format=u'text/plain', data_name=u'foo.txt') # NOTE: do not use cw_set() which would automatically # update f1's local dict. 
We want the pure rql version to work @@ -271,7 +276,7 @@ self.assertEqual(osp.splitext(old_path)[1], '.txt') cnx.execute('SET F data %(d)s, F data_name %(dn)s, ' 'F data_format %(df)s WHERE F eid %(f)s', - {'d': Binary('some other data'), + {'d': Binary(b'some other data'), 'f': f1.eid, 'dn': u'bar.jpg', 'df': u'image/jpeg'}) @@ -290,7 +295,7 @@ @tag('update', 'NULL') def test_bfss_update_to_None(self): with self.admin_access.repo_cnx() as cnx: - f = cnx.create_entity('Affaire', opt_attr=Binary('toto')) + f = cnx.create_entity('Affaire', opt_attr=Binary(b'toto')) cnx.commit() f.cw_set(opt_attr=None) cnx.commit() @@ -298,17 +303,17 @@ @tag('fs_importing', 'update') def test_bfss_update_with_fs_importing(self): with self.admin_access.repo_cnx() as cnx: - f1 = cnx.create_entity('File', data=Binary('some data'), + f1 = cnx.create_entity('File', data=Binary(b'some data'), data_format=u'text/plain', data_name=u'foo') old_fspath = self.fspath(cnx, f1) cnx.transaction_data['fs_importing'] = True new_fspath = osp.join(self.tempdir, 'newfile.txt') - file(new_fspath, 'w').write('the new data') + open(new_fspath, 'w').write('the new data') cnx.execute('SET F data %(d)s WHERE F eid %(f)s', - {'d': Binary(new_fspath), 'f': f1.eid}) + {'d': Binary(new_fspath.encode(sys.getfilesystemencoding())), 'f': f1.eid}) cnx.commit() - self.assertEqual(f1.data.getvalue(), 'the new data') + self.assertEqual(f1.data.getvalue(), b'the new data') self.assertEqual(self.fspath(cnx, f1), new_fspath) self.assertFalse(osp.isfile(old_fspath)) diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/unittest_undo.py --- a/server/test/unittest_undo.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/unittest_undo.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,6 +17,8 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . +from six import text_type + from cubicweb import ValidationError from cubicweb.devtools.testlib import CubicWebTC import cubicweb.server.session @@ -255,7 +257,7 @@ "%s doesn't exist anymore." % g.eid]) with self.assertRaises(ValidationError) as cm: cnx.commit() - cm.exception.translate(unicode) + cm.exception.translate(text_type) self.assertEqual(cm.exception.entity, self.totoeid) self.assertEqual(cm.exception.errors, {'in_group-subject': u'at least one relation in_group is ' @@ -461,7 +463,7 @@ # problem occurs in string manipulation for python < 2.6 def test___unicode__method(self): u = _UndoException(u"voilà") - self.assertIsInstance(unicode(u), unicode) + self.assertIsInstance(text_type(u), text_type) if __name__ == '__main__': diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/test/unittest_utils.py --- a/server/test/unittest_utils.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/test/unittest_utils.py Thu Jun 16 14:19:20 2016 +0200 @@ -26,13 +26,13 @@ def test_crypt(self): for hash in ( utils.crypt_password('xxx'), # default sha512 - 'ab$5UsKFxRKKN.d8iBIFBnQ80', # custom md5 - 'ab4Vlm81ZUHlg', # DES + b'ab$5UsKFxRKKN.d8iBIFBnQ80', # custom md5 + b'ab4Vlm81ZUHlg', # DES ): self.assertEqual(utils.crypt_password('xxx', hash), hash) self.assertEqual(utils.crypt_password(u'xxx', hash), hash) - self.assertEqual(utils.crypt_password(u'xxx', unicode(hash)), hash) - self.assertEqual(utils.crypt_password('yyy', hash), '') + self.assertEqual(utils.crypt_password(u'xxx', hash.decode('ascii')), hash.decode('ascii')) + self.assertEqual(utils.crypt_password('yyy', hash), b'') # accept any password for empty hashes (is it a good idea?) 
self.assertEqual(utils.crypt_password('xxx', ''), '') diff -r a4fcee1e9789 -r 19fcce6dc6d1 server/utils.py --- a/server/utils.py Thu Mar 24 09:43:25 2016 +0100 +++ b/server/utils.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -16,6 +16,7 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . """Some utilities for the CubicWeb server.""" +from __future__ import print_function __docformat__ = "restructuredtext en" @@ -24,9 +25,14 @@ from threading import Timer, Thread from getpass import getpass +from six import PY2, text_type +from six.moves import input + from passlib.utils import handlers as uh, to_hash_str from passlib.context import CryptContext +from logilab.common.deprecation import deprecated + from cubicweb.md5crypt import crypt as md5crypt @@ -60,7 +66,7 @@ """return the encrypted password using the given salt or a generated one """ if salt is None: - return _CRYPTO_CTX.encrypt(passwd) + return _CRYPTO_CTX.encrypt(passwd).encode('ascii') # empty hash, accept any password for backwards compat if salt == '': return salt @@ -70,18 +76,16 @@ except ValueError: # e.g. couldn't identify hash pass # wrong password - return '' + return b'' - +@deprecated('[3.22] no more necessary, directly get eschema.eid') def eschema_eid(cnx, eschema): - """get eid of the CWEType entity for the given yams type. You should use - this because when schema has been loaded from the file-system, not from the - database, (e.g. during tests), eschema.eid is not set. + """get eid of the CWEType entity for the given yams type. + + This used to be necessary because when the schema has been loaded from the + file-system, not from the database, (e.g. during tests), eschema.eid was + not set. 
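The unittest_utils.py crypt test and the server/utils.py hunk above pin down the new crypt_password contract: hashes go in and come out as bytes (the passlib result is encoded to ASCII), verification works by re-encrypting the candidate with the stored hash as salt, and a wrong password now yields empty bytes rather than an empty native str. Usage, following the test:

    from cubicweb.server.utils import crypt_password

    stored = crypt_password(u'xxx')          # bytes, sha512-crypt by default
    assert isinstance(stored, bytes)
    # verification re-encrypts the candidate using the stored hash as salt
    assert crypt_password(u'xxx', stored) == stored
    # a wrong password now yields empty bytes, not an empty native str
    assert crypt_password(u'yyy', stored) == b''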
""" - if eschema.eid is None: - eschema.eid = cnx.execute( - 'Any X WHERE X is CWEType, X name %(name)s', - {'name': unicode(eschema)})[0][0] return eschema.eid @@ -92,17 +96,18 @@ passwdmsg='password'): if not user: if msg: - print msg + print(msg) while not user: - user = raw_input('login: ') - user = unicode(user, sys.stdin.encoding) + user = input('login: ') + if PY2: + user = unicode(user, sys.stdin.encoding) passwd = getpass('%s: ' % passwdmsg) if confirm: while True: passwd2 = getpass('confirm password: ') if passwd == passwd2: break - print 'password doesn\'t match' + print('password doesn\'t match') passwd = getpass('password: ') # XXX decode password using stdin encoding then encode it using appl'encoding return user, passwd @@ -236,4 +241,3 @@ from logging import getLogger from cubicweb import set_log_methods set_log_methods(TasksManager, getLogger('cubicweb.repository')) - diff -r a4fcee1e9789 -r 19fcce6dc6d1 setup.py --- a/setup.py Thu Mar 24 09:43:25 2016 +0100 +++ b/setup.py Thu Jun 16 14:19:20 2016 +0200 @@ -42,7 +42,7 @@ from __pkginfo__ import modname, version, license, description, web, \ author, author_email -long_description = file('README').read() +long_description = open('README').read() # import optional features import __pkginfo__ @@ -51,7 +51,7 @@ for entry in ("__depends__",): # "__recommends__"): requires.update(getattr(__pkginfo__, entry, {})) install_requires = [("%s %s" % (d, v and v or "")).strip() - for d, v in requires.iteritems()] + for d, v in requires.items()] else: install_requires = [] diff -r a4fcee1e9789 -r 19fcce6dc6d1 skeleton/DISTNAME.spec.tmpl --- a/skeleton/DISTNAME.spec.tmpl Thu Mar 24 09:43:25 2016 +0100 +++ b/skeleton/DISTNAME.spec.tmpl Thu Jun 16 14:19:20 2016 +0200 @@ -21,6 +21,7 @@ BuildRequires: %%{python} %%{python}-setuptools Requires: cubicweb >= %(version)s +Requires: %%{python}-six >= 1.4.0 %%description %(longdesc)s @@ -43,4 +44,4 @@ %%files %%defattr(-, root, root) -/* +%%{_prefix}/share/cubicweb/cubes/* diff -r a4fcee1e9789 -r 19fcce6dc6d1 skeleton/debian/control.tmpl --- a/skeleton/debian/control.tmpl Thu Mar 24 09:43:25 2016 +0100 +++ b/skeleton/debian/control.tmpl Thu Jun 16 14:19:20 2016 +0200 @@ -12,6 +12,7 @@ Architecture: all Depends: cubicweb-common (>= %(version)s), + python-six (>= 1.4.0), ${python:Depends}, ${misc:Depends}, Description: %(shortdesc)s diff -r a4fcee1e9789 -r 19fcce6dc6d1 skeleton/debian/rules --- a/skeleton/debian/rules Thu Mar 24 09:43:25 2016 +0100 +++ b/skeleton/debian/rules Thu Jun 16 14:19:20 2016 +0200 @@ -5,10 +5,5 @@ %: dh $@ --with python2 -override_dh_auto_install: - dh_auto_install - # remove generated .egg-info file - rm -rf debian/*/usr/lib/python* - override_dh_python2: dh_python2 -i /usr/share/cubicweb diff -r a4fcee1e9789 -r 19fcce6dc6d1 skeleton/setup.py --- a/skeleton/setup.py Thu Mar 24 09:43:25 2016 +0100 +++ b/skeleton/setup.py Thu Jun 16 14:19:20 2016 +0200 @@ -4,7 +4,7 @@ # copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # -# This file is part of CubicWeb tag cube. +# This file is part of CubicWeb. 
# # CubicWeb is free software: you can redistribute it and/or modify it under the # terms of the GNU Lesser General Public License as published by the Free @@ -44,7 +44,7 @@ author, author_email, classifiers if exists('README'): - long_description = file('README').read() + long_description = open('README').read() else: long_description = '' @@ -55,7 +55,7 @@ for entry in ("__depends__",): # "__recommends__"): requires.update(getattr(__pkginfo__, entry, {})) install_requires = [("%s %s" % (d, v and v or "")).strip() - for d, v in requires.iteritems()] + for d, v in requires.items()] else: install_requires = [] diff -r a4fcee1e9789 -r 19fcce6dc6d1 sobjects/__init__.py --- a/sobjects/__init__.py Thu Mar 24 09:43:25 2016 +0100 +++ b/sobjects/__init__.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,11 +20,11 @@ import os.path as osp def registration_callback(vreg): - vreg.register_all(globals().itervalues(), __name__) + vreg.register_all(globals().values(), __name__) global URL_MAPPING URL_MAPPING = {} if vreg.config.apphome: url_mapping_file = osp.join(vreg.config.apphome, 'urlmapping.py') if osp.exists(url_mapping_file): - URL_MAPPING = eval(file(url_mapping_file).read()) + URL_MAPPING = eval(open(url_mapping_file).read()) vreg.info('using url mapping %s from %s', URL_MAPPING, url_mapping_file) diff -r a4fcee1e9789 -r 19fcce6dc6d1 sobjects/cwxmlparser.py --- a/sobjects/cwxmlparser.py Thu Mar 24 09:43:25 2016 +0100 +++ b/sobjects/cwxmlparser.py Thu Jun 16 14:19:20 2016 +0200 @@ -32,9 +32,12 @@ """ from datetime import datetime, time -import urlparse import urllib +from six import text_type +from six.moves.urllib.parse import urlparse, urlunparse, parse_qs, urlencode + +import pytz from logilab.common.date import todate, totime from logilab.common.textutils import splitstrip, text_to_dict from logilab.common.decorators import classproperty @@ -50,7 +53,7 @@ # XXX see cubicweb.cwvreg.YAMS_TO_PY # XXX see cubicweb.web.views.xmlrss.SERIALIZERS DEFAULT_CONVERTERS = BASE_CONVERTERS.copy() -DEFAULT_CONVERTERS['String'] = unicode +DEFAULT_CONVERTERS['String'] = text_type DEFAULT_CONVERTERS['Password'] = lambda x: x.encode('utf8') def convert_date(ustr): return todate(datetime.strptime(ustr, '%Y-%m-%d')) @@ -63,7 +66,11 @@ # XXX handle timezone, though this will be enough as TZDatetime are # serialized without time zone by default (UTC time). See # cw.web.views.xmlrss.SERIALIZERS. 
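The next hunk addresses exactly that XXX: TZDatetime values get a dedicated converter that attaches the UTC timezone after parsing, so entities fetched through the parser now carry tz-aware datetimes (matching the earlier querier test, which now asserts that tzinfo is no longer None). The shape of the conversion, with the strptime format assumed for illustration:

    from datetime import datetime

    import pytz

    def convert_tzdatetime(ustr):
        """Parse the naive UTC serialization, then attach the UTC timezone."""
        date = datetime.strptime(ustr, '%Y-%m-%d %H:%M:%S')
        return date.replace(tzinfo=pytz.utc)

    dt = convert_tzdatetime(u'2011-01-25 14:14:06')
    assert dt.tzinfo is pytz.utc

Note that replace(tzinfo=...) is only safe for fixed-offset zones such as UTC; for named zones pytz recommends localize().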
-DEFAULT_CONVERTERS['TZDatetime'] = convert_datetime +def convert_tzdatetime(ustr): + date = convert_datetime(ustr) + date = date.replace(tzinfo=pytz.utc) + return date +DEFAULT_CONVERTERS['TZDatetime'] = convert_tzdatetime def convert_time(ustr): return totime(datetime.strptime(ustr, '%H:%M:%S')) DEFAULT_CONVERTERS['Time'] = convert_time @@ -124,7 +131,7 @@ def list_actions(self): reg = self._cw.vreg['components'] - return sorted(clss[0].action for rid, clss in reg.iteritems() + return sorted(clss[0].action for rid, clss in reg.items() if rid.startswith('cw.entityxml.action.')) # mapping handling ######################################################### @@ -204,7 +211,7 @@ * `rels` is for relations and structured as {role: {relation: [(related item, related rels)...]} """ - entity = self.extid2entity(str(item['cwuri']), item['cwtype'], + entity = self.extid2entity(item['cwuri'].encode('ascii'), item['cwtype'], cwsource=item['cwsource'], item=item, raise_on_error=raise_on_error) if entity is None: @@ -220,7 +227,7 @@ def process_relations(self, entity, rels): etype = entity.cw_etype - for (rtype, role, action), rules in self.source.mapping.get(etype, {}).iteritems(): + for (rtype, role, action), rules in self.source.mapping.get(etype, {}).items(): try: related_items = rels[role][rtype] except KeyError: @@ -242,14 +249,14 @@ def normalize_url(self, url): """overridden to add vid=xml if vid is not set in the qs""" url = super(CWEntityXMLParser, self).normalize_url(url) - purl = urlparse.urlparse(url) + purl = urlparse(url) if purl.scheme in ('http', 'https'): - params = urlparse.parse_qs(purl.query) + params = parse_qs(purl.query) if 'vid' not in params: params['vid'] = ['xml'] purl = list(purl) - purl[4] = urllib.urlencode(params, doseq=True) - return urlparse.urlunparse(purl) + purl[4] = urlencode(params, doseq=True) + return urlunparse(purl) return url def complete_url(self, url, etype=None, known_relations=None): @@ -263,8 +270,8 @@ If `known_relations` is given, it should be a dictionary of already known relations, so they don't get queried again. """ - purl = urlparse.urlparse(url) - params = urlparse.parse_qs(purl.query) + purl = urlparse(url) + params = parse_qs(purl.query) if etype is None: etype = purl.path.split('/')[-1] try: @@ -277,8 +284,8 @@ continue relations.add('%s-%s' % (rtype, role)) purl = list(purl) - purl[4] = urllib.urlencode(params, doseq=True) - return urlparse.urlunparse(purl) + purl[4] = urlencode(params, doseq=True) + return urlunparse(purl) def complete_item(self, item, rels): try: @@ -314,7 +321,7 @@ """ node = self.node item = dict(node.attrib.items()) - item['cwtype'] = unicode(node.tag) + item['cwtype'] = text_type(node.tag) item.setdefault('cwsource', None) try: item['eid'] = int(item['eid']) @@ -331,7 +338,7 @@ related += self.parser.parse_etree(child) elif child.text: # attribute - item[child.tag] = unicode(child.text) + item[child.tag] = text_type(child.text) else: # None attribute (empty tag) item[child.tag] = None diff -r a4fcee1e9789 -r 19fcce6dc6d1 sobjects/ldapparser.py --- a/sobjects/ldapparser.py Thu Mar 24 09:43:25 2016 +0100 +++ b/sobjects/ldapparser.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,4 +1,4 @@ -# copyright 2011-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2011-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
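Still in cwxmlparser.py above, URL handling switches from the Python 2 urlparse module to six.moves.urllib.parse, which aliases urllib.parse on Python 3. The vid-defaulting logic of normalize_url, extracted as a standalone function for clarity (add_default_vid is illustrative):

    from six.moves.urllib.parse import urlparse, urlunparse, parse_qs, urlencode

    def add_default_vid(url):
        """Append vid=xml to the query string when it is missing."""
        purl = urlparse(url)
        if purl.scheme not in ('http', 'https'):
            return url
        params = parse_qs(purl.query)
        if 'vid' not in params:
            params['vid'] = ['xml']
        purl = list(purl)
        purl[4] = urlencode(params, doseq=True)
        return urlunparse(purl)

    assert add_default_vid('http://pouet.org/5') == 'http://pouet.org/5?vid=xml'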
@@ -20,12 +20,28 @@ unlike ldapuser source, this source is copy based and will import ldap content (beside passwords for authentication) into the system source. """ +from six.moves import map, filter + from logilab.common.decorators import cached, cachedproperty from logilab.common.shellutils import generate_password from cubicweb import Binary, ConfigurationError from cubicweb.server.utils import crypt_password from cubicweb.server.sources import datafeed +from cubicweb.dataimport import stores, importer + + +class UserMetaGenerator(stores.MetaGenerator): + """Specific metadata generator, used to see newly created user into their initial state. + """ + @cached + def base_etype_dicts(self, entity): + entity, rels = super(UserMetaGenerator, self).base_etype_dicts(entity) + if entity.cw_etype == 'CWUser': + wf_state = self._cnx.execute('Any S WHERE ET default_workflow WF, ET name %(etype)s, ' + 'WF initial_state S', {'etype': entity.cw_etype}).one() + rels['in_state'] = wf_state.eid + return entity, rels class DataFeedLDAPAdapter(datafeed.DataFeedParser): @@ -48,8 +64,8 @@ def user_source_entities_by_extid(self): source = self.source if source.user_base_dn.strip(): - attrs = map(str, source.user_attrs.keys()) - return dict((userdict['dn'], userdict) + attrs = list(map(str, source.user_attrs.keys())) + return dict((userdict['dn'].encode('ascii'), userdict) for userdict in source._search(self._cw, source.user_base_dn, source.user_base_scope, @@ -61,8 +77,8 @@ def group_source_entities_by_extid(self): source = self.source if source.group_base_dn.strip(): - attrs = map(str, ['modifyTimestamp'] + source.group_attrs.keys()) - return dict((groupdict['dn'], groupdict) + attrs = list(map(str, ['modifyTimestamp'] + list(source.group_attrs.keys()))) + return dict((groupdict['dn'].encode('ascii'), groupdict) for groupdict in source._search(self._cw, source.group_base_dn, source.group_base_scope, @@ -70,171 +86,170 @@ attrs)) return {} - def _process(self, etype, sdict, raise_on_error=False): - self.debug('fetched %s %s', etype, sdict) - extid = sdict['dn'] - entity = self.extid2entity(extid, etype, - raise_on_error=raise_on_error, **sdict) - if entity is not None and not self.created_during_pull(entity): - self.notify_updated(entity) - attrs = self.ldap2cwattrs(sdict, etype) - self.update_if_necessary(entity, attrs) - if etype == 'CWUser': - self._process_email(entity, sdict) - if etype == 'CWGroup': - self._process_membership(entity, sdict) - def process(self, url, raise_on_error=False): """IDataFeedParser main entry point""" self.debug('processing ldapfeed source %s %s', self.source, self.searchfilterstr) - for userdict in self.user_source_entities_by_extid.itervalues(): - self._process('CWUser', userdict) + self._group_members = {} + eeimporter = self.build_importer(raise_on_error) + for name in self.source.user_default_groups: + geid = self._get_group(name) + eeimporter.extid2eid[geid] = geid + entities = self.extentities_generator() + set_cwuri = importer.use_extid_as_cwuri(eeimporter.extid2eid) + eeimporter.import_entities(set_cwuri(entities)) + self.stats['created'] = eeimporter.created + self.stats['updated'] = eeimporter.updated + # handle in_group relation + for group, members in self._group_members.items(): + self._cw.execute('DELETE U in_group G WHERE G name %(g)s', {'g': group}) + if members: + members = ["'%s'" % e for e in members] + rql = 'SET U in_group G WHERE G name %%(g)s, U login IN (%s)' % ','.join(members) + self._cw.execute(rql, {'g': group}) + # ensure updated users are 
activated + for eid in eeimporter.updated: + entity = self._cw.entity_from_eid(eid) + if entity.cw_etype == 'CWUser': + self.ensure_activated(entity) + # manually set primary email if necessary, it's not handled automatically since hooks are + # deactivated + self._cw.execute('SET X primary_email E WHERE NOT X primary_email E, X use_email E, ' + 'X cw_source S, S eid %(s)s, X in_state ST, TS name "activated"', + {'s': self.source.eid}) + + def build_importer(self, raise_on_error): + """Instantiate and configure an importer""" + etypes = ('CWUser', 'EmailAddress', 'CWGroup') + extid2eid = dict((self.source.decode_extid(x), y) for x, y in + self._cw.system_sql('select extid, eid from entities where asource = %(s)s', {'s': self.source.uri})) + existing_relations = {} + for rtype in ('in_group', 'use_email', 'owned_by'): + rql = 'Any S,O WHERE S {} O, S cw_source SO, SO eid %(s)s'.format(rtype) + rset = self._cw.execute(rql, {'s': self.source.eid}) + existing_relations[rtype] = set(tuple(x) for x in rset) + return importer.ExtEntitiesImporter(self._cw.vreg.schema, self.build_store(), + extid2eid=extid2eid, + existing_relations=existing_relations, + etypes_order_hint=etypes, + import_log=self.import_log, + raise_on_error=raise_on_error) + + def build_store(self): + """Instantiate and configure a store""" + metagenerator = UserMetaGenerator(self._cw, source=self.source) + return stores.NoHookRQLObjectStore(self._cw, metagenerator) + + def extentities_generator(self): self.debug('processing ldapfeed source %s %s', self.source, self.searchgroupfilterstr) - for groupdict in self.group_source_entities_by_extid.itervalues(): - self._process('CWGroup', groupdict, raise_on_error=raise_on_error) + # generate users and email addresses + for userdict in self.user_source_entities_by_extid.values(): + attrs = self.ldap2cwattrs(userdict, 'CWUser') + pwd = attrs.get('upassword') + if not pwd: + # generate a dumb password if not fetched from ldap (see + # userPassword) + pwd = crypt_password(generate_password()) + attrs['upassword'] = set([Binary(pwd)]) + extuser = importer.ExtEntity('CWUser', userdict['dn'].encode('ascii'), attrs) + extuser.values['owned_by'] = set([extuser.extid]) + for extemail in self._process_email(extuser, userdict): + yield extemail + groups = list(filter(None, [self._get_group(name) + for name in self.source.user_default_groups])) + if groups: + extuser.values['in_group'] = groups + yield extuser + # generate groups + for groupdict in self.group_source_entities_by_extid.values(): + attrs = self.ldap2cwattrs(groupdict, 'CWGroup') + extgroup = importer.ExtEntity('CWGroup', groupdict['dn'].encode('ascii'), attrs) + yield extgroup + # record group membership for later insertion + members = groupdict.get(self.source.group_rev_attrs['member'], ()) + self._group_members[attrs['name']] = members + + def _process_email(self, extuser, userdict): + try: + emailaddrs = userdict.pop(self.source.user_rev_attrs['email']) + except KeyError: + return # no email for that user, nothing to do + if not isinstance(emailaddrs, list): + emailaddrs = [emailaddrs] + for emailaddr in emailaddrs: + # search for existing email first, may be coming from another source + rset = self._cw.execute('EmailAddress X WHERE X address %(addr)s', + {'addr': emailaddr}) + emailextid = (userdict['dn'] + '@@' + emailaddr).encode('ascii') + if not rset: + # not found, create it. 
first forge an external id + extuser.values.setdefault('use_email', []).append(emailextid) + yield importer.ExtEntity('EmailAddress', emailextid, dict(address=[emailaddr])) + elif self.sourceuris: + # pop from sourceuris anyway, else email may be removed by the + # source once import is finished + self.sourceuris.pop(emailextid, None) + # XXX else check use_email relation? def handle_deletion(self, config, cnx, myuris): if config['delete-entities']: super(DataFeedLDAPAdapter, self).handle_deletion(config, cnx, myuris) return if myuris: - byetype = {} - for extid, (eid, etype) in myuris.iteritems(): - if self.is_deleted(extid, etype, eid): - byetype.setdefault(etype, []).append(str(eid)) - - for etype, eids in byetype.iteritems(): - if etype != 'CWUser': + for extid, (eid, etype) in myuris.items(): + if etype != 'CWUser' or not self.is_deleted(extid, etype, eid): continue - self.info('deactivate %s %s entities', len(eids), etype) - for eid in eids: - wf = cnx.entity_from_eid(eid).cw_adapt_to('IWorkflowable') - wf.fire_transition_if_possible('deactivate') + self.info('deactivate user %s', eid) + wf = cnx.entity_from_eid(eid).cw_adapt_to('IWorkflowable') + wf.fire_transition_if_possible('deactivate') cnx.commit() - def update_if_necessary(self, entity, attrs): - # disable read security to allow password selection - with entity._cw.security_enabled(read=False): - entity.complete(tuple(attrs)) + def ensure_activated(self, entity): if entity.cw_etype == 'CWUser': wf = entity.cw_adapt_to('IWorkflowable') if wf.state == 'deactivated': wf.fire_transition('activate') self.info('user %s reactivated', entity.login) - mdate = attrs.get('modification_date') - if not mdate or mdate > entity.modification_date: - attrs = dict( (k, v) for k, v in attrs.iteritems() - if v != getattr(entity, k)) - if attrs: - entity.cw_set(**attrs) - self.notify_updated(entity) + + def ldap2cwattrs(self, sdict, etype): + """Transform dictionary of LDAP attributes to CW. 
- def ldap2cwattrs(self, sdict, etype, tdict=None): - """ Transform dictionary of LDAP attributes to CW - etype must be CWUser or CWGroup """ - if tdict is None: - tdict = {} + etype must be CWUser or CWGroup + """ + assert etype in ('CWUser', 'CWGroup'), etype + tdict = {} if etype == 'CWUser': - items = self.source.user_attrs.iteritems() + items = self.source.user_attrs.items() elif etype == 'CWGroup': - items = self.source.group_attrs.iteritems() + items = self.source.group_attrs.items() for sattr, tattr in items: if tattr not in self.non_attribute_keys: try: - tdict[tattr] = sdict[sattr] + value = sdict[sattr] except KeyError: - raise ConfigurationError('source attribute %s has not ' - 'been found in the source, ' - 'please check the %s-attrs-map ' - 'field and the permissions of ' - 'the LDAP binding user' % - (sattr, etype[2:].lower())) + raise ConfigurationError( + 'source attribute %s has not been found in the source, ' + 'please check the %s-attrs-map field and the permissions of ' + 'the LDAP binding user' % (sattr, etype[2:].lower())) + if not isinstance(value, list): + value = [value] + tdict[tattr] = value return tdict - def before_entity_copy(self, entity, sourceparams): - etype = entity.cw_etype - if etype == 'EmailAddress': - entity.cw_edited['address'] = sourceparams['address'] - else: - self.ldap2cwattrs(sourceparams, etype, tdict=entity.cw_edited) - if etype == 'CWUser': - pwd = entity.cw_edited.get('upassword') - if not pwd: - # generate a dumb password if not fetched from ldap (see - # userPassword) - pwd = crypt_password(generate_password()) - entity.cw_edited['upassword'] = Binary(pwd) - return entity - - def after_entity_copy(self, entity, sourceparams): - super(DataFeedLDAPAdapter, self).after_entity_copy(entity, sourceparams) - etype = entity.cw_etype - if etype == 'EmailAddress': - return - # all CWUsers must be treated before CWGroups to have the in_group relation - # set correctly in _associate_ldapusers - elif etype == 'CWUser': - groups = filter(None, [self._get_group(name) - for name in self.source.user_default_groups]) - if groups: - entity.cw_set(in_group=groups) - self._process_email(entity, sourceparams) - elif etype == 'CWGroup': - self._process_membership(entity, sourceparams) - def is_deleted(self, extidplus, etype, eid): try: - extid, _ = extidplus.rsplit('@@', 1) + extid = extidplus.rsplit(b'@@', 1)[0] except ValueError: # for some reason extids here tend to come in both forms, e.g: # dn, dn@@Babar extid = extidplus return extid not in self.user_source_entities_by_extid - def _process_email(self, entity, userdict): - try: - emailaddrs = userdict[self.source.user_rev_attrs['email']] - except KeyError: - return # no email for that user, nothing to do - if not isinstance(emailaddrs, list): - emailaddrs = [emailaddrs] - for emailaddr in emailaddrs: - # search for existing email first, may be coming from another source - rset = self._cw.execute('EmailAddress X WHERE X address %(addr)s', - {'addr': emailaddr}) - if not rset: - # not found, create it. first forge an external id - emailextid = userdict['dn'] + '@@' + emailaddr.encode('utf-8') - email = self.extid2entity(emailextid, 'EmailAddress', - address=emailaddr) - entity.cw_set(use_email=email) - elif self.sourceuris: - # pop from sourceuris anyway, else email may be removed by the - # source once import is finished - uri = userdict['dn'] + '@@' + emailaddr.encode('utf-8') - self.sourceuris.pop(uri, None) - # XXX else check use_email relation? 
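The ldapparser.py rewrite above replaces the per-entity _process()/extid2entity() calls with a generator of ExtEntity objects consumed by an ExtEntitiesImporter. Two conventions matter when producing them: external ids are bytes (DNs are ASCII-encoded) and every attribute value is wrapped in a container. A minimal sketch of such a producer (the uid-to-login mapping is illustrative):

    from cubicweb.dataimport import importer

    def ext_users(userdicts):
        """Yield ExtEntity objects built from LDAP search results."""
        for userdict in userdicts:
            extid = userdict['dn'].encode('ascii')     # external ids are bytes
            attrs = {'login': set([userdict['uid']])}  # values go in containers
            yield importer.ExtEntity('CWUser', extid, attrs)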
- - def _process_membership(self, entity, sourceparams): - """ Find existing CWUsers with the same login as the memberUids in the - CWGroup entity and create the in_group relationship """ - mdate = sourceparams.get('modification_date') - if (not mdate or mdate > entity.modification_date): - self._cw.execute('DELETE U in_group G WHERE G eid %(g)s', - {'g':entity.eid}) - members = sourceparams.get(self.source.group_rev_attrs['member']) - if members: - members = ["'%s'" % e for e in members] - rql = 'SET U in_group G WHERE G eid %%(g)s, U login IN (%s)' % ','.join(members) - self._cw.execute(rql, {'g':entity.eid, }) - @cached def _get_group(self, name): try: return self._cw.execute('Any X WHERE X is CWGroup, X name %(name)s', - {'name': name}).get_entity(0, 0) + {'name': name})[0][0] except IndexError: self.error('group %r referenced by source configuration %r does not exist', name, self.source.uri) return None - diff -r a4fcee1e9789 -r 19fcce6dc6d1 sobjects/notification.py --- a/sobjects/notification.py Thu Mar 24 09:43:25 2016 +0100 +++ b/sobjects/notification.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,10 +18,12 @@ """some views to handle notification on data changes""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from itertools import repeat +from six import text_type + from logilab.common.textutils import normalize_text from logilab.common.deprecation import class_renamed, class_moved, deprecated from logilab.common.registry import yes @@ -181,8 +183,8 @@ def context(self, **kwargs): entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) - for key, val in kwargs.iteritems(): - if val and isinstance(val, unicode) and val.strip(): + for key, val in kwargs.items(): + if val and isinstance(val, text_type) and val.strip(): kwargs[key] = self._cw._(val) kwargs.update({'user': self.user_data['login'], 'eid': entity.eid, @@ -255,7 +257,7 @@ def format_value(value): - if isinstance(value, unicode): + if isinstance(value, text_type): return u'"%s"' % value return value diff -r a4fcee1e9789 -r 19fcce6dc6d1 sobjects/services.py --- a/sobjects/services.py Thu Mar 24 09:43:25 2016 +0100 +++ b/sobjects/services.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,7 +19,10 @@ import threading +from six import text_type + from yams.schema import role_name + from cubicweb import ValidationError from cubicweb.server import Service from cubicweb.predicates import match_user_groups, match_kwargs @@ -58,6 +61,7 @@ results['threads'] = [t.name for t in threading.enumerate()] return results + class GcStatsService(Service): """Return a dictionary containing some statistics about the repository resources usage. 
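In notification.py above, the module-level `_ = unicode` idiom is replaced by importing the marker from the cubicweb package. The marker is a no-op used only so xgettext can extract translatable strings; the actual translation still happens at runtime through the request. In short:

    # Python 2 only idiom, removed above:
    #     _ = unicode
    from cubicweb import _

    MSG = _('status changed')  # extracted by xgettext, translated later via self._cw._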
@@ -94,9 +98,9 @@ results = {} counters, ocounters, garbage = gc_info(lookupclasses, viewreferrersclasses=()) - values = sorted(counters.iteritems(), key=lambda x: x[1], reverse=True) + values = sorted(counters.items(), key=lambda x: x[1], reverse=True) results['lookupclasses'] = values - values = sorted(ocounters.iteritems(), key=lambda x: x[1], reverse=True)[:nmax] + values = sorted(ocounters.items(), key=lambda x: x[1], reverse=True)[:nmax] results['referenced'] = values results['unreachable'] = garbage return results @@ -129,7 +133,7 @@ qname = role_name('login', 'subject') raise ValidationError(None, {qname: errmsg % login}) - if isinstance(password, unicode): + if isinstance(password, text_type): # password should *always* be utf8 encoded password = password.encode('UTF8') cwuserkwargs['login'] = login @@ -154,3 +158,17 @@ 'WHERE U login %(login)s', d, build_descr=False) return user + + +class SourceSynchronizationService(Service): + """Force synchronization of a datafeed source""" + __regid__ = 'source-sync' + __select__ = Service.__select__ & match_user_groups('managers') + + def call(self, source_eid): + source_entity = self._cw.entity_from_eid(source_eid) + repo = self._cw.repo # Service are repo side only. + with repo.internal_cnx() as cnx: + source = repo.sources_by_uri[source_entity.name] + source.pull_data(cnx) + diff -r a4fcee1e9789 -r 19fcce6dc6d1 sobjects/supervising.py --- a/sobjects/supervising.py Thu Mar 24 09:43:25 2016 +0100 +++ b/sobjects/supervising.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,7 @@ """some hooks and views to handle supervising of any data changes""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from cubicweb import UnknownEid from cubicweb.predicates import none_rset @@ -128,13 +128,15 @@ # XXX print changes self.w(u' %s' % changedescr.entity.absolute_url()) - def delete_entity(self, (eid, etype, title)): + def delete_entity(self, args): + eid, etype, title = args msg = self._cw._('deleted %(etype)s #%(eid)s (%(title)s)') etype = display_name(self._cw, etype).lower() self.w(msg % locals()) - def change_state(self, (entity, fromstate, tostate)): + def change_state(self, args): _ = self._cw._ + entity, fromstate, tostate = args msg = _('changed state of %(etype)s #%(eid)s (%(title)s)') self.w(u'%s\n' % (msg % self._entity_context(entity))) self.w(_(' from state %(fromstate)s to state %(tostate)s\n' % diff -r a4fcee1e9789 -r 19fcce6dc6d1 sobjects/test/unittest_cwxmlparser.py --- a/sobjects/test/unittest_cwxmlparser.py Thu Mar 24 09:43:25 2016 +0100 +++ b/sobjects/test/unittest_cwxmlparser.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,8 +17,10 @@ # with CubicWeb. If not, see . 
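The supervising.py hunks above deal with the removal of tuple parameter unpacking in function signatures (PEP 3113): `def delete_entity(self, (eid, etype, title))` is a SyntaxError on Python 3, so the tuple is now taken as a single argument and unpacked in the body. The portable form, shown as a standalone function:

    # Python 2 only, SyntaxError on Python 3 (PEP 3113):
    #     def delete_entity(self, (eid, etype, title)): ...

    def describe_deletion(args):
        """Take one argument and unpack it inside the body instead."""
        eid, etype, title = args
        return 'deleted %s #%s (%s)' % (etype, eid, title)

    assert describe_deletion((42, 'CWUser', 'bob')) == 'deleted CWUser #42 (bob)'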
from datetime import datetime -from urlparse import urlsplit, parse_qsl +from six.moves.urllib.parse import urlsplit, parse_qsl + +import pytz from cubicweb.devtools.testlib import CubicWebTC from cubicweb.sobjects.cwxmlparser import CWEntityXMLParser @@ -214,8 +216,8 @@ with self.admin_access.web_request() as req: user = req.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0) - self.assertEqual(user.creation_date, datetime(2010, 01, 22, 10, 27, 59)) - self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06)) + self.assertEqual(user.creation_date, datetime(2010, 1, 22, 10, 27, 59, tzinfo=pytz.utc)) + self.assertEqual(user.modification_date, datetime(2011, 1, 25, 14, 14, 6, tzinfo=pytz.utc)) self.assertEqual(user.cwuri, 'http://pouet.org/5') self.assertEqual(user.cw_source[0].name, 'myfeed') self.assertEqual(user.absolute_url(), 'http://pouet.org/5') @@ -299,8 +301,8 @@ with self.repo.internal_cnx() as cnx: stats = dfsource.pull_data(cnx, force=True, raise_on_error=True) user = cnx.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0) - self.assertEqual(user.creation_date, datetime(2010, 01, 22, 10, 27, 59)) - self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06)) + self.assertEqual(user.creation_date, datetime(2010, 1, 22, 10, 27, 59, tzinfo=pytz.utc)) + self.assertEqual(user.modification_date, datetime(2011, 1, 25, 14, 14, 6, tzinfo=pytz.utc)) self.assertEqual(user.cwuri, 'http://pouet.org/5') self.assertEqual(user.cw_source[0].name, 'myfeed') diff -r a4fcee1e9789 -r 19fcce6dc6d1 sobjects/test/unittest_notification.py --- a/sobjects/test/unittest_notification.py Thu Mar 24 09:43:25 2016 +0100 +++ b/sobjects/test/unittest_notification.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,4 +1,3 @@ -# -*- coding: iso-8859-1 -*- # copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # @@ -16,47 +15,10 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
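The unittest_cwxmlparser.py hunks above fold two Python 3 fixes into one change: leading-zero integer literals such as 01 are a SyntaxError on Python 3 (octals must be spelled 0o1), and since the feed now produces tz-aware datetimes the expected values carry tzinfo=pytz.utc:

    from datetime import datetime

    import pytz

    # datetime(2011, 01, 25, 14, 14, 06)  ->  SyntaxError on Python 3
    expected = datetime(2011, 1, 25, 14, 14, 6, tzinfo=pytz.utc)
    assert expected.tzinfo is pytz.utc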
+"""Tests for notification sobjects""" -from socket import gethostname - -from logilab.common.testlib import unittest_main, TestCase from cubicweb.devtools.testlib import CubicWebTC, MAILBOX -from cubicweb.mail import construct_message_id, parse_message_id - -class MessageIdTC(TestCase): - def test_base(self): - msgid1 = construct_message_id('testapp', 21) - msgid2 = construct_message_id('testapp', 21) - self.assertNotEqual(msgid1, msgid2) - self.assertNotIn('&', msgid1) - self.assertNotIn('=', msgid1) - self.assertNotIn('/', msgid1) - self.assertNotIn('+', msgid1) - values = parse_message_id(msgid1, 'testapp') - self.assertTrue(values) - # parse_message_id should work with or without surrounding <> - self.assertEqual(values, parse_message_id(msgid1[1:-1], 'testapp')) - self.assertEqual(values['eid'], '21') - self.assertIn('timestamp', values) - self.assertEqual(parse_message_id(msgid1[1:-1], 'anotherapp'), None) - - def test_notimestamp(self): - msgid1 = construct_message_id('testapp', 21, False) - msgid2 = construct_message_id('testapp', 21, False) - values = parse_message_id(msgid1, 'testapp') - self.assertEqual(values, {'eid': '21'}) - - def test_parse_message_doesnt_raise(self): - self.assertEqual(parse_message_id('oijioj@bla.bla', 'tesapp'), None) - self.assertEqual(parse_message_id('oijioj@bla', 'tesapp'), None) - self.assertEqual(parse_message_id('oijioj', 'tesapp'), None) - - - def test_nonregr_empty_message_id(self): - for eid in (1, 12, 123, 1234): - msgid1 = construct_message_id('testapp', eid, 12) - self.assertNotEqual(msgid1, '<@testapp.%s>' % gethostname()) class NotificationTC(CubicWebTC): @@ -67,7 +29,7 @@ 'WHERE U eid %(x)s', {'x': urset[0][0]}) req.execute('INSERT CWProperty X: X pkey "ui.language", X value "fr", X for_user U ' 'WHERE U eid %(x)s', {'x': urset[0][0]}) - req.cnx.commit() # commit so that admin get its properties updated + req.cnx.commit() # commit so that admin get its properties updated finder = self.vreg['components'].select('recipients_finder', req, rset=urset) self.set_option('default-recipients-mode', 'none') @@ -76,7 +38,8 @@ self.assertEqual(finder.recipients(), [(u'admin@logilab.fr', 'fr')]) self.set_option('default-recipients-mode', 'default-dest-addrs') self.set_option('default-dest-addrs', 'abcd@logilab.fr, efgh@logilab.fr') - self.assertEqual(finder.recipients(), [('abcd@logilab.fr', 'en'), ('efgh@logilab.fr', 'en')]) + self.assertEqual(list(finder.recipients()), + [('abcd@logilab.fr', 'en'), ('efgh@logilab.fr', 'en')]) def test_status_change_view(self): with self.admin_access.web_request() as req: @@ -99,5 +62,7 @@ self.assertEqual(email.subject, 'status changed CWUser #%s (admin)' % u.eid) + if __name__ == '__main__': + from logilab.common.testlib import unittest_main unittest_main() diff -r a4fcee1e9789 -r 19fcce6dc6d1 sobjects/test/unittest_supervising.py --- a/sobjects/test/unittest_supervising.py Thu Mar 24 09:43:25 2016 +0100 +++ b/sobjects/test/unittest_supervising.py Thu Jun 16 14:19:20 2016 +0200 @@ -77,7 +77,7 @@ # check prepared email op._prepare_email() self.assertEqual(len(op.to_send), 1) - self.assert_(op.to_send[0][0]) + self.assertTrue(op.to_send[0][0]) self.assertEqual(op.to_send[0][1], ['test@logilab.fr']) cnx.commit() # some other changes ####### diff -r a4fcee1e9789 -r 19fcce6dc6d1 spa2rql.py --- a/spa2rql.py Thu Mar 24 09:43:25 2016 +0100 +++ b/spa2rql.py Thu Jun 16 14:19:20 2016 +0200 @@ -146,9 +146,9 @@ def finalize(self): """return corresponding rql query (string) / args (dict)""" - for varname, ptypes in 
self.possible_types.iteritems(): + for varname, ptypes in self.possible_types.items(): if len(ptypes) == 1: - self.restrictions.append('%s is %s' % (varname, iter(ptypes).next())) + self.restrictions.append('%s is %s' % (varname, next(iter(ptypes)))) unions = [] for releq, subjvar, obj in self.union_params: thisunions = [] diff -r a4fcee1e9789 -r 19fcce6dc6d1 tags.py --- a/tags.py Thu Mar 24 09:43:25 2016 +0100 +++ b/tags.py Thu Jun 16 14:19:20 2016 +0200 @@ -59,4 +59,3 @@ html += options html.append(u'') return u'\n'.join(html) - diff -r a4fcee1e9789 -r 19fcce6dc6d1 test/data/cubes/file/__pkginfo__.py --- a/test/data/cubes/file/__pkginfo__.py Thu Mar 24 09:43:25 2016 +0100 +++ b/test/data/cubes/file/__pkginfo__.py Thu Jun 16 14:19:20 2016 +0200 @@ -23,4 +23,3 @@ numversion = (1, 4, 3) version = '.'.join(str(num) for num in numversion) - diff -r a4fcee1e9789 -r 19fcce6dc6d1 test/data/rqlexpr_on_computedrel.py --- a/test/data/rqlexpr_on_computedrel.py Thu Mar 24 09:43:25 2016 +0100 +++ b/test/data/rqlexpr_on_computedrel.py Thu Jun 16 14:19:20 2016 +0200 @@ -14,5 +14,3 @@ class computed(ComputedRelation): rule = 'S relation O' __permissions__ = {'read': (RRQLExpression('S is ET'),)} - - diff -r a4fcee1e9789 -r 19fcce6dc6d1 test/data/schema.py --- a/test/data/schema.py Thu Mar 24 09:43:25 2016 +0100 +++ b/test/data/schema.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,13 +17,17 @@ # with CubicWeb. If not, see . from yams.buildobjs import (EntityType, String, RichString, Bytes, - SubjectRelation, RelationDefinition) + ComputedRelation, SubjectRelation, RelationDefinition) from cubicweb.schema import (WorkflowableEntityType, RQLConstraint, RQLVocabularyConstraint) -_ = unicode +from cubicweb import _ + + +class buddies(ComputedRelation): + rule = 'S in_group G, O in_group G' class Personne(EntityType): diff -r a4fcee1e9789 -r 19fcce6dc6d1 test/data_schemareader/schema.py --- a/test/data_schemareader/schema.py Thu Mar 24 09:43:25 2016 +0100 +++ b/test/data_schemareader/schema.py Thu Jun 16 14:19:20 2016 +0200 @@ -7,5 +7,3 @@ cw_for_source.__permissions__ = {'read': ('managers', 'users'), 'add': ('managers',), 'delete': ('managers',)} - - diff -r a4fcee1e9789 -r 19fcce6dc6d1 test/requirements.txt diff -r a4fcee1e9789 -r 19fcce6dc6d1 test/unittest_binary.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/unittest_binary.py Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,78 @@ +# copyright 2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
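The new test/unittest_binary.py below pins down the stricter Binary contract introduced by this changeset: only bytes, bytearray and buffer/memoryview are accepted, text input is rejected, and zipped-pickle round trips preserve values. In usage terms:

    from cubicweb import Binary

    b = Binary(b'some data')
    b.write(b' and more')
    assert b.getvalue() == b'some data and more'

    try:
        Binary(u'nope')                      # text is rejected
    except (AssertionError, TypeError):      # TypeError when run with python -O
        pass

    assert Binary.zpickle({'a': 1}).unzpickle() == {'a': 1}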
+ +from unittest import TestCase +import os.path as osp +import pickle + +from six import PY2 + +from logilab.common.shellutils import tempdir + +from cubicweb import Binary + + +class BinaryTC(TestCase): + def test_init(self): + Binary() + Binary(b'toto') + Binary(bytearray(b'toto')) + if PY2: + Binary(buffer('toto')) + else: + Binary(memoryview(b'toto')) + with self.assertRaises((AssertionError, TypeError)): + # TypeError is raised by BytesIO if python runs with -O + Binary(u'toto') + + def test_write(self): + b = Binary() + b.write(b'toto') + b.write(bytearray(b'toto')) + if PY2: + b.write(buffer('toto')) + else: + b.write(memoryview(b'toto')) + with self.assertRaises((AssertionError, TypeError)): + # TypeError is raised by BytesIO if python runs with -O + b.write(u'toto') + + def test_gzpickle_roundtrip(self): + old = (u'foo', b'bar', 42, {}) + new = Binary.zpickle(old).unzpickle() + self.assertEqual(old, new) + self.assertIsNot(old, new) + + def test_from_file_to_file(self): + with tempdir() as dpath: + fpath = osp.join(dpath, 'binary.bin') + with open(fpath, 'wb') as fobj: + Binary(b'binaryblob').to_file(fobj) + + bobj = Binary.from_file(fpath) + self.assertEqual(bobj.getvalue(), b'binaryblob') + + def test_pickleable(self): + b = Binary(b'toto') + bb = pickle.loads(pickle.dumps(b)) + self.assertEqual(b, bb) + + +if __name__ == '__main__': + from unittest import main + main() diff -r a4fcee1e9789 -r 19fcce6dc6d1 test/unittest_cwconfig.py --- a/test/unittest_cwconfig.py Thu Mar 24 09:43:25 2016 +0100 +++ b/test/unittest_cwconfig.py Thu Jun 16 14:19:20 2016 +0200 @@ -86,15 +86,6 @@ finally: comment_pkginfo.__recommends_cubes__ = {} - -# def test_vc_config(self): -# vcconf = self.config.vc_config() -# self.assertIsInstance(vcconf['EEMAIL'], Version) -# self.assertEqual(vcconf['EEMAIL'], (0, 3, 1)) -# self.assertEqual(vcconf['CW'], (2, 31, 2)) -# self.assertRaises(KeyError, vcconf.__getitem__, 'CW_VERSION') -# self.assertRaises(KeyError, vcconf.__getitem__, 'CRM') - def test_expand_cubes(self): self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] self.config.adjust_sys_path() diff -r a4fcee1e9789 -r 19fcce6dc6d1 test/unittest_cwctl.py --- a/test/unittest_cwctl.py Thu Mar 24 09:43:25 2016 +0100 +++ b/test/unittest_cwctl.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,10 @@ import sys import os from os.path import join -from cStringIO import StringIO +from io import StringIO, BytesIO + +from six import PY2 + from logilab.common.testlib import TestCase, unittest_main from cubicweb.cwconfig import CubicWebConfiguration @@ -30,7 +33,7 @@ class CubicWebCtlTC(TestCase): def setUp(self): - self.stream = StringIO() + self.stream = BytesIO() if PY2 else StringIO() sys.stdout = self.stream def tearDown(self): sys.stdout = sys.__stdout__ @@ -57,7 +60,7 @@ funcname=None) for script, args in scripts.items(): scriptname = os.path.join(self.datadir, 'scripts', script) - self.assert_(os.path.exists(scriptname)) + self.assertTrue(os.path.exists(scriptname)) mih.cmd_process_script(scriptname, None, scriptargs=args) diff -r a4fcee1e9789 -r 19fcce6dc6d1 test/unittest_entity.py --- a/test/unittest_entity.py Thu Mar 24 09:43:25 2016 +0100 +++ b/test/unittest_entity.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,6 +20,8 @@ from datetime import datetime +from six import text_type + from logilab.common import tempattr from logilab.common.decorators import clear_cache @@ -136,28 +138,33 @@ e.cw_clear_relation_cache('in_state', 'subject') self.assertEqual(e.cw_adapt_to('IWorkflowable').state, 'activated') + def 
test_copy_exclude_computed_relations(self): + """The `CWUser buddies CWUser` (computed) relation should not be copied. + """ + with self.admin_access.cnx() as cnx: + friends = cnx.create_entity('CWGroup', name=u'friends') + bob = self.create_user(cnx, u'bob', groups=('friends',)) + cnx.create_entity('EmailAddress', address=u'bob@cubicweb.org', + reverse_use_email=bob) + alice = self.create_user(cnx, u'alices', groups=('friends',)) + cnx.commit() + charles = self.create_user(cnx, u'charles') + cnx.commit() + # Just ensure this does not crash (it would if computed relation + # attempted to be copied). + charles.copy_relations(bob.eid) + def test_related_cache_both(self): with self.admin_access.web_request() as req: user = req.execute('Any X WHERE X eid %(x)s', {'x':req.user.eid}).get_entity(0, 0) adeleid = req.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0] - self.assertEqual({}, user._cw_related_cache) req.cnx.commit() - self.assertEqual(['primary_email_subject', 'use_email_subject', 'wf_info_for_object'], - sorted(user._cw_related_cache)) + self.assertEqual(user._cw_related_cache, {}) email = user.primary_email[0] - self.assertEqual(u'toto@logilab.org', email.address) - self.assertEqual(['created_by_subject', - 'cw_source_subject', - 'is_instance_of_subject', - 'is_subject', - 'owned_by_subject', - 'prefered_form_object', - 'prefered_form_subject', - 'primary_email_object', - 'use_email_object'], - sorted(email._cw_related_cache)) - self.assertEqual('admin', email._cw_related_cache['primary_email_object'][1][0].login) + self.assertEqual(sorted(user._cw_related_cache), ['primary_email_subject']) + self.assertEqual(list(email._cw_related_cache), ['primary_email_object']) groups = user.in_group + self.assertEqual(sorted(user._cw_related_cache), ['in_group_subject', 'primary_email_subject']) for group in groups: self.assertNotIn('in_group_subject', group._cw_related_cache) user.cw_clear_all_caches() @@ -254,7 +261,7 @@ Personne.fetch_attrs = ('nom', 'prenom', 'travaille') Societe.fetch_attrs = ('nom', 'evaluee') self.assertEqual(Personne.fetch_rql(user), - 'Any X,AA,AB,AC,AD,AE,AF ORDERBY AA,AE DESC ' + 'Any X,AA,AB,AC,AD,AE,AF ORDERBY AA ' 'WHERE X is_instance_of Personne, X nom AA, X prenom AB, X travaille AC?, ' 'AC evaluee AD?, AD modification_date AE, AC nom AF') # testing symmetric relation @@ -644,7 +651,7 @@ def test_printable_value_bytes(self): with self.admin_access.web_request() as req: - e = req.create_entity('FakeFile', data=Binary('lambda x: 1'), data_format=u'text/x-python', + e = req.create_entity('FakeFile', data=Binary(b'lambda x: 1'), data_format=u'text/x-python', data_encoding=u'ascii', data_name=u'toto.py') from cubicweb import mttransforms if mttransforms.HAS_PYGMENTS_TRANSFORMS: @@ -663,8 +670,10 @@ lambda x: 1 ''') - e = req.create_entity('FakeFile', data=Binary('*héhéhé*'), data_format=u'text/rest', - data_encoding=u'utf-8', data_name=u'toto.txt') + e = req.create_entity('FakeFile', + data=Binary(u'*héhéhé*'.encode('utf-8')), + data_format=u'text/rest', + data_encoding=u'utf-8', data_name=u'toto.txt') self.assertEqual(e.printable_value('data'), u'
<p><em>héhéhé</em></p>
') @@ -717,7 +726,7 @@ e = self.vreg['etypes'].etype_class('FakeFile')(req) e.cw_attr_cache['description'] = 'du html' e.cw_attr_cache['description_format'] = 'text/html' - e.cw_attr_cache['data'] = Binary('some data') + e.cw_attr_cache['data'] = Binary(b'some data') e.cw_attr_cache['data_name'] = 'an html file' e.cw_attr_cache['data_format'] = 'text/html' e.cw_attr_cache['data_encoding'] = 'ascii' @@ -769,11 +778,11 @@ # ambiguity test person2 = req.create_entity('Personne', prenom=u'remi', nom=u'doe') person.cw_clear_all_caches() - self.assertEqual(person.rest_path(), unicode(person.eid)) - self.assertEqual(person2.rest_path(), unicode(person2.eid)) + self.assertEqual(person.rest_path(), text_type(person.eid)) + self.assertEqual(person2.rest_path(), text_type(person2.eid)) # unique attr with None value (nom in this case) friend = req.create_entity('Ami', prenom=u'bob') - self.assertEqual(friend.rest_path(), unicode(friend.eid)) + self.assertEqual(friend.rest_path(), text_type(friend.eid)) # 'ref' below is created without the unique but not required # attribute, make sur that the unique _and_ required 'ean' is used # as the rest attribute @@ -853,4 +862,3 @@ if __name__ == '__main__': from logilab.common.testlib import unittest_main unittest_main() - diff -r a4fcee1e9789 -r 19fcce6dc6d1 test/unittest_mail.py --- a/test/unittest_mail.py Thu Mar 24 09:43:25 2016 +0100 +++ b/test/unittest_mail.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,19 +16,18 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""unit tests for module cubicweb.mail - -""" +"""unit tests for module cubicweb.mail""" import os import re +from socket import gethostname import sys +from unittest import TestCase -from logilab.common.testlib import unittest_main from logilab.common.umessage import message_from_string from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.mail import format_mail +from cubicweb.mail import format_mail, construct_message_id, parse_message_id def getlogin(): @@ -74,7 +73,6 @@ self.assertEqual(msg.get('reply-to'), u'oim , BimBam ') self.assertEqual(msg.get_payload(decode=True), u'un petit cöucou') - def test_format_mail_euro(self): mail = format_mail({'name': u'oîm', 'email': u'oim@logilab.fr'}, ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €') @@ -99,7 +97,6 @@ self.assertEqual(msg.get('reply-to'), u'oîm ') self.assertEqual(msg.get_payload(decode=True), u'un petit cöucou €') - def test_format_mail_from_reply_to(self): # no sender-name, sender-addr in the configuration self.set_option('sender-name', '') @@ -125,18 +122,20 @@ self.set_option('sender-addr', 'cubicweb-test@logilab.fr') # anonymous notification: no name and no email specified msg = format_mail({'name': u'', 'email': u''}, - ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', - config=self.config) + ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', + config=self.config) msg = message_from_string(msg.as_string()) self.assertEqual(msg.get('from'), u'cubicweb-test ') self.assertEqual(msg.get('reply-to'), u'cubicweb-test ') # anonymous notification: only email specified msg = format_mail({'email': u'tutu@logilab.fr'}, - ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', - config=self.config) + ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', + config=self.config) msg = message_from_string(msg.as_string()) self.assertEqual(msg.get('from'), u'cubicweb-test ') - self.assertEqual(msg.get('reply-to'), u'cubicweb-test , cubicweb-test ') + 
self.assertEqual( + msg.get('reply-to'), + u'cubicweb-test , cubicweb-test ') # anonymous notification: only name specified msg = format_mail({'name': u'tutu'}, ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', @@ -146,7 +145,41 @@ self.assertEqual(msg.get('reply-to'), u'tutu ') +class MessageIdTC(TestCase): + + def test_base(self): + msgid1 = construct_message_id('testapp', 21) + msgid2 = construct_message_id('testapp', 21) + self.assertNotEqual(msgid1, msgid2) + self.assertNotIn('&', msgid1) + self.assertNotIn('=', msgid1) + self.assertNotIn('/', msgid1) + self.assertNotIn('+', msgid1) + values = parse_message_id(msgid1, 'testapp') + self.assertTrue(values) + # parse_message_id should work with or without surrounding <> + self.assertEqual(values, parse_message_id(msgid1[1:-1], 'testapp')) + self.assertEqual(values['eid'], '21') + self.assertIn('timestamp', values) + self.assertEqual(parse_message_id(msgid1[1:-1], 'anotherapp'), None) + + def test_notimestamp(self): + msgid1 = construct_message_id('testapp', 21, False) + construct_message_id('testapp', 21, False) + values = parse_message_id(msgid1, 'testapp') + self.assertEqual(values, {'eid': '21'}) + + def test_parse_message_doesnt_raise(self): + self.assertEqual(parse_message_id('oijioj@bla.bla', 'tesapp'), None) + self.assertEqual(parse_message_id('oijioj@bla', 'tesapp'), None) + self.assertEqual(parse_message_id('oijioj', 'tesapp'), None) + + def test_nonregr_empty_message_id(self): + for eid in (1, 12, 123, 1234): + msgid1 = construct_message_id('testapp', eid, 12) + self.assertNotEqual(msgid1, '<@testapp.%s>' % gethostname()) + if __name__ == '__main__': + from logilab.common.testlib import unittest_main unittest_main() - diff -r a4fcee1e9789 -r 19fcce6dc6d1 test/unittest_migration.py --- a/test/unittest_migration.py Thu Mar 24 09:43:25 2016 +0100 +++ b/test/unittest_migration.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
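The MessageIdTC cases above boil down to a simple round-trip contract for notification message ids; condensed into a sketch (names as in cubicweb.mail, application ids illustrative):

from cubicweb.mail import construct_message_id, parse_message_id

msgid = construct_message_id('testapp', 21)         # unique: embeds the eid and a timestamp
values = parse_message_id(msgid, 'testapp')         # -> {'eid': '21', 'timestamp': ...}
assert values['eid'] == '21'
assert parse_message_id(msgid, 'otherapp') is None  # wrong appid: None, never an exception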
@@ -22,7 +22,7 @@ from cubicweb.devtools import TestServerConfiguration from cubicweb.cwconfig import CubicWebConfiguration -from cubicweb.migration import MigrationHelper, filter_scripts +from cubicweb.migration import MigrationHelper, filter_scripts, version_strictly_lower from cubicweb.server.migractions import ServerMigrationHelper @@ -76,8 +76,6 @@ def test_filter_scripts_for_mode(self): config = CubicWebConfiguration('data') config.verbosity = 0 - self.assertNotIsInstance(config.migration_handler(), ServerMigrationHelper) - self.assertIsInstance(config.migration_handler(), MigrationHelper) config = self.config config.__class__.name = 'repository' self.assertListEqual(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)), @@ -91,6 +89,10 @@ ((0, 1 ,0), TMIGRDIR+'0.1.0_repository.py')]) config.__class__.name = 'repository' + def test_version_strictly_lower(self): + self.assertTrue(version_strictly_lower(None, '1.0.0')) + self.assertFalse(version_strictly_lower('1.0.0', None)) + from cubicweb.devtools import ApptestConfiguration, get_test_db_handler diff -r a4fcee1e9789 -r 19fcce6dc6d1 test/unittest_predicates.py --- a/test/unittest_predicates.py Thu Mar 24 09:43:25 2016 +0100 +++ b/test/unittest_predicates.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,14 +20,17 @@ from operator import eq, lt, le, gt from contextlib import contextmanager +from six.moves import range + from logilab.common.testlib import TestCase, unittest_main from logilab.common.decorators import clear_cache from cubicweb import Binary from cubicweb.devtools.testlib import CubicWebTC from cubicweb.predicates import (is_instance, adaptable, match_kwargs, match_user_groups, - multi_lines_rset, score_entity, is_in_state, - rql_condition, relation_possible, match_form_params) + multi_lines_rset, score_entity, is_in_state, + rql_condition, relation_possible, match_form_params, + paginated_rset) from cubicweb.selectors import on_transition # XXX on_transition is deprecated from cubicweb.view import EntityAdapter from cubicweb.web import action @@ -37,7 +40,7 @@ class ImplementsTC(CubicWebTC): def test_etype_priority(self): with self.admin_access.web_request() as req: - f = req.create_entity('FakeFile', data_name=u'hop.txt', data=Binary('hop'), + f = req.create_entity('FakeFile', data_name=u'hop.txt', data=Binary(b'hop'), data_format=u'text/plain') rset = f.as_rset() anyscore = is_instance('Any')(f.__class__, req, rset=rset) @@ -488,6 +491,34 @@ "match_form_params() positional arguments must be strings") +class PaginatedTC(CubicWebTC): + """tests for paginated_rset predicate""" + + def setup_database(self): + with self.admin_access.repo_cnx() as cnx: + for i in range(30): + cnx.create_entity('CWGroup', name=u"group%d" % i) + cnx.commit() + + def test_paginated_rset(self): + default_nb_pages = 1 + web_request = self.admin_access.web_request + with web_request() as req: + rset = req.execute('Any G WHERE G is CWGroup') + self.assertEqual(len(rset), 34) + with web_request(vid='list', page_size='10') as req: + self.assertEqual(paginated_rset()(None, req, rset), default_nb_pages) + with web_request(vid='list', page_size='20') as req: + self.assertEqual(paginated_rset()(None, req, rset), default_nb_pages) + with web_request(vid='list', page_size='50') as req: + self.assertEqual(paginated_rset()(None, req, rset), 0) + with web_request(vid='list', page_size='10/') as req: + self.assertEqual(paginated_rset()(None, req, rset), 0) + with web_request(vid='list', page_size='.1') as req: + self.assertEqual(paginated_rset()(None, req, rset), 0) + 
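# page sizes below the row count (34 rows here) let paginated_rset score its
# default of 1; page sizes that fit everything, and malformed page_size form
# values ('10/', '.1', 'not_an_int'), must score 0 without raising: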
with web_request(vid='list', page_size='not_an_int') as req: + self.assertEqual(paginated_rset()(None, req, rset), 0) + + if __name__ == '__main__': unittest_main() - diff -r a4fcee1e9789 -r 19fcce6dc6d1 test/unittest_rqlrewrite.py --- a/test/unittest_rqlrewrite.py Thu Mar 24 09:43:25 2016 +0100 +++ b/test/unittest_rqlrewrite.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,6 +16,8 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . +from six import string_types + from logilab.common.testlib import unittest_main, TestCase from logilab.common.testlib import mock_object from yams import BadSchemaDefinition @@ -67,9 +69,9 @@ rewriter = _prepare_rewriter(rqlrewrite.RQLRewriter, kwargs) snippets = [] for v, exprs in sorted(snippets_map.items()): - rqlexprs = [isinstance(snippet, basestring) - and mock_object(snippet_rqlst=parse('Any X WHERE '+snippet).children[0], - expression='Any X WHERE '+snippet) + rqlexprs = [isinstance(snippet, string_types) + and mock_object(snippet_rqlst=parse(u'Any X WHERE '+snippet).children[0], + expression=u'Any X WHERE '+snippet) or snippet for snippet in exprs] snippets.append((dict([v]), rqlexprs)) @@ -90,7 +92,7 @@ selects.append(stmt) assert node in selects, (node, selects) for stmt in selects: - for var in stmt.defined_vars.itervalues(): + for var in stmt.defined_vars.values(): assert var.stinfo['references'] vrefmap = vrefmaps[stmt] assert not (var.stinfo['references'] ^ vrefmap[var.name]), (node.as_string(), var, var.stinfo['references'], vrefmap[var.name]) @@ -108,90 +110,90 @@ def test_base_var(self): constraint = ('X in_state S, U in_group G, P require_state S,' 'P name "read", P require_group G') - rqlst = parse('Card C') + rqlst = parse(u'Card C') rewrite(rqlst, {('C', 'X'): (constraint,)}, {}) self.assertEqual(rqlst.as_string(), - u"Any C WHERE C is Card, B eid %(D)s, " - "EXISTS(C in_state A, B in_group E, F require_state A, " - "F name 'read', F require_group E, A is State, E is CWGroup, F is CWPermission)") + u'Any C WHERE C is Card, B eid %(D)s, ' + 'EXISTS(C in_state A, B in_group E, F require_state A, ' + 'F name "read", F require_group E, A is State, E is CWGroup, F is CWPermission)') def test_multiple_var(self): card_constraint = ('X in_state S, U in_group G, P require_state S,' 'P name "read", P require_group G') affaire_constraints = ('X ref LIKE "PUBLIC%"', 'U in_group G, G name "public"') kwargs = {'u':2} - rqlst = parse('Any S WHERE S documented_by C, C eid %(u)s') + rqlst = parse(u'Any S WHERE S documented_by C, C eid %(u)s') rewrite(rqlst, {('C', 'X'): (card_constraint,), ('S', 'X'): affaire_constraints}, kwargs) self.assertMultiLineEqual( rqlst.as_string(), - "Any S WHERE S documented_by C, C eid %(u)s, B eid %(D)s, " - "EXISTS(C in_state A, B in_group E, F require_state A, " - "F name 'read', F require_group E, A is State, E is CWGroup, F is CWPermission), " - "(EXISTS(S ref LIKE 'PUBLIC%')) OR (EXISTS(B in_group G, G name 'public', G is CWGroup)), " - "S is Affaire") + u'Any S WHERE S documented_by C, C eid %(u)s, B eid %(D)s, ' + 'EXISTS(C in_state A, B in_group E, F require_state A, ' + 'F name "read", F require_group E, A is State, E is CWGroup, F is CWPermission), ' + '(EXISTS(S ref LIKE "PUBLIC%")) OR (EXISTS(B in_group G, G name "public", G is CWGroup)), ' + 'S is Affaire') self.assertIn('D', kwargs) def test_or(self): constraint = '(X identity U) OR (X in_state ST, CL identity U, CL in_state ST, ST name "subscribed")' - rqlst = parse('Any S WHERE S owned_by C, C eid %(u)s, 
S is in (CWUser, CWGroup)') + rqlst = parse(u'Any S WHERE S owned_by C, C eid %(u)s, S is in (CWUser, CWGroup)') rewrite(rqlst, {('C', 'X'): (constraint,)}, {'u':1}) self.assertEqual(rqlst.as_string(), - "Any S WHERE S owned_by C, C eid %(u)s, S is IN(CWUser, CWGroup), A eid %(B)s, " - "EXISTS((C identity A) OR (C in_state D, E identity A, " - "E in_state D, D name 'subscribed'), D is State, E is CWUser)") + 'Any S WHERE S owned_by C, C eid %(u)s, S is IN(CWUser, CWGroup), A eid %(B)s, ' + 'EXISTS((C identity A) OR (C in_state D, E identity A, ' + 'E in_state D, D name "subscribed"), D is State, E is CWUser)') def test_simplified_rqlst(self): constraint = ('X in_state S, U in_group G, P require_state S,' 'P name "read", P require_group G') - rqlst = parse('Any 2') # this is the simplified rql st for Any X WHERE X eid 12 + rqlst = parse(u'Any 2') # this is the simplified rql st for Any X WHERE X eid 12 rewrite(rqlst, {('2', 'X'): (constraint,)}, {}) self.assertEqual(rqlst.as_string(), - u"Any 2 WHERE B eid %(C)s, " - "EXISTS(2 in_state A, B in_group D, E require_state A, " - "E name 'read', E require_group D, A is State, D is CWGroup, E is CWPermission)") + u'Any 2 WHERE B eid %(C)s, ' + 'EXISTS(2 in_state A, B in_group D, E require_state A, ' + 'E name "read", E require_group D, A is State, D is CWGroup, E is CWPermission)') def test_optional_var_1(self): constraint = ('X in_state S, U in_group G, P require_state S,' 'P name "read", P require_group G') - rqlst = parse('Any A,C WHERE A documented_by C?') + rqlst = parse(u'Any A,C WHERE A documented_by C?') rewrite(rqlst, {('C', 'X'): (constraint,)}, {}) self.assertEqual(rqlst.as_string(), - "Any A,C WHERE A documented_by C?, A is Affaire " - "WITH C BEING " - "(Any C WHERE EXISTS(C in_state B, D in_group F, G require_state B, G name 'read', " - "G require_group F), D eid %(A)s, C is Card)") + u'Any A,C WHERE A documented_by C?, A is Affaire ' + 'WITH C BEING ' + '(Any C WHERE EXISTS(C in_state B, D in_group F, G require_state B, G name "read", ' + 'G require_group F), D eid %(A)s, C is Card)') def test_optional_var_2(self): constraint = ('X in_state S, U in_group G, P require_state S,' 'P name "read", P require_group G') - rqlst = parse('Any A,C,T WHERE A documented_by C?, C title T') + rqlst = parse(u'Any A,C,T WHERE A documented_by C?, C title T') rewrite(rqlst, {('C', 'X'): (constraint,)}, {}) self.assertEqual(rqlst.as_string(), - "Any A,C,T WHERE A documented_by C?, A is Affaire " - "WITH C,T BEING " - "(Any C,T WHERE C title T, EXISTS(C in_state B, D in_group F, " - "G require_state B, G name 'read', G require_group F), " - "D eid %(A)s, C is Card)") + u'Any A,C,T WHERE A documented_by C?, A is Affaire ' + 'WITH C,T BEING ' + '(Any C,T WHERE C title T, EXISTS(C in_state B, D in_group F, ' + 'G require_state B, G name "read", G require_group F), ' + 'D eid %(A)s, C is Card)') def test_optional_var_3(self): constraint1 = ('X in_state S, U in_group G, P require_state S,' 'P name "read", P require_group G') constraint2 = 'X in_state S, S name "public"' - rqlst = parse('Any A,C,T WHERE A documented_by C?, C title T') + rqlst = parse(u'Any A,C,T WHERE A documented_by C?, C title T') rewrite(rqlst, {('C', 'X'): (constraint1, constraint2)}, {}) self.assertEqual(rqlst.as_string(), - "Any A,C,T WHERE A documented_by C?, A is Affaire " - "WITH C,T BEING (Any C,T WHERE C title T, " - "(EXISTS(C in_state B, D in_group F, G require_state B, G name 'read', G require_group F)) " - "OR (EXISTS(C in_state E, E name 'public')), " - "D eid %(A)s, C is 
Card)") + u'Any A,C,T WHERE A documented_by C?, A is Affaire ' + 'WITH C,T BEING (Any C,T WHERE C title T, ' + '(EXISTS(C in_state B, D in_group F, G require_state B, G name "read", G require_group F)) ' + 'OR (EXISTS(C in_state E, E name "public")), ' + 'D eid %(A)s, C is Card)') def test_optional_var_4(self): constraint1 = 'A created_by U, X documented_by A' constraint2 = 'A created_by U, X concerne A' constraint3 = 'X created_by U' - rqlst = parse('Any X,LA,Y WHERE LA? documented_by X, LA concerne Y') + rqlst = parse(u'Any X,LA,Y WHERE LA? documented_by X, LA concerne Y') rewrite(rqlst, {('LA', 'X'): (constraint1, constraint2), ('X', 'X'): (constraint3,), ('Y', 'X'): (constraint3,)}, {}) @@ -208,7 +210,7 @@ # see test of the same name in RewriteFullTC: original problem is # unreproducible here because it actually lies in # RQLRewriter.insert_local_checks - rqlst = parse('Any A,AR,X,CD WHERE A concerne X?, A ref AR, A eid %(a)s, X creation_date CD') + rqlst = parse(u'Any A,AR,X,CD WHERE A concerne X?, A ref AR, A eid %(a)s, X creation_date CD') rewrite(rqlst, {('X', 'X'): ('X created_by U',),}, {'a': 3}) self.assertEqual(rqlst.as_string(), u'Any A,AR,X,CD WHERE A concerne X?, A ref AR, A eid %(a)s WITH X,CD BEING (Any X,CD WHERE X creation_date CD, EXISTS(X created_by B), B eid %(A)s, X is IN(Division, Note, Societe))') @@ -216,7 +218,7 @@ def test_optional_var_inlined(self): c1 = ('X require_permission P') c2 = ('X inlined_card O, O require_permission P') - rqlst = parse('Any C,A,R WHERE A? inlined_card C, A ref R') + rqlst = parse(u'Any C,A,R WHERE A? inlined_card C, A ref R') rewrite(rqlst, {('C', 'X'): (c1,), ('A', 'X'): (c2,), }, {}) @@ -231,7 +233,7 @@ # def test_optional_var_inlined_has_perm(self): # c1 = ('X require_permission P') # c2 = ('X inlined_card O, U has_read_permission O') - # rqlst = parse('Any C,A,R WHERE A? inlined_card C, A ref R') + # rqlst = parse(u'Any C,A,R WHERE A? inlined_card C, A ref R') # rewrite(rqlst, {('C', 'X'): (c1,), # ('A', 'X'): (c2,), # }, {}) @@ -241,7 +243,7 @@ def test_optional_var_inlined_imbricated_error(self): c1 = ('X require_permission P') c2 = ('X inlined_card O, O require_permission P') - rqlst = parse('Any C,A,R,A2,R2 WHERE A? inlined_card C, A ref R,A2? inlined_card C, A2 ref R2') + rqlst = parse(u'Any C,A,R,A2,R2 WHERE A? inlined_card C, A ref R,A2? 
inlined_card C, A2 ref R2') self.assertRaises(BadSchemaDefinition, rewrite, rqlst, {('C', 'X'): (c1,), ('A', 'X'): (c2,), @@ -251,7 +253,7 @@ def test_optional_var_inlined_linked(self): c1 = ('X require_permission P') c2 = ('X inlined_card O, O require_permission P') - rqlst = parse('Any A,W WHERE A inlined_card C?, C inlined_note N, ' + rqlst = parse(u'Any A,W WHERE A inlined_card C?, C inlined_note N, ' 'N inlined_affaire W') rewrite(rqlst, {('C', 'X'): (c1,)}, {}) self.assertEqual(rqlst.as_string(), @@ -265,70 +267,70 @@ # relation used in the rql expression can be ignored and S replaced by # the variable from the incoming query snippet = ('X in_state S, S name "hop"') - rqlst = parse('Card C WHERE C in_state STATE') + rqlst = parse(u'Card C WHERE C in_state STATE') rewrite(rqlst, {('C', 'X'): (snippet,)}, {}) self.assertEqual(rqlst.as_string(), - "Any C WHERE C in_state STATE, C is Card, " - "EXISTS(STATE name 'hop'), STATE is State") + 'Any C WHERE C in_state STATE, C is Card, ' + 'EXISTS(STATE name "hop"), STATE is State') def test_relation_optimization_1_rhs(self): snippet = ('TW subworkflow_exit X, TW name "hop"') - rqlst = parse('WorkflowTransition C WHERE C subworkflow_exit EXIT') + rqlst = parse(u'WorkflowTransition C WHERE C subworkflow_exit EXIT') rewrite(rqlst, {('EXIT', 'X'): (snippet,)}, {}) self.assertEqual(rqlst.as_string(), - "Any C WHERE C subworkflow_exit EXIT, C is WorkflowTransition, " - "EXISTS(C name 'hop'), EXIT is SubWorkflowExitPoint") + 'Any C WHERE C subworkflow_exit EXIT, C is WorkflowTransition, ' + 'EXISTS(C name "hop"), EXIT is SubWorkflowExitPoint') def test_relation_optimization_2_lhs(self): # optional relation can be shared if also optional in the snippet snippet = ('X in_state S?, S name "hop"') - rqlst = parse('Card C WHERE C in_state STATE?') + rqlst = parse(u'Card C WHERE C in_state STATE?') rewrite(rqlst, {('C', 'X'): (snippet,)}, {}) self.assertEqual(rqlst.as_string(), - "Any C WHERE C in_state STATE?, C is Card, " - "EXISTS(STATE name 'hop'), STATE is State") + 'Any C WHERE C in_state STATE?, C is Card, ' + 'EXISTS(STATE name "hop"), STATE is State') def test_relation_optimization_2_rhs(self): snippet = ('TW? subworkflow_exit X, TW name "hop"') - rqlst = parse('SubWorkflowExitPoint EXIT WHERE C? subworkflow_exit EXIT') + rqlst = parse(u'SubWorkflowExitPoint EXIT WHERE C? subworkflow_exit EXIT') rewrite(rqlst, {('EXIT', 'X'): (snippet,)}, {}) self.assertEqual(rqlst.as_string(), - "Any EXIT WHERE C? subworkflow_exit EXIT, EXIT is SubWorkflowExitPoint, " - "EXISTS(C name 'hop'), C is WorkflowTransition") + 'Any EXIT WHERE C? subworkflow_exit EXIT, EXIT is SubWorkflowExitPoint, ' + 'EXISTS(C name "hop"), C is WorkflowTransition') def test_relation_optimization_3_lhs(self): # optional relation in the snippet but not in the orig tree can be shared snippet = ('X in_state S?, S name "hop"') - rqlst = parse('Card C WHERE C in_state STATE') + rqlst = parse(u'Card C WHERE C in_state STATE') rewrite(rqlst, {('C', 'X'): (snippet,)}, {}) self.assertEqual(rqlst.as_string(), - "Any C WHERE C in_state STATE, C is Card, " - "EXISTS(STATE name 'hop'), STATE is State") + 'Any C WHERE C in_state STATE, C is Card, ' + 'EXISTS(STATE name "hop"), STATE is State') def test_relation_optimization_3_rhs(self): snippet = ('TW? 
subworkflow_exit X, TW name "hop"') - rqlst = parse('WorkflowTransition C WHERE C subworkflow_exit EXIT') + rqlst = parse(u'WorkflowTransition C WHERE C subworkflow_exit EXIT') rewrite(rqlst, {('EXIT', 'X'): (snippet,)}, {}) self.assertEqual(rqlst.as_string(), - "Any C WHERE C subworkflow_exit EXIT, C is WorkflowTransition, " - "EXISTS(C name 'hop'), EXIT is SubWorkflowExitPoint") + 'Any C WHERE C subworkflow_exit EXIT, C is WorkflowTransition, ' + 'EXISTS(C name "hop"), EXIT is SubWorkflowExitPoint') def test_relation_non_optimization_1_lhs(self): # but optional relation in the orig tree but not in the snippet can't be shared snippet = ('X in_state S, S name "hop"') - rqlst = parse('Card C WHERE C in_state STATE?') + rqlst = parse(u'Card C WHERE C in_state STATE?') rewrite(rqlst, {('C', 'X'): (snippet,)}, {}) self.assertEqual(rqlst.as_string(), - "Any C WHERE C in_state STATE?, C is Card, " - "EXISTS(C in_state A, A name 'hop', A is State), STATE is State") + 'Any C WHERE C in_state STATE?, C is Card, ' + 'EXISTS(C in_state A, A name "hop", A is State), STATE is State') def test_relation_non_optimization_1_rhs(self): snippet = ('TW subworkflow_exit X, TW name "hop"') - rqlst = parse('SubWorkflowExitPoint EXIT WHERE C? subworkflow_exit EXIT') + rqlst = parse(u'SubWorkflowExitPoint EXIT WHERE C? subworkflow_exit EXIT') rewrite(rqlst, {('EXIT', 'X'): (snippet,)}, {}) self.assertEqual(rqlst.as_string(), - "Any EXIT WHERE C? subworkflow_exit EXIT, EXIT is SubWorkflowExitPoint, " - "EXISTS(A subworkflow_exit EXIT, A name 'hop', A is WorkflowTransition), " - "C is WorkflowTransition") + 'Any EXIT WHERE C? subworkflow_exit EXIT, EXIT is SubWorkflowExitPoint, ' + 'EXISTS(A subworkflow_exit EXIT, A name "hop", A is WorkflowTransition), ' + 'C is WorkflowTransition') def test_relation_non_optimization_2(self): """See #3024730""" @@ -336,7 +338,7 @@ # previously inserted, else this may introduce duplicated results, as N # will then be shared by multiple EXISTS and so at SQL generation time, # the table will be in the FROM clause of the outermost query - rqlst = parse('Any A,C WHERE A inlined_card C') + rqlst = parse(u'Any A,C WHERE A inlined_card C') rewrite(rqlst, {('A', 'X'): ('X inlined_card C, C inlined_note N, N owned_by U',), ('C', 'X'): ('X inlined_note N, N owned_by U',)}, {}) self.assertEqual(rqlst.as_string(), @@ -348,35 +350,35 @@ def test_unsupported_constraint_1(self): # CWUser doesn't have require_permission trinfo_constraint = ('X wf_info_for Y, Y require_permission P, P name "read"') - rqlst = parse('Any U,T WHERE U is CWUser, T wf_info_for U') + rqlst = parse(u'Any U,T WHERE U is CWUser, T wf_info_for U') self.assertRaises(Unauthorized, rewrite, rqlst, {('T', 'X'): (trinfo_constraint,)}, {}) def test_unsupported_constraint_2(self): trinfo_constraint = ('X wf_info_for Y, Y require_permission P, P name "read"') - rqlst = parse('Any U,T WHERE U is CWUser, T wf_info_for U') + rqlst = parse(u'Any U,T WHERE U is CWUser, T wf_info_for U') rewrite(rqlst, {('T', 'X'): (trinfo_constraint, 'X wf_info_for Y, Y in_group G, G name "managers"')}, {}) self.assertEqual(rqlst.as_string(), - u"Any U,T WHERE U is CWUser, T wf_info_for U, " - "EXISTS(U in_group B, B name 'managers', B is CWGroup), T is TrInfo") + u'Any U,T WHERE U is CWUser, T wf_info_for U, ' + 'EXISTS(U in_group B, B name "managers", B is CWGroup), T is TrInfo') def test_unsupported_constraint_3(self): self.skipTest('raise unauthorized for now') trinfo_constraint = ('X wf_info_for Y, Y require_permission P, P name "read"') - rqlst = 
parse('Any T WHERE T wf_info_for X') + rqlst = parse(u'Any T WHERE T wf_info_for X') rewrite(rqlst, {('T', 'X'): (trinfo_constraint, 'X in_group G, G name "managers"')}, {}) self.assertEqual(rqlst.as_string(), u'XXX dunno what should be generated') def test_add_ambiguity_exists(self): constraint = ('X concerne Y') - rqlst = parse('Affaire X') + rqlst = parse(u'Affaire X') rewrite(rqlst, {('X', 'X'): (constraint,)}, {}) self.assertEqual(rqlst.as_string(), u"Any X WHERE X is Affaire, ((EXISTS(X concerne A, A is Division)) OR (EXISTS(X concerne C, C is Societe))) OR (EXISTS(X concerne B, B is Note))") def test_add_ambiguity_outerjoin(self): constraint = ('X concerne Y') - rqlst = parse('Any X,C WHERE X? documented_by C') + rqlst = parse(u'Any X,C WHERE X? documented_by C') rewrite(rqlst, {('X', 'X'): (constraint,)}, {}) # ambiguity are kept in the sub-query, no need to be resolved using OR self.assertEqual(rqlst.as_string(), @@ -385,76 +387,76 @@ def test_rrqlexpr_nonexistant_subject_1(self): constraint = RRQLExpression('S owned_by U') - rqlst = parse('Card C') + rqlst = parse(u'Card C') rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU') self.assertEqual(rqlst.as_string(), u"Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A)") - rqlst = parse('Card C') + rqlst = parse(u'Card C') rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'OU') self.assertEqual(rqlst.as_string(), u"Any C WHERE C is Card") - rqlst = parse('Card C') + rqlst = parse(u'Card C') rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SOU') self.assertEqual(rqlst.as_string(), u"Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A)") def test_rrqlexpr_nonexistant_subject_2(self): constraint = RRQLExpression('S owned_by U, O owned_by U, O is Card') - rqlst = parse('Card C') + rqlst = parse(u'Card C') rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU') self.assertEqual(rqlst.as_string(), 'Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A)') - rqlst = parse('Card C') + rqlst = parse(u'Card C') rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'OU') self.assertEqual(rqlst.as_string(), 'Any C WHERE C is Card, B eid %(D)s, EXISTS(A owned_by B, A is Card)') - rqlst = parse('Card C') + rqlst = parse(u'Card C') rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SOU') self.assertEqual(rqlst.as_string(), 'Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A, D owned_by A, D is Card)') def test_rrqlexpr_nonexistant_subject_3(self): constraint = RRQLExpression('U in_group G, G name "users"') - rqlst = parse('Card C') + rqlst = parse(u'Card C') rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU') self.assertEqual(rqlst.as_string(), u'Any C WHERE C is Card, A eid %(B)s, EXISTS(A in_group D, D name "users", D is CWGroup)') def test_rrqlexpr_nonexistant_subject_4(self): constraint = RRQLExpression('U in_group G, G name "users", S owned_by U') - rqlst = parse('Card C') + rqlst = parse(u'Card C') rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU') self.assertEqual(rqlst.as_string(), u'Any C WHERE C is Card, A eid %(B)s, EXISTS(A in_group D, D name "users", C owned_by A, D is CWGroup)') - rqlst = parse('Card C') + rqlst = parse(u'Card C') rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'OU') self.assertEqual(rqlst.as_string(), u'Any C WHERE C is Card, A eid %(B)s, EXISTS(A in_group D, D name "users", D is CWGroup)') def test_rrqlexpr_nonexistant_subject_5(self): constraint = RRQLExpression('S owned_by Z, O owned_by Z, O is Card') - rqlst = parse('Card C') + rqlst = parse(u'Card C') rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'S') 
self.assertEqual(rqlst.as_string(), u"Any C WHERE C is Card, EXISTS(C owned_by A, A is CWUser)") def test_rqlexpr_not_relation_1_1(self): constraint = ERQLExpression('X owned_by Z, Z login "hop"', 'X') - rqlst = parse('Affaire A WHERE NOT EXISTS(A documented_by C)') + rqlst = parse(u'Affaire A WHERE NOT EXISTS(A documented_by C)') rewrite(rqlst, {('C', 'X'): (constraint,)}, {}, 'X') self.assertEqual(rqlst.as_string(), u'Any A WHERE NOT EXISTS(A documented_by C, EXISTS(C owned_by B, B login "hop", B is CWUser), C is Card), A is Affaire') def test_rqlexpr_not_relation_1_2(self): constraint = ERQLExpression('X owned_by Z, Z login "hop"', 'X') - rqlst = parse('Affaire A WHERE NOT EXISTS(A documented_by C)') + rqlst = parse(u'Affaire A WHERE NOT EXISTS(A documented_by C)') rewrite(rqlst, {('A', 'X'): (constraint,)}, {}, 'X') self.assertEqual(rqlst.as_string(), u'Any A WHERE NOT EXISTS(A documented_by C, C is Card), A is Affaire, EXISTS(A owned_by B, B login "hop", B is CWUser)') def test_rqlexpr_not_relation_2(self): constraint = ERQLExpression('X owned_by Z, Z login "hop"', 'X') - rqlst = rqlhelper.parse('Affaire A WHERE NOT A documented_by C', annotate=False) + rqlst = rqlhelper.parse(u'Affaire A WHERE NOT A documented_by C', annotate=False) rewrite(rqlst, {('C', 'X'): (constraint,)}, {}, 'X') self.assertEqual(rqlst.as_string(), u'Any A WHERE NOT EXISTS(A documented_by C, EXISTS(C owned_by B, B login "hop", B is CWUser), C is Card), A is Affaire') @@ -463,7 +465,7 @@ c1 = ERQLExpression('X owned_by Z, Z login "hop"', 'X') c2 = ERQLExpression('X owned_by Z, Z login "hip"', 'X') c3 = ERQLExpression('X owned_by Z, Z login "momo"', 'X') - rqlst = rqlhelper.parse('Any A WHERE A documented_by C?', annotate=False) + rqlst = rqlhelper.parse(u'Any A WHERE A documented_by C?', annotate=False) rewrite(rqlst, {('C', 'X'): (c1, c2, c3)}, {}, 'X') self.assertEqual(rqlst.as_string(), u'Any A WHERE A documented_by C?, A is Affaire ' @@ -484,12 +486,12 @@ # 4. 
this variable require a rewrite c_bad = ERQLExpression('X documented_by R, A in_state R') - rqlst = parse('Any A, R WHERE A ref R, S is Affaire') + rqlst = parse(u'Any A, R WHERE A ref R, S is Affaire') rewrite(rqlst, {('A', 'X'): (c_ok, c_bad)}, {}) def test_nonregr_is_instance_of(self): user_expr = ERQLExpression('NOT X in_group AF, AF name "guests"') - rqlst = parse('Any O WHERE S use_email O, S is CWUser, O is_instance_of EmailAddress') + rqlst = parse(u'Any O WHERE S use_email O, S is CWUser, O is_instance_of EmailAddress') rewrite(rqlst, {('S', 'X'): (user_expr,)}, {}) self.assertEqual(rqlst.as_string(), 'Any O WHERE S use_email O, S is CWUser, O is EmailAddress, ' @@ -600,7 +602,7 @@ # Basic tests def test_base_rule(self): rules = {'participated_in': 'S contributor O'} - rqlst = rqlhelper.parse('Any X WHERE X participated_in S') + rqlst = rqlhelper.parse(u'Any X WHERE X participated_in S') rule_rewrite(rqlst, rules) self.assertEqual('Any X WHERE X contributor S', rqlst.as_string()) @@ -609,7 +611,7 @@ rules = {'illustrator_of': ('C is Contribution, C contributor S, ' 'C manifestation O, C role R, ' 'R name "illustrator"')} - rqlst = rqlhelper.parse('Any A,B WHERE A illustrator_of B') + rqlst = rqlhelper.parse(u'Any A,B WHERE A illustrator_of B') rule_rewrite(rqlst, rules) self.assertEqual('Any A,B WHERE C is Contribution, ' 'C contributor A, C manifestation B, ' @@ -620,7 +622,7 @@ rules = {'illustrator_of': ('C is Contribution, C contributor S, ' 'C manifestation O, C role R, ' 'R name "illustrator"')} - rqlst = rqlhelper.parse('Any A WHERE EXISTS(A illustrator_of B)') + rqlst = rqlhelper.parse(u'Any A WHERE EXISTS(A illustrator_of B)') rule_rewrite(rqlst, rules) self.assertEqual('Any A WHERE EXISTS(C is Contribution, ' 'C contributor A, C manifestation B, ' @@ -631,7 +633,7 @@ def test_rewrite2(self): rules = {'illustrator_of': 'C is Contribution, C contributor S, ' 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse('Any A,B WHERE A illustrator_of B, C require_permission R, S' + rqlst = rqlhelper.parse(u'Any A,B WHERE A illustrator_of B, C require_permission R, S' 'require_state O') rule_rewrite(rqlst, rules) self.assertEqual('Any A,B WHERE C require_permission R, S require_state O, ' @@ -642,7 +644,7 @@ def test_rewrite3(self): rules = {'illustrator_of': 'C is Contribution, C contributor S, ' 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse('Any A,B WHERE E require_permission T, A illustrator_of B') + rqlst = rqlhelper.parse(u'Any A,B WHERE E require_permission T, A illustrator_of B') rule_rewrite(rqlst, rules) self.assertEqual('Any A,B WHERE E require_permission T, ' 'C is Contribution, C contributor A, C manifestation B, ' @@ -652,7 +654,7 @@ def test_rewrite4(self): rules = {'illustrator_of': 'C is Contribution, C contributor S, ' 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse('Any A,B WHERE C require_permission R, A illustrator_of B') + rqlst = rqlhelper.parse(u'Any A,B WHERE C require_permission R, A illustrator_of B') rule_rewrite(rqlst, rules) self.assertEqual('Any A,B WHERE C require_permission R, ' 'D is Contribution, D contributor A, D manifestation B, ' @@ -662,7 +664,7 @@ def test_rewrite5(self): rules = {'illustrator_of': 'C is Contribution, C contributor S, ' 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse('Any A,B WHERE C require_permission R, A illustrator_of B, ' + rqlst = rqlhelper.parse(u'Any A,B WHERE C require_permission R, A illustrator_of 
B, ' 'S require_state O') rule_rewrite(rqlst, rules) self.assertEqual('Any A,B WHERE C require_permission R, S require_state O, ' @@ -674,7 +676,7 @@ def test_rewrite_with(self): rules = {'illustrator_of': 'C is Contribution, C contributor S, ' 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse('Any A,B WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)') + rqlst = rqlhelper.parse(u'Any A,B WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)') rule_rewrite(rqlst, rules) self.assertEqual('Any A,B WITH A,B BEING ' '(Any X,Y WHERE A is Contribution, A contributor X, ' @@ -684,7 +686,7 @@ def test_rewrite_with2(self): rules = {'illustrator_of': 'C is Contribution, C contributor S, ' 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse('Any A,B WHERE T require_permission C WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)') + rqlst = rqlhelper.parse(u'Any A,B WHERE T require_permission C WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)') rule_rewrite(rqlst, rules) self.assertEqual('Any A,B WHERE T require_permission C ' 'WITH A,B BEING (Any X,Y WHERE A is Contribution, ' @@ -693,7 +695,7 @@ def test_rewrite_with3(self): rules = {'participated_in': 'S contributor O'} - rqlst = rqlhelper.parse('Any A,B WHERE A participated_in B ' + rqlst = rqlhelper.parse(u'Any A,B WHERE A participated_in B ' 'WITH A, B BEING(Any X,Y WHERE X contributor Y)') rule_rewrite(rqlst, rules) self.assertEqual('Any A,B WHERE A contributor B WITH A,B BEING ' @@ -703,7 +705,7 @@ def test_rewrite_with4(self): rules = {'illustrator_of': 'C is Contribution, C contributor S, ' 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse('Any A,B WHERE A illustrator_of B ' + rqlst = rqlhelper.parse(u'Any A,B WHERE A illustrator_of B ' 'WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)') rule_rewrite(rqlst, rules) self.assertEqual('Any A,B WHERE C is Contribution, ' @@ -717,7 +719,7 @@ def test_rewrite_union(self): rules = {'illustrator_of': 'C is Contribution, C contributor S, ' 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse('(Any A,B WHERE A illustrator_of B) UNION' + rqlst = rqlhelper.parse(u'(Any A,B WHERE A illustrator_of B) UNION' '(Any X,Y WHERE X is CWUser, Z manifestation Y)') rule_rewrite(rqlst, rules) self.assertEqual('(Any A,B WHERE C is Contribution, ' @@ -728,7 +730,7 @@ def test_rewrite_union2(self): rules = {'illustrator_of': 'C is Contribution, C contributor S, ' 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse('(Any Y WHERE Y match W) UNION ' + rqlst = rqlhelper.parse(u'(Any Y WHERE Y match W) UNION ' '(Any A WHERE A illustrator_of B) UNION ' '(Any Y WHERE Y is ArtWork)') rule_rewrite(rqlst, rules) @@ -742,7 +744,7 @@ def test_rewrite_exists(self): rules = {'illustrator_of': 'C is Contribution, C contributor S, ' 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse('(Any A,B WHERE A illustrator_of B, ' + rqlst = rqlhelper.parse(u'(Any A,B WHERE A illustrator_of B, ' 'EXISTS(B is ArtWork))') rule_rewrite(rqlst, rules) self.assertEqual('Any A,B WHERE EXISTS(B is ArtWork), ' @@ -753,7 +755,7 @@ def test_rewrite_exists2(self): rules = {'illustrator_of': 'C is Contribution, C contributor S, ' 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse('(Any A,B WHERE B contributor A, EXISTS(A illustrator_of W))') + rqlst = rqlhelper.parse(u'(Any A,B WHERE B contributor A, EXISTS(A illustrator_of W))') rule_rewrite(rqlst, rules) 
self.assertEqual('Any A,B WHERE B contributor A, ' 'EXISTS(C is Contribution, C contributor A, C manifestation W, ' @@ -763,7 +765,7 @@ def test_rewrite_exists3(self): rules = {'illustrator_of': 'C is Contribution, C contributor S, ' 'C manifestation O, C role R, R name "illustrator"'} - rqlst = rqlhelper.parse('(Any A,B WHERE A illustrator_of B, EXISTS(A illustrator_of W))') + rqlst = rqlhelper.parse(u'(Any A,B WHERE A illustrator_of B, EXISTS(A illustrator_of W))') rule_rewrite(rqlst, rules) self.assertEqual('Any A,B WHERE EXISTS(C is Contribution, C contributor A, ' 'C manifestation W, C role D, D name "illustrator"), ' @@ -774,7 +776,7 @@ # Test for GROUPBY def test_rewrite_groupby(self): rules = {'participated_in': 'S contributor O'} - rqlst = rqlhelper.parse('Any SUM(SA) GROUPBY S WHERE P participated_in S, P manifestation SA') + rqlst = rqlhelper.parse(u'Any SUM(SA) GROUPBY S WHERE P participated_in S, P manifestation SA') rule_rewrite(rqlst, rules) self.assertEqual('Any SUM(SA) GROUPBY S WHERE P manifestation SA, P contributor S', rqlst.as_string()) diff -r a4fcee1e9789 -r 19fcce6dc6d1 test/unittest_rset.py --- a/test/unittest_rset.py Thu Mar 24 09:43:25 2016 +0100 +++ b/test/unittest_rset.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,8 +18,9 @@ # with CubicWeb. If not, see . """unit tests for module cubicweb.utils""" -from urlparse import urlsplit -import pickle +from six import string_types +from six.moves import cPickle as pickle +from six.moves.urllib.parse import urlsplit from rql import parse @@ -100,7 +101,11 @@ def test_pickle(self): del self.rset.req - self.assertEqual(len(pickle.dumps(self.rset)), 376) + rs2 = pickle.loads(pickle.dumps(self.rset)) + self.assertEqual(self.rset.rows, rs2.rows) + self.assertEqual(self.rset.rowcount, rs2.rowcount) + self.assertEqual(self.rset.rql, rs2.rql) + self.assertEqual(self.rset.description, rs2.description) def test_build_url(self): with self.admin_access.web_request() as req: @@ -274,13 +279,13 @@ """make sure syntax tree is cached""" rqlst1 = self.rset.syntax_tree() rqlst2 = self.rset.syntax_tree() - self.assert_(rqlst1 is rqlst2) + self.assertIs(rqlst1, rqlst2) def test_get_entity_simple(self): with self.admin_access.web_request() as req: req.create_entity('CWUser', login=u'adim', upassword='adim', surname=u'di mascio', firstname=u'adrien') - req.cnx.drop_entity_cache() + req.drop_entity_cache() e = req.execute('Any X,T WHERE X login "adim", X surname T').get_entity(0, 0) self.assertEqual(e.cw_attr_cache['surname'], 'di mascio') self.assertRaises(KeyError, e.cw_attr_cache.__getitem__, 'firstname') @@ -293,7 +298,7 @@ def test_get_entity_advanced(self): with self.admin_access.web_request() as req: req.create_entity('Bookmark', title=u'zou', path=u'/view') - req.cnx.drop_entity_cache() + req.drop_entity_cache() req.execute('SET X bookmarked_by Y WHERE X is Bookmark, Y login "anon"') rset = req.execute('Any X,Y,XT,YN WHERE X bookmarked_by Y, X title XT, Y login YN') @@ -340,7 +345,7 @@ e = rset.get_entity(0, 0) self.assertEqual(e.cw_attr_cache['title'], 'zou') self.assertEqual(pprelcachedict(e._cw_related_cache), - [('created_by_subject', [req.user.eid])]) + [('created_by_subject', [req.user.eid])]) # first level of recursion u = e.created_by[0] self.assertEqual(u.cw_attr_cache['login'], 'admin') @@ -369,7 +374,7 @@ def test_get_entity_union(self): with self.admin_access.web_request() as req: e = req.create_entity('Bookmark', title=u'manger', path=u'path') - req.cnx.drop_entity_cache() + req.drop_entity_cache() rset = req.execute('Any X,N 
ORDERBY N WITH X,N BEING ' '((Any X,N WHERE X is Bookmark, X title N)' ' UNION ' @@ -550,19 +555,32 @@ def test_str(self): with self.admin_access.web_request() as req: rset = req.execute('(Any X,N WHERE X is CWGroup, X name N)') - self.assertIsInstance(str(rset), basestring) + self.assertIsInstance(str(rset), string_types) self.assertEqual(len(str(rset).splitlines()), 1) def test_repr(self): with self.admin_access.web_request() as req: rset = req.execute('(Any X,N WHERE X is CWGroup, X name N)') - self.assertIsInstance(repr(rset), basestring) + self.assertIsInstance(repr(rset), string_types) self.assertTrue(len(repr(rset).splitlines()) > 1) rset = req.execute('(Any X WHERE X is CWGroup, X name "managers")') - self.assertIsInstance(str(rset), basestring) + self.assertIsInstance(str(rset), string_types) self.assertEqual(len(str(rset).splitlines()), 1) + def test_slice(self): + rs = ResultSet([[12000, 'adim', u'Adim chez les pinguins'], + [12000, 'adim', u'Jardiner facile'], + [13000, 'syt', u'Le carrelage en 42 leçons'], + [14000, 'nico', u'La tarte tatin en 15 minutes'], + [14000, 'nico', u"L'épluchage du castor commun"]], + 'Any U, L, T WHERE U is CWUser, U login L,'\ + 'D created_by U, D title T', + description=[['CWUser', 'String', 'String']] * 5) + self.assertEqual(rs[1::2], + [[12000, 'adim', u'Jardiner facile'], + [14000, 'nico', u'La tarte tatin en 15 minutes']]) + def test_nonregr_symmetric_relation(self): # see https://www.cubicweb.org/ticket/4739253 with self.admin_access.client_cnx() as cnx: diff -r a4fcee1e9789 -r 19fcce6dc6d1 test/unittest_schema.py --- a/test/unittest_schema.py Thu Mar 24 09:43:25 2016 +0100 +++ b/test/unittest_schema.py Thu Jun 16 14:19:20 2016 +0200 @@ -105,10 +105,9 @@ # # isinstance(cstr, RQLConstraint) # -> expected to return RQLConstraint instances but not - # RRQLVocabularyConstraint and QLUniqueConstraint + # RQLVocabularyConstraint and RQLUniqueConstraint self.assertFalse(issubclass(RQLUniqueConstraint, RQLVocabularyConstraint)) self.assertFalse(issubclass(RQLUniqueConstraint, RQLConstraint)) - self.assertTrue(issubclass(RQLConstraint, RQLVocabularyConstraint)) def test_entity_perms(self): self.assertEqual(eperson.get_groups('read'), set(('managers', 'users', 'guests'))) @@ -158,7 +157,7 @@ def test_knownValues_load_schema(self): schema = loader.load(config) - self.assert_(isinstance(schema, CubicWebSchema)) + self.assertIsInstance(schema, CubicWebSchema) self.assertEqual(schema.name, 'data') entities = sorted([str(e) for e in schema.entities()]) expected_entities = ['Ami', 'BaseTransition', 'BigInt', 'Bookmark', 'Boolean', 'Bytes', 'Card', @@ -179,7 +178,7 @@ self.assertListEqual(sorted(expected_entities), entities) relations = sorted([str(r) for r in schema.relations()]) expected_relations = ['actionnaire', 'add_permission', 'address', 'alias', 'allowed_transition', 'associe', - 'bookmarked_by', 'by_transition', + 'bookmarked_by', 'by_transition', 'buddies', 'cardinality', 'comment', 'comment_format', 'composite', 'condition', 'config', 'connait', @@ -226,7 +225,7 @@ eschema = schema.eschema('CWUser') rels = sorted(str(r) for r in eschema.subject_relations()) - self.assertListEqual(rels, ['created_by', 'creation_date', 'custom_workflow', + self.assertListEqual(rels, ['buddies', 'created_by', 'creation_date', 'custom_workflow', 'cw_source', 'cwuri', 'eid', 'evaluee', 'firstname', 'has_group_permission', 'has_text', 'identity', @@ -236,7 +235,7 @@ 'primary_email', 'surname', 'upassword', 'use_email']) rels = sorted(r.type for r in 
eschema.object_relations()) - self.assertListEqual(rels, ['bookmarked_by', 'created_by', 'for_user', + self.assertListEqual(rels, ['bookmarked_by', 'buddies', 'created_by', 'for_user', 'identity', 'owned_by', 'wf_info_for']) rschema = schema.rschema('relation_type') properties = rschema.rdef('CWAttribute', 'CWRType') @@ -273,11 +272,13 @@ config = TestConfiguration('data', apphome=join(dirname(__file__), 'data_schemareader')) config.bootstrap_cubes() schema = loader.load(config) - self.assertEqual(schema['in_group'].rdefs.values()[0].permissions, + rdef = next(iter(schema['in_group'].rdefs.values())) + self.assertEqual(rdef.permissions, {'read': ('managers',), 'add': ('managers',), 'delete': ('managers',)}) - self.assertEqual(schema['cw_for_source'].rdefs.values()[0].permissions, + rdef = next(iter(schema['cw_for_source'].rdefs.values())) + self.assertEqual(rdef.permissions, {'read': ('managers', 'users'), 'add': ('managers',), 'delete': ('managers',)}) @@ -355,11 +356,11 @@ # check object/subject type self.assertEqual([('Person','Service')], - schema['produces_and_buys'].rdefs.keys()) + list(schema['produces_and_buys'].rdefs.keys())) self.assertEqual([('Person','Service')], - schema['produces_and_buys2'].rdefs.keys()) + list(schema['produces_and_buys2'].rdefs.keys())) self.assertCountEqual([('Company', 'Service'), ('Person', 'Service')], - schema['reproduce'].rdefs.keys()) + list(schema['reproduce'].rdefs.keys())) # check relation definitions are marked infered rdef = schema['produces_and_buys'].rdefs[('Person','Service')] self.assertTrue(rdef.infered) @@ -426,7 +427,9 @@ def test(self): self.assertEqual(normalize_expression('X bla Y,Y blur Z , Z zigoulou X '), - 'X bla Y, Y blur Z, Z zigoulou X') + 'X bla Y, Y blur Z, Z zigoulou X') + self.assertEqual(normalize_expression('X bla Y, Y name "x,y"'), + 'X bla Y, Y name "x,y"') class RQLExpressionTC(TestCase): @@ -553,7 +556,7 @@ self.set_description('composite rdefs for %s' % etype) yield self.assertEqual, self.composites[etype], \ sorted([(r.rtype.type, r.subject.type, r.object.type, role) - for r, role in sorted(schema[etype].composite_rdef_roles)]) + for r, role in schema[etype].composite_rdef_roles]) if __name__ == '__main__': diff -r a4fcee1e9789 -r 19fcce6dc6d1 test/unittest_uilib.py --- a/test/unittest_uilib.py Thu Mar 24 09:43:25 2016 +0100 +++ b/test/unittest_uilib.py Thu Jun 16 14:19:20 2016 +0200 @@ -200,4 +200,3 @@ if __name__ == '__main__': unittest_main() - diff -r a4fcee1e9789 -r 19fcce6dc6d1 test/unittest_utils.py --- a/test/unittest_utils.py Thu Mar 24 09:43:25 2016 +0100 +++ b/test/unittest_utils.py Thu Jun 16 14:19:20 2016 +0200 @@ -21,12 +21,13 @@ import decimal import datetime +from six.moves import range from logilab.common.testlib import TestCase, DocTest, unittest_main from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.utils import (make_uid, UStringIO, SizeConstrainedList, - RepeatList, HTMLHead, QueryCache, parse_repo_uri) +from cubicweb.utils import (make_uid, UStringIO, RepeatList, HTMLHead, + QueryCache, parse_repo_uri) from cubicweb.entity import Entity try: @@ -67,7 +68,7 @@ def test_querycache(self): c = QueryCache(ceiling=20) # write only - for x in xrange(10): + for x in range(10): c[x] = x self.assertEqual(c._usage_report(), {'transientcount': 0, @@ -75,7 +76,7 @@ 'permanentcount': 0}) c = QueryCache(ceiling=10) # we should also get a warning - for x in xrange(20): + for x in range(20): c[x] = x self.assertEqual(c._usage_report(), {'transientcount': 0, @@ -83,8 +84,8 @@ 'permanentcount': 0}) # 
write + reads c = QueryCache(ceiling=20) - for n in xrange(4): - for x in xrange(10): + for n in range(4): + for x in range(10): c[x] = x c[x] self.assertEqual(c._usage_report(), @@ -92,8 +93,8 @@ 'itemcount': 10, 'permanentcount': 0}) c = QueryCache(ceiling=20) - for n in xrange(17): - for x in xrange(10): + for n in range(17): + for x in range(10): c[x] = x c[x] self.assertEqual(c._usage_report(), @@ -101,8 +102,8 @@ 'itemcount': 10, 'permanentcount': 10}) c = QueryCache(ceiling=20) - for n in xrange(17): - for x in xrange(10): + for n in range(17): + for x in range(10): c[x] = x if n % 2: c[x] @@ -115,7 +116,7 @@ class UStringIOTC(TestCase): def test_boolean_value(self): - self.assert_(UStringIO()) + self.assertTrue(UStringIO()) class RepeatListTC(TestCase): @@ -165,25 +166,6 @@ self.assertEqual(l, [(1, 3)]*2) -class SizeConstrainedListTC(TestCase): - - def test_append(self): - l = SizeConstrainedList(10) - for i in xrange(12): - l.append(i) - self.assertEqual(l, range(2, 12)) - - def test_extend(self): - testdata = [(range(5), range(5)), - (range(10), range(10)), - (range(12), range(2, 12)), - ] - for extension, expected in testdata: - l = SizeConstrainedList(10) - l.extend(extension) - yield self.assertEqual, l, expected - - class JSONEncoderTC(TestCase): def setUp(self): if json is None: diff -r a4fcee1e9789 -r 19fcce6dc6d1 toolsutils.py --- a/toolsutils.py Thu Mar 24 09:43:25 2016 +0100 +++ b/toolsutils.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,11 +16,13 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . """some utilities for cubicweb command line tools""" +from __future__ import print_function __docformat__ = "restructuredtext en" # XXX move most of this in logilab.common (shellutils ?) 
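toolsutils.py below goes through the same mechanical port as the rest of this changeset: print statements become print() calls under the __future__ import, file() becomes open()/io.open(), and octal literals gain the 0o prefix. A condensed sketch of the pattern (lock_down is a made-up name for illustration; only the stdlib is assumed):

from __future__ import print_function  # print() is a function on python 2 as well

import os

def lock_down(path):
    # parenthesized print works identically on python 2 and 3
    print('-> set permissions to 0600 for %s' % path)
    # 0o600 is the octal spelling both interpreters accept (plain 0600 is py2-only)
    os.chmod(path, 0o600)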
+import io import os, sys import subprocess from os import listdir, makedirs, environ, chmod, walk, remove @@ -37,6 +39,8 @@ def symlink(*args): raise NotImplementedError +from six import add_metaclass + from logilab.common.clcommands import Command as BaseCommand from logilab.common.shellutils import ASK @@ -62,29 +66,29 @@ """create a directory if it doesn't exist yet""" try: makedirs(directory) - print '-> created directory %s' % directory + print('-> created directory %s' % directory) except OSError as ex: import errno if ex.errno != errno.EEXIST: raise - print '-> no need to create existing directory %s' % directory + print('-> no need to create existing directory %s' % directory) def create_symlink(source, target): """create a symbolic link""" if exists(target): remove(target) symlink(source, target) - print '[symlink] %s <-- %s' % (target, source) + print('[symlink] %s <-- %s' % (target, source)) def create_copy(source, target): import shutil - print '[copy] %s <-- %s' % (target, source) + print('[copy] %s <-- %s' % (target, source)) shutil.copy2(source, target) def rm(whatever): import shutil shutil.rmtree(whatever) - print '-> removed %s' % whatever + print('-> removed %s' % whatever) def show_diffs(appl_file, ref_file, askconfirm=True): """interactivly replace the old file with the new file according to @@ -95,8 +99,8 @@ diffs = pipe.stdout.read() if diffs: if askconfirm: - print - print diffs + print() + print(diffs) action = ASK.ask('Replace ?', ('Y', 'n', 'q'), 'Y').lower() else: action = 'y' @@ -106,17 +110,17 @@ except IOError: os.system('chmod a+w %s' % appl_file) shutil.copyfile(ref_file, appl_file) - print 'replaced' + print('replaced') elif action == 'q': sys.exit(0) else: copy_file = appl_file + '.default' - copy = file(copy_file, 'w') + copy = open(copy_file, 'w') copy.write(open(ref_file).read()) copy.close() - print 'keep current version, the new file has been written to', copy_file + print('keep current version, the new file has been written to', copy_file) else: - print 'no diff between %s and %s' % (appl_file, ref_file) + print('no diff between %s and %s' % (appl_file, ref_file)) SKEL_EXCLUDE = ('*.py[co]', '*.orig', '*~', '*_flymake.py') def copy_skeleton(skeldir, targetdir, context, @@ -143,25 +147,25 @@ if not askconfirm or not exists(tfpath) or \ ASK.confirm('%s exists, overwrite?' 
% tfpath): fill_templated_file(fpath, tfpath, context) - print '[generate] %s <-- %s' % (tfpath, fpath) + print('[generate] %s <-- %s' % (tfpath, fpath)) elif exists(tfpath): show_diffs(tfpath, fpath, askconfirm) else: shutil.copyfile(fpath, tfpath) def fill_templated_file(fpath, tfpath, context): - fobj = file(tfpath, 'w') - templated = file(fpath).read() - fobj.write(templated % context) - fobj.close() + with io.open(fpath, encoding='ascii') as fobj: + template = fobj.read() + with io.open(tfpath, 'w', encoding='ascii') as fobj: + fobj.write(template % context) def restrict_perms_to_user(filepath, log=None): """set -rw------- permission on the given file""" if log: log('set permissions to 0600 for %s', filepath) else: - print '-> set permissions to 0600 for %s' % filepath - chmod(filepath, 0600) + print('-> set permissions to 0600 for %s' % filepath) + chmod(filepath, 0o600) def read_config(config_file, raise_if_unreadable=False): """read some simple configuration from `config_file` and return it as a @@ -209,12 +213,13 @@ return cls +@add_metaclass(metacmdhandler) class CommandHandler(object): """configuration specific helper for cubicweb-ctl commands""" - __metaclass__ = metacmdhandler def __init__(self, config): self.config = config + class Command(BaseCommand): """base class for cubicweb-ctl commands""" @@ -234,7 +239,7 @@ raise ConfigurationError(msg) def fail(self, reason): - print "command failed:", reason + print("command failed:", reason) sys.exit(1) diff -r a4fcee1e9789 -r 19fcce6dc6d1 tox.ini --- a/tox.ini Thu Mar 24 09:43:25 2016 +0100 +++ b/tox.ini Thu Jun 16 14:19:20 2016 +0200 @@ -1,50 +1,26 @@ [tox] -env = py27 +envlist = cubicweb,dataimport,devtools,entities,etwist,ext,hooks,server,sobjects,web,wsgi [testenv] sitepackages = True -commands = pytest -t {envname}/test {posargs} +deps = + cubicweb: -r{toxinidir}/test/requirements.txt + devtools: -r{toxinidir}/devtools/test/requirements.txt + entities: -r{toxinidir}/entities/test/requirements.txt + etwist: -r{toxinidir}/etwist/test/requirements.txt + ext: -r{toxinidir}/ext/test/requirements.txt + hooks: -r{toxinidir}/hooks/test/requirements.txt + server: -r{toxinidir}/server/test/requirements.txt + sobjects: -r{toxinidir}/sobjects/test/requirements.txt + web: -r{toxinidir}/web/test/requirements.txt + wsgi: -r{toxinidir}/wsgi/test/requirements.txt +commands = + {envpython} -c 'from logilab.common import pytest; pytest.run()' -t {toxinidir}/{envname}/test {posargs} [testenv:cubicweb] -deps = - -r{toxinidir}/test/requirements.txt -commands = pytest -t test {posargs} - -[testenv:dataimport] - -[testenv:devtools] -deps = - -r{toxinidir}/devtools/test/requirements.txt - -[testenv:entities] -deps = - -r{toxinidir}/entities/test/requirements.txt - -[testenv:etwist] -deps = - -r{toxinidir}/etwist/test/requirements.txt - -[testenv:ext] -deps = - -r{toxinidir}/ext/test/requirements.txt - -[testenv:hooks] - -[testenv:server] -deps = - -r{toxinidir}/server/test/requirements.txt - -[testenv:sobjects] -deps = - -r{toxinidir}/sobjects/test/requirements.txt - -[testenv:web] -deps = - -r{toxinidir}/web/test/requirements.txt - -[testenv:wsgi] -deps = - -r{toxinidir}/wsgi/test/requirements.txt +commands = + {envpython} -m pip install --upgrade --no-deps --quiet git+git://github.com/logilab/yapps@master#egg=yapps + {envpython} -c 'from logilab.common import pytest; pytest.run()' -t {toxinidir}/test {posargs} [testenv:doc] changedir = doc diff -r a4fcee1e9789 -r 19fcce6dc6d1 transaction.py --- a/transaction.py Thu Mar 24 09:43:25 2016 +0100 +++ 
b/transaction.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,7 +17,7 @@ # with CubicWeb. If not, see . """ undoable transaction objects. """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from cubicweb import RepositoryError diff -r a4fcee1e9789 -r 19fcce6dc6d1 uilib.py --- a/uilib.py Thu Mar 24 09:43:25 2016 +0100 +++ b/uilib.py Thu Jun 16 14:19:20 2016 +0200 @@ -26,12 +26,15 @@ import csv import re -from StringIO import StringIO +from io import StringIO + +from six import PY2, PY3, text_type, binary_type, string_types, integer_types from logilab.mtconverter import xml_escape, html_unescape from logilab.common.date import ustrftime from logilab.common.deprecation import deprecated +from cubicweb import _ from cubicweb.utils import js_dumps @@ -62,7 +65,7 @@ return value def print_int(value, req, props, displaytime=True): - return unicode(value) + return text_type(value) def print_date(value, req, props, displaytime=True): return ustrftime(value, req.property_value('ui.date-format')) @@ -92,7 +95,7 @@ _('%d seconds') def print_timedelta(value, req, props, displaytime=True): - if isinstance(value, (int, long)): + if isinstance(value, integer_types): # `date - date`, unlike `datetime - datetime` gives an int # (number of days), not a timedelta # XXX should rql be fixed to return Int instead of Interval in @@ -122,7 +125,7 @@ return req._('no') def print_float(value, req, props, displaytime=True): - return unicode(req.property_value('ui.float-format') % value) + return text_type(req.property_value('ui.float-format') % value) # XXX cast needed ? PRINTERS = { 'Bytes': print_bytes, @@ -337,9 +340,8 @@ def __unicode__(self): if self.parent: return u'%s.%s' % (self.parent, self.id) - return unicode(self.id) - def __str__(self): - return unicode(self).encode('utf8') + return text_type(self.id) + __str__ = __unicode__ if PY3 else lambda self: self.__unicode__().encode('utf-8') def __getattr__(self, attr): return _JSId(attr, self) def __call__(self, *args): @@ -357,6 +359,7 @@ if self.parent: return u'%s(%s)' % (self.parent, ','.join(args)) return ','.join(args) + __str__ = __unicode__ if PY3 else lambda self: self.__unicode__().encode('utf-8') class _JS(object): def __getattr__(self, attr): @@ -389,7 +392,7 @@ 'img', 'area', 'input', 'col')) def sgml_attributes(attrs): - return u' '.join(u'%s="%s"' % (attr, xml_escape(unicode(value))) + return u' '.join(u'%s="%s"' % (attr, xml_escape(text_type(value))) for attr, value in sorted(attrs.items()) if value is not None) @@ -407,7 +410,7 @@ value += u' ' + sgml_attributes(attrs) if content: if escapecontent: - content = xml_escape(unicode(content)) + content = xml_escape(text_type(content)) value += u'>%s' % (content, tag) else: if tag in HTML4_EMPTY_TAGS: @@ -436,8 +439,8 @@ stream = StringIO() #UStringIO() don't want unicode assertion formater.format(layout, stream) res = stream.getvalue() - if isinstance(res, str): - res = unicode(res, 'UTF8') + if isinstance(res, binary_type): + res = res.decode('UTF8') return res # traceback formatting ######################################################## @@ -445,14 +448,17 @@ import traceback def exc_message(ex, encoding): - try: - excmsg = unicode(ex) - except Exception: + if PY3: + excmsg = str(ex) + else: try: - excmsg = unicode(str(ex), encoding, 'replace') + excmsg = unicode(ex) except Exception: - excmsg = unicode(repr(ex), encoding, 'replace') - exctype = unicode(ex.__class__.__name__) + try: + excmsg = unicode(str(ex), encoding, 'replace') + except Exception: + excmsg = 
unicode(repr(ex), encoding, 'replace') + exctype = ex.__class__.__name__ return u'%s: %s' % (exctype, excmsg) @@ -462,7 +468,10 @@ for stackentry in traceback.extract_tb(info[2]): res.append(u'\tFile %s, line %s, function %s' % tuple(stackentry[:3])) if stackentry[3]: - res.append(u'\t %s' % stackentry[3].decode('utf-8', 'replace')) + data = xml_escape(stackentry[3]) + if PY2: + data = data.decode('utf-8', 'replace') + res.append(u'\t %s' % data) res.append(u'\n') try: res.append(u'\t Error: %s\n' % exception) @@ -496,14 +505,16 @@ u'%s:
'%( xml_escape(stackentry[0]), stackentry[1], xml_escape(stackentry[2]))) if stackentry[3]: - string = xml_escape(stackentry[3]).decode('utf-8', 'replace') + string = xml_escape(stackentry[3]) + if PY2: + string = string.decode('utf-8', 'replace') strings.append(u'  %s
\n' % (string)) # add locals info for each entry try: local_context = tcbk.tb_frame.f_locals html_info = [] chars = 0 - for name, value in local_context.iteritems(): + for name, value in local_context.items(): value = xml_escape(repr(value)) info = u'%s=%s, ' % (name, value) line_length = len(name) + len(value) @@ -526,7 +537,9 @@ # csv files / unicode support ################################################# class UnicodeCSVWriter: - """proxies calls to csv.writer.writerow to be able to deal with unicode""" + """proxies calls to csv.writer.writerow to be able to deal with unicode + + Under Python 3, this code no longer encodes anything.""" def __init__(self, wfunc, encoding, **kwargs): self.writer = csv.writer(self, **kwargs) @@ -537,9 +550,12 @@ self.wfunc(data) def writerow(self, row): + if PY3: + self.writer.writerow(row) + return csvrow = [] for elt in row: - if isinstance(elt, unicode): + if isinstance(elt, text_type): csvrow.append(elt.encode(self.encoding)) else: csvrow.append(str(elt)) @@ -559,7 +575,7 @@ def __call__(self, function): def newfunc(*args, **kwargs): ret = function(*args, **kwargs) - if isinstance(ret, basestring): + if isinstance(ret, string_types): return ret[:self.maxsize] return ret return newfunc @@ -568,6 +584,6 @@ def htmlescape(function): def newfunc(*args, **kwargs): ret = function(*args, **kwargs) - assert isinstance(ret, basestring) + assert isinstance(ret, string_types) return xml_escape(ret) return newfunc diff -r a4fcee1e9789 -r 19fcce6dc6d1 utils.py --- a/utils.py Thu Mar 24 09:43:25 2016 +0100 +++ b/utils.py Thu Jun 16 14:19:20 2016 +0200 @@ -33,9 +33,10 @@ from uuid import uuid4 from warnings import warn from threading import Lock -from urlparse import urlparse +from logging import getLogger -from logging import getLogger +from six import text_type +from six.moves.urllib.parse import urlparse from logilab.mtconverter import xml_escape from logilab.common.deprecation import deprecated @@ -100,7 +101,7 @@ """ def __init__(self, w, tag, closetag=None): self.written = False - self.tag = unicode(tag) + self.tag = text_type(tag) self.closetag = closetag self.w = w @@ -116,7 +117,7 @@ def __exit__(self, exctype, value, traceback): if self.written is True: if self.closetag: - self.w(unicode(self.closetag)) + self.w(text_type(self.closetag)) else: self.w(self.tag.replace('<', '>> l = SizeConstrainedList(2) - >>> l.append(1) - >>> l.append(2) - >>> l - [1, 2] - >>> l.append(3) - >>> l - [2, 3] - """ - def __init__(self, maxsize): - self.maxsize = maxsize - - def append(self, element): - if len(self) == self.maxsize: - del self[0] - super(SizeConstrainedList, self).append(element) - - def extend(self, sequence): - super(SizeConstrainedList, self).extend(sequence) - keepafter = len(self) - self.maxsize - if keepafter > 0: - del self[:keepafter] - - __iadd__ = extend - - class RepeatList(object): """fake a list with the same element in each row""" __slots__ = ('_size', '_item') @@ -185,13 +153,13 @@ def __iter__(self): return repeat(self._item, self._size) def __getitem__(self, index): + if isinstance(index, slice): + # XXX could be more efficient, but do we bother? + return ([self._item] * self._size)[index] return self._item def __delitem__(self, idc): assert self._size > 0 self._size -= 1 - def __getslice__(self, i, j): - # XXX could be more efficient, but do we bother? 
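The `RepeatList` hunk above folds slicing into `__getitem__`: Python 3 removed the `__getslice__` protocol, so `l[i:j]` now reaches `__getitem__` with a `slice` object, and sequence-like classes must handle it there. The same class also gains an explicit `__ne__` and a raising `__hash__` just below, because Python 3 sets `__hash__` to `None` on any class that defines `__eq__`. A minimal standalone sketch of the slicing part (a toy class, not CubicWeb's):

    from itertools import repeat

    class FakeRepeatList(object):
        """toy sequence faking `size` copies of a single item"""
        def __init__(self, size, item):
            self._size, self._item = size, item
        def __len__(self):
            return self._size
        def __iter__(self):
            return repeat(self._item, self._size)
        def __getitem__(self, index):
            if isinstance(index, slice):
                # Python 3 routes l[i:j] here; __getslice__ is never called
                return ([self._item] * self._size)[index]
            return self._item

    assert FakeRepeatList(3, 'x')[1:] == ['x', 'x']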
- return ([self._item] * self._size)[i:j] def __add__(self, other): if isinstance(other, RepeatList): if other._item == self._item: @@ -208,8 +176,10 @@ if isinstance(other, RepeatList): return other._size == self._size and other._item == self._item return self[:] == other - # py3k future warning "Overriding __eq__ blocks inheritance of __hash__ in 3.x" - # is annoying but won't go away because we don't want to hash() the repeatlist + def __ne__(self, other): + return not (self == other) + def __hash__(self): + raise NotImplementedError def pop(self, i): self._size -= 1 @@ -223,11 +193,13 @@ self.tracewrites = tracewrites super(UStringIO, self).__init__(*args, **kwargs) - def __nonzero__(self): + def __bool__(self): return True + __nonzero__ = __bool__ + def write(self, value): - assert isinstance(value, unicode), u"unicode required not %s : %s"\ + assert isinstance(value, text_type), u"unicode required not %s : %s"\ % (type(value).__name__, repr(value)) if self.tracewrites: from traceback import format_stack @@ -553,9 +525,9 @@ def _dict2js(d, predictable=False): if predictable: - it = sorted(d.iteritems()) + it = sorted(d.items()) else: - it = d.iteritems() + it = d.items() res = [key + ': ' + js_dumps(val, predictable) for key, val in it] return '{%s}' % ', '.join(res) diff -r a4fcee1e9789 -r 19fcce6dc6d1 view.py --- a/view.py Thu Mar 24 09:43:25 2016 +0100 +++ b/view.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,12 +18,14 @@ """abstract views and templates classes for CubicWeb web client""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from io import BytesIO from warnings import warn from functools import partial +from six.moves import range + from logilab.common.deprecation import deprecated from logilab.common.registry import yes from logilab.mtconverter import xml_escape @@ -173,7 +175,7 @@ # specific view if rset.rowcount != 1: kwargs.setdefault('initargs', self.cw_extra_kwargs) - for i in xrange(len(rset)): + for i in range(len(rset)): if wrap: self.w(u'
') self.wview(self.__regid__, rset, row=i, **kwargs) @@ -213,7 +215,7 @@ return self._cw.build_url('view', vid=self.__regid__) coltypes = rset.column_types(0) if len(coltypes) == 1: - etype = iter(coltypes).next() + etype = next(iter(coltypes)) if not self._cw.vreg.schema.eschema(etype).final: if len(rset) == 1: entity = rset.get_entity(0, 0) @@ -281,7 +283,7 @@ else : etypes = rset.column_types(0) if len(etypes) == 1: - etype = iter(etypes).next() + etype = next(iter(etypes)) clabel = display_name(self._cw, etype, 'plural') else : clabel = u'#[*] (%s)' % vtitle @@ -394,7 +396,7 @@ if rset is None: rset = self.cw_rset = self._cw.execute(self.startup_rql()) if rset: - for i in xrange(len(rset)): + for i in range(len(rset)): self.wview(self.__regid__, rset, row=i, **kwargs) else: self.no_entities(**kwargs) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/__init__.py --- a/web/__init__.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/__init__.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,10 +20,9 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ -from urllib import quote as urlquote - +from six.moves.urllib.parse import quote as urlquote from logilab.common.deprecation import deprecated from cubicweb.web._exceptions import * diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/_exceptions.py --- a/web/_exceptions.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/_exceptions.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,7 +20,7 @@ __docformat__ = "restructuredtext en" -import httplib +from six.moves import http_client from cubicweb._exceptions import * from cubicweb.utils import json_dumps @@ -41,7 +41,7 @@ """base class for publishing related exception""" def __init__(self, *args, **kwargs): - self.status = kwargs.pop('status', httplib.OK) + self.status = kwargs.pop('status', http_client.OK) super(PublishException, self).__init__(*args, **kwargs) class LogOut(PublishException): @@ -52,7 +52,7 @@ class Redirect(PublishException): """raised to redirect the http request""" - def __init__(self, location, status=httplib.SEE_OTHER): + def __init__(self, location, status=http_client.SEE_OTHER): super(Redirect, self).__init__(status=status) self.location = location @@ -71,7 +71,7 @@ """raised when a request can't be served because of a bad input""" def __init__(self, *args, **kwargs): - kwargs.setdefault('status', httplib.BAD_REQUEST) + kwargs.setdefault('status', http_client.BAD_REQUEST) super(RequestError, self).__init__(*args, **kwargs) @@ -79,14 +79,14 @@ """raised when an edit request doesn't specify any eid to edit""" def __init__(self, *args, **kwargs): - kwargs.setdefault('status', httplib.BAD_REQUEST) + kwargs.setdefault('status', http_client.BAD_REQUEST) super(NothingToEdit, self).__init__(*args, **kwargs) class ProcessFormError(RequestError): """raised when posted data can't be processed by the corresponding field """ def __init__(self, *args, **kwargs): - kwargs.setdefault('status', httplib.BAD_REQUEST) + kwargs.setdefault('status', http_client.BAD_REQUEST) super(ProcessFormError, self).__init__(*args, **kwargs) class NotFound(RequestError): @@ -94,13 +94,13 @@ a 404 error should be returned""" def __init__(self, *args, **kwargs): - kwargs.setdefault('status', httplib.NOT_FOUND) + kwargs.setdefault('status', http_client.NOT_FOUND) super(NotFound, self).__init__(*args, **kwargs) class RemoteCallFailed(RequestError): """raised when a json remote call fails """ - def __init__(self, reason='', status=httplib.INTERNAL_SERVER_ERROR): + def __init__(self, reason='', 
status=http_client.INTERNAL_SERVER_ERROR): super(RemoteCallFailed, self).__init__(reason, status=status) self.reason = reason diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/action.py --- a/web/action.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/action.py Thu Jun 16 14:19:20 2016 +0200 @@ -33,7 +33,7 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from cubicweb import target from cubicweb.predicates import (partial_relation_possible, match_search_state, @@ -91,7 +91,7 @@ """base class for actions consisting to create a new object with an initial relation set to an entity. - Additionaly to EntityAction behaviour, this class is parametrized using + Additionally to EntityAction behaviour, this class is parametrized using .rtype, .role and .target_etype attributes to check if the action apply and if the logged user has access to it (see :class:`~cubicweb.selectors.partial_relation_possible` selector @@ -111,4 +111,3 @@ return self._cw.vreg["etypes"].etype_class(ttype).cw_create_url(self._cw, __redirectpath=entity.rest_path(), __linkto=linkto, __redirectvid=self._cw.form.get('__redirectvid', '')) - diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/application.py --- a/web/application.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/application.py Thu Jun 16 14:19:20 2016 +0200 @@ -25,7 +25,8 @@ from warnings import warn import json -import httplib +from six import text_type, binary_type +from six.moves import http_client from logilab.common.deprecation import deprecated @@ -68,8 +69,8 @@ def __init__(self, appli): self.repo = appli.repo self.vreg = appli.vreg - self.session_manager = self.vreg['components'].select('sessionmanager', - repo=self.repo) + self.session_manager = self.vreg['sessions'].select('sessionmanager', + repo=self.repo) global SESSION_MANAGER SESSION_MANAGER = self.session_manager if self.vreg.config.mode != 'test': @@ -80,8 +81,8 @@ def reset_session_manager(self): data = self.session_manager.dump_data() - self.session_manager = self.vreg['components'].select('sessionmanager', - repo=self.repo) + self.session_manager = self.vreg['sessions'].select('sessionmanager', + repo=self.repo) self.session_manager.restore_data(data) global SESSION_MANAGER SESSION_MANAGER = self.session_manager @@ -256,7 +257,7 @@ # activate realm-based auth realm = self.vreg.config['realm'] req.set_header('WWW-Authenticate', [('Basic', {'realm' : realm })], raw=False) - content = '' + content = b'' try: try: session = self.get_session(req) @@ -290,7 +291,7 @@ if self.vreg.config['auth-mode'] == 'cookie' and ex.url: req.headers_out.setHeader('location', str(ex.url)) if ex.status is not None: - req.status_out = httplib.SEE_OTHER + req.status_out = http_client.SEE_OTHER # When the authentification is handled by http we must # explicitly ask for authentification to flush current http # authentification information @@ -310,23 +311,24 @@ # the request does not use https, redirect to login form https_url = self.vreg.config['https-url'] if https_url and req.base_url() != https_url: - req.status_out = httplib.SEE_OTHER + req.status_out = http_client.SEE_OTHER req.headers_out.setHeader('location', https_url + 'login') else: # We assume here that in http auth mode the user *May* provide # Authentification Credential if asked kindly. if self.vreg.config['auth-mode'] == 'http': - req.status_out = httplib.UNAUTHORIZED + req.status_out = http_client.UNAUTHORIZED # In the other case (coky auth) we assume that there is no way # for the user to provide them... # XXX But WHY ? 
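application.py now takes its status codes from `six.moves.http_client`, which resolves to `httplib` on Python 2 and `http.client` on Python 3; the symbolic constants keep the same numeric values on both. A minimal sketch of the lookup pattern the error handlers rely on (hypothetical helper, not CubicWeb's API):

    from six.moves import http_client

    def status_for(exc):
        # honour an exception's own status when it carries one, else 500
        status = getattr(exc, 'status', None)
        if status is None:
            status = http_client.INTERNAL_SERVER_ERROR
        return status

    assert http_client.SEE_OTHER == 303
    assert http_client.FORBIDDEN == 403
    assert status_for(Exception()) == 500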
else: - req.status_out = httplib.FORBIDDEN + req.status_out = http_client.FORBIDDEN # If previous error handling already generated a custom content # do not overwrite it. This is used by LogOut Except # XXX ensure we don't actually serve content if not content: content = self.need_login_content(req) + assert isinstance(content, binary_type) return content @@ -368,7 +370,7 @@ except cors.CORSPreflight: # Return directly an empty 200 req.status_out = 200 - result = '' + result = b'' except StatusResponse as ex: warn('[3.16] StatusResponse is deprecated use req.status_out', DeprecationWarning, stacklevel=2) @@ -394,12 +396,12 @@ except Unauthorized as ex: req.data['errmsg'] = req._('You\'re not authorized to access this page. ' 'If you think you should, please contact the site administrator.') - req.status_out = httplib.FORBIDDEN + req.status_out = http_client.FORBIDDEN result = self.error_handler(req, ex, tb=False) except Forbidden as ex: req.data['errmsg'] = req._('This action is forbidden. ' 'If you think it should be allowed, please contact the site administrator.') - req.status_out = httplib.FORBIDDEN + req.status_out = http_client.FORBIDDEN result = self.error_handler(req, ex, tb=False) except (BadRQLQuery, RequestError) as ex: result = self.error_handler(req, ex, tb=False) @@ -413,7 +415,7 @@ raise ### Last defense line except BaseException as ex: - req.status_out = httplib.INTERNAL_SERVER_ERROR + req.status_out = http_client.INTERNAL_SERVER_ERROR result = self.error_handler(req, ex, tb=True) finally: if req.cnx and not commited: @@ -437,7 +439,7 @@ req.headers_out.setHeader('location', str(ex.location)) assert 300 <= ex.status < 400 req.status_out = ex.status - return '' + return b'' def validation_error_handler(self, req, ex): ex.translate(req._) # translate messages using ui language @@ -453,9 +455,9 @@ # messages. 
location = req.form['__errorurl'].rsplit('#', 1)[0] req.headers_out.setHeader('location', str(location)) - req.status_out = httplib.SEE_OTHER - return '' - req.status_out = httplib.CONFLICT + req.status_out = http_client.SEE_OTHER + return b'' + req.status_out = http_client.CONFLICT return self.error_handler(req, ex, tb=False) def error_handler(self, req, ex, tb=False): @@ -491,14 +493,14 @@ def ajax_error_handler(self, req, ex): req.set_header('content-type', 'application/json') - status = httplib.INTERNAL_SERVER_ERROR + status = http_client.INTERNAL_SERVER_ERROR if isinstance(ex, PublishException) and ex.status is not None: status = ex.status if req.status_out < 400: # don't overwrite it if it's already set req.status_out = status - json_dumper = getattr(ex, 'dumps', lambda : json.dumps({'reason': unicode(ex)})) - return json_dumper() + json_dumper = getattr(ex, 'dumps', lambda : json.dumps({'reason': text_type(ex)})) + return json_dumper().encode('utf-8') # special case handling diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/box.py --- a/web/box.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/box.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,9 @@ """abstract box classes for CubicWeb web client""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ + +from six import add_metaclass from logilab.mtconverter import xml_escape from logilab.common.deprecation import class_deprecated, class_renamed @@ -41,7 +43,7 @@ actions_by_cat.setdefault(action.category, []).append( (action.title, action) ) for key, values in actions_by_cat.items(): - actions_by_cat[key] = [act for title, act in sorted(values)] + actions_by_cat[key] = [act for title, act in sorted(values, key=lambda x: x[0])] if categories_in_order: for cat in categories_in_order: if cat in actions_by_cat: @@ -53,6 +55,7 @@ # old box system, deprecated ################################################### +@add_metaclass(class_deprecated) class BoxTemplate(View): """base template for boxes, usually a (contextual) list of possible actions. 
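The `sort_by_category` change in box.py above is a behavioural fix, not mere porting noise: `sorted(values)` over `(title, action)` pairs breaks on Python 3 whenever two titles compare equal, because the tie-break then compares the action objects themselves, which are not orderable. Sorting on the title alone sidesteps the problem. A toy reproduction (hypothetical Action class):

    class Action(object):
        def __init__(self, title):
            self.title = title

    values = [(u'edit', Action(u'edit')), (u'edit', Action(u'copy'))]
    # sorted(values) raises TypeError on Python 3: the equal titles force
    # a comparison of the two Action instances
    ordered = [act for title, act in sorted(values, key=lambda x: x[0])]
    assert [act.title for act in ordered] == [u'edit', u'copy']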
Various classes attributes may be used to control the box @@ -66,7 +69,6 @@ box.render(self.w) """ - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.10] *BoxTemplate classes are deprecated, use *CtxComponent instead (%(cls)s)' __registry__ = 'ctxcomponents' @@ -193,4 +195,3 @@ AjaxEditRelationBoxTemplate = class_renamed( 'AjaxEditRelationBoxTemplate', AjaxEditRelationCtxComponent, '[3.10] AjaxEditRelationBoxTemplate has been renamed to AjaxEditRelationCtxComponent (%(cls)s)') - diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/captcha.py --- a/web/captcha.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/captcha.py Thu Jun 16 14:19:20 2016 +0200 @@ -22,7 +22,9 @@ __docformat__ = "restructuredtext en" from random import randint, choice -from cStringIO import StringIO +from io import BytesIO + +from six.moves import range from PIL import Image, ImageFont, ImageDraw, ImageFilter @@ -51,7 +53,7 @@ draw = ImageDraw.Draw(img) # draw 100 random colored boxes on the background x, y = img.size - for num in xrange(100): + for num in range(100): draw.rectangle((randint(0, x), randint(0, y), randint(0, x), randint(0, y)), fill=randint(0, 0xffffff)) @@ -67,7 +69,7 @@ """ text = u''.join(choice('QWERTYUOPASDFGHJKLZXCVBNM') for i in range(size)) img = pil_captcha(text, fontfile, fontsize) - out = StringIO() + out = BytesIO() img.save(out, format) out.seek(0) return text, out diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/component.py --- a/web/component.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/component.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,10 +20,12 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from warnings import warn +from six import PY3, add_metaclass, text_type + from logilab.common.deprecation import class_deprecated, class_renamed, deprecated from logilab.mtconverter import xml_escape @@ -69,10 +71,13 @@ except AttributeError: page_size = self.cw_extra_kwargs.get('page_size') if page_size is None: - if 'page_size' in self._cw.form: - page_size = int(self._cw.form['page_size']) - else: - page_size = self._cw.property_value(self.page_size_property) + try: + page_size = int(self._cw.form.get('page_size')) + except (ValueError, TypeError): + # no or invalid value, fall back + pass + if page_size is None: + page_size = self._cw.property_value(self.page_size_property) self._page_size = page_size return page_size @@ -215,6 +220,9 @@ def __unicode__(self): return tags.a(self.label, href=self.href, **self.attrs) + if PY3: + __str__ = __unicode__ + def render(self, w): w(tags.a(self.label, href=self.href, **self.attrs)) @@ -425,7 +433,7 @@ @property def domid(self): - return domid(self.__regid__) + unicode(self.entity.eid) + return domid(self.__regid__) + text_type(self.entity.eid) def lazy_view_holder(self, w, entity, oid, registry='views'): """add a holder and return a URL that may be used to replace this @@ -498,7 +506,7 @@ args['subject'], args['object']) return u'[%s] %s' % ( - xml_escape(unicode(jscall)), label, etarget.view('incontext')) + xml_escape(text_type(jscall)), label, etarget.view('incontext')) def related_boxitems(self, entity): return [self.box_item(entity, etarget, 'delete_relation', u'-') @@ -515,7 +523,7 @@ """returns the list of unrelated entities, using the entity's appropriate vocabulary function """ - skip = set(unicode(e.eid) for e in entity.related(self.rtype, role(self), + skip = set(text_type(e.eid) for e in entity.related(self.rtype, role(self), entities=True)) skip.add(None) skip.add(INTERNAL_FIELD_VALUE) @@ -571,7 +579,7 @@ # to be defined 
in concrete classes rtype = role = target_etype = None - # class attributes below *must* be set in concrete classes (additionaly to + # class attributes below *must* be set in concrete classes (additionally to # rtype / role [/ target_etype]. They should correspond to js_* methods on # the json controller @@ -633,7 +641,7 @@ if maydel: if not js_css_added: js_css_added = self.add_js_css() - jscall = unicode(js.ajaxBoxRemoveLinkedEntity( + jscall = text_type(js.ajaxBoxRemoveLinkedEntity( self.__regid__, entity.eid, rentity.eid, self.fname_remove, self.removed_msg and _(self.removed_msg))) @@ -648,7 +656,7 @@ if mayadd: multiple = self.rdef.role_cardinality(self.role) in '*+' w(u'' % xml_escape(colname)) self.w(u'\n') - diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/tabs.py --- a/web/views/tabs.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/tabs.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,9 @@ """base classes to handle tabbed views""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ + +from six import string_types from logilab.common.deprecation import class_renamed from logilab.mtconverter import xml_escape @@ -114,7 +116,7 @@ active_tab = uilib.domid(default_tab) viewsvreg = self._cw.vreg['views'] for tab in tabs: - if isinstance(tab, basestring): + if isinstance(tab, string_types): tabid, tabkwargs = tab, {} else: tabid, tabkwargs = tab diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/timetable.py --- a/web/views/timetable.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/timetable.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,9 @@ """html timetable views""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ + +from six.moves import range from logilab.mtconverter import xml_escape from logilab.common.date import ONEDAY, date_range, todatetime @@ -51,7 +53,7 @@ users = [] users_max = {} # XXX: try refactoring with calendar.py:OneMonthCal - for row in xrange(self.cw_rset.rowcount): + for row in range(self.cw_rset.rowcount): task = self.cw_rset.get_entity(row, 0) icalendarable = task.cw_adapt_to('ICalendarable') if len(self.cw_rset[row]) > 1 and self.cw_rset.description[row][1] == 'CWUser': @@ -88,7 +90,7 @@ rows = [] # colors here are class names defined in cubicweb.css - colors = ["col%x" % i for i in xrange(12)] + colors = ["col%x" % i for i in range(12)] next_color_index = 0 visited_tasks = {} # holds a description of a task for a user diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/treeview.py --- a/web/views/treeview.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/treeview.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,7 +20,7 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from warnings import warn @@ -140,7 +140,7 @@ ajaxargs = json.loads(form.pop('morekwargs')) # got unicode & python keywords must be strings morekwargs.update(dict((str(k), v) - for k, v in ajaxargs.iteritems())) + for k, v in ajaxargs.items())) toplevel_thru_ajax = form.pop('treeview_top', False) or initial_thru_ajax toplevel = toplevel_thru_ajax or (initial_load and not form.get('fname')) return subvid, treeid, toplevel_thru_ajax, toplevel diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/uicfg.py --- a/web/views/uicfg.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/uicfg.py Thu Jun 16 14:19:20 2016 +0200 @@ -57,6 +57,8 @@ from warnings import warn +from six import string_types + from cubicweb import neg_role from cubicweb.rtags import (RelationTags, RelationTagsBool, RelationTagsSet, RelationTagsDict, NoTargetRelationTagsDict, @@ 
-267,7 +269,7 @@ if not 'inlined' in sectdict: sectdict['inlined'] = sectdict['main'] # recompute formsections and set it to avoid recomputing - for formtype, section in sectdict.iteritems(): + for formtype, section in sectdict.items(): formsections.add('%s_%s' % (formtype, section)) def tag_relation(self, key, formtype, section): @@ -302,7 +304,7 @@ rtags[section] = value cls = self.tag_container_cls rtags = cls('_'.join([section,value]) - for section,value in rtags.iteritems()) + for section,value in rtags.items()) return rtags def get(self, *key): @@ -650,7 +652,7 @@ self.tag_relation((sschema, rschema, oschema, role), True) def _tag_etype_attr(self, etype, attr, desttype='*', *args, **kwargs): - if isinstance(attr, basestring): + if isinstance(attr, string_types): attr, role = attr, 'subject' else: attr, role = attr @@ -687,5 +689,5 @@ def registration_callback(vreg): - vreg.register_all(globals().itervalues(), __name__) + vreg.register_all(globals().values(), __name__) indexview_etype_section.init(vreg.schema) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/undohistory.py --- a/web/views/undohistory.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/undohistory.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,7 +17,7 @@ # with CubicWeb. If not, see . __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from logilab.common.registry import Predicate @@ -46,7 +46,7 @@ def __str__(self): return '%s(%s)' % (self.__class__.__name__, ', '.join( - "%s=%v" % (str(k), str(v)) for k, v in kwargs.iteritems() )) + "%s=%v" % (str(k), str(v)) for k, v in kwargs.items() )) def __call__(self, cls, req, tx_action=None, **kwargs): # tx_action is expected to be a transaction.AbstractAction diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/urlpublishing.py --- a/web/views/urlpublishing.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/urlpublishing.py Thu Jun 16 14:19:20 2016 +0200 @@ -60,7 +60,7 @@ from rql import TypeResolverException from cubicweb import RegistryException -from cubicweb.web import NotFound, Redirect, component +from cubicweb.web import NotFound, Redirect, component, views class PathDontMatch(Exception): @@ -201,18 +201,14 @@ return self.handle_etype_attr(req, cls, attrname, value) return self.handle_etype(req, cls) - def set_vid_for_rset(self, req, cls, rset):# cls is there to ease overriding + def set_vid_for_rset(self, req, cls, rset): # cls is there to ease overriding if rset.rowcount == 0: raise NotFound() - # we've to set a default vid here, since vid_from_rset may try to use a - # table view if fetch_rql include some non final relation - if rset.rowcount == 1: - req.form.setdefault('vid', 'primary') - else: # rset.rowcount >= 1 - if len(rset.column_types(0)) > 1: - req.form.setdefault('vid', 'list') - else: - req.form.setdefault('vid', 'sameetypelist') + if 'vid' not in req.form: + # check_table=False tells vid_from_rset not to try to use a table view if fetch_rql + # include some non final relation + req.form['vid'] = views.vid_from_rset(req, rset, req.vreg.schema, + check_table=False) def handle_etype(self, req, cls): rset = req.execute(cls.fetch_rql(req.user)) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/urlrewrite.py --- a/web/views/urlrewrite.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/urlrewrite.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,6 +19,8 @@ import re +from six import string_types, add_metaclass + from cubicweb.uilib import domid from cubicweb.appobject import AppObject @@ -51,6 +53,7 @@ return super(metarewriter, mcs).__new__(mcs, name, bases, 
classdict) +@add_metaclass(metarewriter) class URLRewriter(AppObject): """Base class for URL rewriters. @@ -64,7 +67,6 @@ should be tried first. The higher the priority is, the earlier the rewriter will be tried. """ - __metaclass__ = metarewriter __registry__ = 'urlrewriting' __abstract__ = True priority = 1 @@ -122,14 +124,14 @@ required_groups = None if required_groups and not req.user.matching_groups(required_groups): continue - if isinstance(inputurl, basestring): + if isinstance(inputurl, string_types): if inputurl == uri: req.form.update(infos) break elif inputurl.match(uri): # it's a regexp # XXX what about i18n? (vtitle for instance) for param, value in infos.items(): - if isinstance(value, basestring): + if isinstance(value, string_types): req.form[param] = inputurl.sub(value, uri) else: req.form[param] = value @@ -222,7 +224,7 @@ required_groups = None if required_groups and not req.user.matching_groups(required_groups): continue - if isinstance(inputurl, basestring): + if isinstance(inputurl, string_types): if inputurl == uri: return callback(inputurl, uri, req, self._cw.vreg.schema) elif inputurl.match(uri): # it's a regexp diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/vcard.py --- a/web/views/vcard.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/vcard.py Thu Jun 16 14:19:20 2016 +0200 @@ -23,7 +23,7 @@ from cubicweb.predicates import is_instance from cubicweb.view import EntityView -_ = unicode +from cubicweb import _ VCARD_PHONE_TYPES = {'home': 'HOME', 'office': 'WORK', 'mobile': 'CELL', 'fax': 'FAX'} diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/wdoc.py --- a/web/views/wdoc.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/wdoc.py Thu Jun 16 14:19:20 2016 +0200 @@ -35,7 +35,7 @@ from cubicweb.view import StartupView from cubicweb.uilib import rest_publish from cubicweb.web import NotFound, action -_ = unicode +from cubicweb import _ # table of content management ################################################# @@ -73,7 +73,7 @@ def build_toc(config): alltocfiles = reversed(tuple(config.locate_all_files('toc.xml'))) - maintoc = parse(alltocfiles.next()).getroot() + maintoc = parse(next(alltocfiles)).getroot() maintoc.parent = None index = {} build_toc_index(maintoc, index) @@ -229,4 +229,3 @@ def url(self): return self._cw.build_url('doc/about') - diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/workflow.py --- a/web/views/workflow.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/workflow.py Thu Jun 16 14:19:20 2016 +0200 @@ -22,11 +22,13 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ import os from warnings import warn +from six import add_metaclass + from logilab.mtconverter import xml_escape from logilab.common.graph import escape from logilab.common.deprecation import class_deprecated @@ -116,7 +118,7 @@ 'changestate', self._cw, entity=entity, transition=transition, redirect_path=self.redirectpath(entity), **kwargs) trinfo = self._cw.vreg['etypes'].etype_class('TrInfo')(self._cw) - trinfo.eid = self._cw.varmaker.next() + trinfo.eid = next(self._cw.varmaker) subform = self._cw.vreg['forms'].select('edition', self._cw, entity=trinfo, mainform=False) subform.field_by_name('wf_info_for', 'subject').value = entity.eid @@ -429,8 +431,8 @@ return WorkflowDotPropsHandler(self._cw) +@add_metaclass(class_deprecated) class TmpPngView(TmpFileViewMixin, EntityView): - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.18] %(cls)s is deprecated' __regid__ = 'tmppng' __select__ = match_form_params('tmpfile') diff -r a4fcee1e9789 -r 
19fcce6dc6d1 web/views/xbel.py --- a/web/views/xbel.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/xbel.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,9 @@ """xbel views""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ + +from six.moves import range from logilab.mtconverter import xml_escape @@ -42,7 +44,7 @@ self.w(u'') self.w(u'') self.w(u'%s' % self._cw._('bookmarks')) - for i in xrange(self.cw_rset.rowcount): + for i in range(self.cw_rset.rowcount): self.cell_call(i, 0) self.w(u"") @@ -65,4 +67,3 @@ def url(self, entity): return entity.actual_url() - diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/xmlrss.py --- a/web/views/xmlrss.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/xmlrss.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,11 +18,13 @@ """base xml and rss views""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from base64 import b64encode from time import timezone +from six.moves import range + from logilab.mtconverter import xml_escape from cubicweb.predicates import (is_instance, non_final_entity, one_line_rset, @@ -64,7 +66,7 @@ """display a list of entities by calling their view""" self.w(u'\n' % self._cw.encoding) self.w(u'<%s size="%s">\n' % (self.xml_root, len(self.cw_rset))) - for i in xrange(self.cw_rset.rowcount): + for i in range(self.cw_rset.rowcount): self.cell_call(i, 0) self.w(u'\n' % self.xml_root) @@ -256,7 +258,7 @@ def call(self): """display a list of entities by calling their view""" self._open() - for i in xrange(self.cw_rset.rowcount): + for i in range(self.cw_rset.rowcount): self.cell_call(i, 0) self._close() diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/webconfig.py --- a/web/webconfig.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/webconfig.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,7 @@ """web ui configuration for cubicweb instances""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ import os import hmac @@ -26,6 +26,8 @@ from os.path import join, exists, split, isdir from warnings import warn +from six import text_type + from logilab.common.decorators import cached, cachedproperty from logilab.common.deprecation import deprecated from logilab.common.configuration import merge_options @@ -280,18 +282,7 @@ continue yield key, pdef - # don't use @cached: we want to be able to disable it while this must still - # be cached - def repository(self, vreg=None): - """return the instance's repository object""" - try: - return self.__repo - except AttributeError: - from cubicweb.repoapi import get_repository - repo = get_repository(config=self, vreg=vreg) - self.__repo = repo - return repo - + @deprecated('[3.22] call req.cnx.repo.get_versions() directly') def vc_config(self): return self.repository().get_versions() @@ -305,7 +296,7 @@ user = self['anonymous-user'] or None passwd = self['anonymous-password'] if user: - user = unicode(user) + user = text_type(user) except KeyError: user, passwd = None, None except UnicodeDecodeError: @@ -317,17 +308,17 @@ """This random key/salt is used to sign content to be sent back by browsers, eg. in the error report form. 
""" - return str(uuid4()) + return str(uuid4()).encode('ascii') def sign_text(self, text): """sign some text for later checking""" # hmac.new expect bytes - if isinstance(text, unicode): + if isinstance(text, text_type): text = text.encode('utf-8') # replace \r\n so we do not depend on whether a browser "reencode" # original message using \r\n or not return hmac.new(self._instance_salt, - text.strip().replace('\r\n', '\n')).hexdigest() + text.strip().replace(b'\r\n', b'\n')).hexdigest() def check_text_sign(self, text, signature): """check the text signature is equal to the given signature""" @@ -472,7 +463,7 @@ staticdir = join(staticdir, rdir) if not isdir(staticdir) and 'w' in mode: os.makedirs(staticdir) - return file(join(staticdir, filename), mode) + return open(join(staticdir, filename), mode) def static_file_add(self, rpath, data): stream = self.static_file_open(rpath) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/webctl.py --- a/web/webctl.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/webctl.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,6 +18,7 @@ """cubicweb-ctl commands and command handlers common to twisted/modpython web configuration """ +from __future__ import print_function __docformat__ = "restructuredtext en" @@ -44,7 +45,7 @@ def bootstrap(self, cubes, automatic=False, inputlevel=0): """bootstrap this configuration""" if not automatic: - print '\n' + underline_title('Generic web configuration') + print('\n' + underline_title('Generic web configuration')) config = self.config config.input_config('web', inputlevel) if ASK.confirm('Allow anonymous access ?', False): @@ -87,8 +88,8 @@ copy(osp.join(resource_dir, resource_path), dest_resource) # handle md5 version subdirectory linkdir(dest, osp.join(dest, config.instance_md5_version())) - print ('You can use apache rewrite rule below :\n' - 'RewriteRule ^/data/(.*) %s/$1 [L]' % dest) + print('You can use apache rewrite rule below :\n' + 'RewriteRule ^/data/(.*) %s/$1 [L]' % dest) def _datadirs(self, config, repo=None): if repo is None: diff -r a4fcee1e9789 -r 19fcce6dc6d1 wsgi/__init__.py --- a/wsgi/__init__.py Thu Mar 24 09:43:25 2016 +0100 +++ b/wsgi/__init__.py Thu Jun 16 14:19:20 2016 +0200 @@ -27,11 +27,9 @@ __docformat__ = "restructuredtext en" from email import message, message_from_string -from Cookie import SimpleCookie -from StringIO import StringIO -from cgi import parse_header from pprint import pformat as _pformat +from six.moves.http_cookies import SimpleCookie def pformat(obj): """pretty prints `obj` if possible""" diff -r a4fcee1e9789 -r 19fcce6dc6d1 wsgi/handler.py --- a/wsgi/handler.py Thu Mar 24 09:43:25 2016 +0100 +++ b/wsgi/handler.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,7 +19,9 @@ __docformat__ = "restructuredtext en" -from itertools import chain, repeat, izip +from itertools import chain, repeat + +from six.moves import zip from cubicweb import AuthenticationError from cubicweb.web import DirectResponse @@ -78,7 +80,7 @@ def __init__(self, code, req, body=None): text = STATUS_CODE_TEXT.get(code, 'UNKNOWN STATUS CODE') self.status = '%s %s' % (code, text) - self.headers = list(chain(*[izip(repeat(k), v) + self.headers = list(chain(*[zip(repeat(k), v) for k, v in req.headers_out.getAllRawHeaders()])) self.headers = [(str(k), str(v)) for k, v in self.headers] if body: diff -r a4fcee1e9789 -r 19fcce6dc6d1 wsgi/request.py --- a/wsgi/request.py Thu Mar 24 09:43:25 2016 +0100 +++ b/wsgi/request.py Thu Jun 16 14:19:20 2016 +0200 @@ -27,13 +27,12 @@ import tempfile -from StringIO import StringIO -from urllib import quote 
-from urlparse import parse_qs -from warnings import warn +from io import BytesIO + +from six.moves.urllib.parse import parse_qs from cubicweb.multipart import ( - copy_file, parse_form_data, MultipartError, parse_options_header) + copy_file, parse_form_data, parse_options_header) from cubicweb.web import RequestError from cubicweb.web.request import CubicWebRequestBase from cubicweb.wsgi import pformat, normalize_header @@ -59,7 +58,7 @@ length = 0 # wsgi.input is not seekable, so copy the request contents to a temporary file if length < 100000: - self.content = StringIO() + self.content = BytesIO() else: self.content = tempfile.TemporaryFile() copy_file(environ['wsgi.input'], self.content, maxread=length) @@ -82,7 +81,7 @@ headers= headers_in) self.content = environ['wsgi.input'] if files is not None: - for key, part in files.iteritems(): + for key, part in files.items(): self.form[key] = (part.filename, part.file) def __repr__(self): @@ -149,15 +148,10 @@ if params is None: return encoding = self.encoding - for param, val in params.iteritems(): + for param, val in params.items(): if isinstance(val, (tuple, list)): - val = [ - unicode(x, encoding) if isinstance(x, str) else x - for x in val] if len(val) == 1: val = val[0] - elif isinstance(val, str): - val = unicode(val, encoding) if param in self.no_script_form_params and val: val = self.no_script_form_param(param, val) if param == '_cwmsgid': diff -r a4fcee1e9789 -r 19fcce6dc6d1 wsgi/test/unittest_wsgi.py --- a/wsgi/test/unittest_wsgi.py Thu Mar 24 09:43:25 2016 +0100 +++ b/wsgi/test/unittest_wsgi.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,7 +1,7 @@ # encoding=utf-8 import webtest.app -from StringIO import StringIO +from io import BytesIO from cubicweb.devtools.webtest import CubicWebTestTC @@ -21,11 +21,11 @@ r = webtest.app.TestRequest.blank('/', { 'CONTENT_LENGTH': 12, 'CONTENT_TYPE': 'text/plain', - 'wsgi.input': StringIO('some content')}) + 'wsgi.input': BytesIO(b'some content')}) req = CubicWebWsgiRequest(r.environ, self.vreg) - self.assertEqual('some content', req.content.read()) + self.assertEqual(b'some content', req.content.read()) def test_http_scheme(self): r = webtest.app.TestRequest.blank('/', { @@ -52,11 +52,11 @@ self.assertTrue(req.https) def test_big_content(self): - content = 'x'*100001 + content = b'x'*100001 r = webtest.app.TestRequest.blank('/', { 'CONTENT_LENGTH': len(content), 'CONTENT_TYPE': 'text/plain', - 'wsgi.input': StringIO(content)}) + 'wsgi.input': BytesIO(content)}) req = CubicWebWsgiRequest(r.environ, self.vreg) @@ -94,14 +94,14 @@ def test_post_files(self): content_type, params = self.webapp.encode_multipart( - (), (('filefield', 'aname', 'acontent'),)) + (), (('filefield', 'aname', b'acontent'),)) r = webtest.app.TestRequest.blank( '/', POST=params, content_type=content_type) req = CubicWebWsgiRequest(r.environ, self.vreg) self.assertIn('filefield', req.form) fieldvalue = req.form['filefield'] self.assertEqual(u'aname', fieldvalue[0]) - self.assertEqual('acontent', fieldvalue[1].read()) + self.assertEqual(b'acontent', fieldvalue[1].read()) def test_post_unicode_urlencoded(self): params = 'arg=%C3%A9' @@ -110,8 +110,7 @@ req = CubicWebWsgiRequest(r.environ, self.vreg) self.assertEqual(u"é", req.form['arg']) - @classmethod - def init_config(cls, config): - super(WSGIAppTC, cls).init_config(config) - config.https_uiprops = None - config.https_datadir_url = None + +if __name__ == '__main__': + import unittest + unittest.main()
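The wsgi test updates above follow directly from PEP 3333: `wsgi.input` is specified as a byte stream, so the tests now feed it `BytesIO` and compare against `b'...'` literals. A self-contained sketch of reading such a body (hypothetical helper):

    from io import BytesIO

    def read_body(environ):
        # PEP 3333: wsgi.input yields bytes; CONTENT_LENGTH is a str
        length = int(environ.get('CONTENT_LENGTH') or 0)
        return environ['wsgi.input'].read(length)

    environ = {'CONTENT_LENGTH': '12',
               'wsgi.input': BytesIO(b'some content')}
    assert read_body(environ) == b'some content'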
') - jscall = unicode(js.ajaxBoxShowSelector( + jscall = text_type(js.ajaxBoxShowSelector( self.__regid__, entity.eid, self.fname_vocabulary, self.fname_validate, self.added_msg and _(self.added_msg), _(stdmsgs.BUTTON_OK[0]), _(stdmsgs.BUTTON_CANCEL[0]), @@ -677,6 +685,7 @@ # old contextual components, deprecated ######################################## +@add_metaclass(class_deprecated) class EntityVComponent(Component): """abstract base class for additinal components displayed in content headers and footer according to: @@ -687,7 +696,6 @@ it should be configured using .accepts, .etype, .rtype, .target and .context class attributes """ - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.10] *VComponent classes are deprecated, use *CtxComponent instead (%(cls)s)' __registry__ = 'ctxcomponents' diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/controller.py --- a/web/controller.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/controller.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,6 +19,8 @@ __docformat__ = "restructuredtext en" +from six import PY2 + from logilab.mtconverter import xml_escape from logilab.common.registry import yes from logilab.common.deprecation import deprecated @@ -87,7 +89,7 @@ rql = req.form.get('rql') if rql: req.ensure_ro_rql(rql) - if not isinstance(rql, unicode): + if PY2 and not isinstance(rql, unicode): rql = unicode(rql, req.encoding) pp = req.vreg['components'].select_or_none('magicsearch', req) if pp is not None: @@ -132,8 +134,6 @@ newparams['_cwmsgid'] = self._cw.set_redirect_message(msg) if '__action_apply' in self._cw.form: self._return_to_edition_view(newparams) - if '__action_cancel' in self._cw.form: - self._return_to_lastpage(newparams) else: self._return_to_original_view(newparams) @@ -155,7 +155,7 @@ and '_cwmsgid' in newparams): # are we here on creation or modification? if any(eid == self._edited_entity.eid - for eid in self._cw.data.get('eidmap', {}).itervalues()): + for eid in self._cw.data.get('eidmap', {}).values()): msg = self._cw._('click here to see created entity') else: msg = self._cw._('click here to see edited entity') @@ -201,11 +201,9 @@ raise Redirect(self._cw.build_url(path, **newparams)) - def _return_to_lastpage(self, newparams): - """cancel-button case: in this case we are always expecting to go back - where we came from, and this is not easy. Currently we suppose that - __redirectpath is specifying that place if found, else we look in the - request breadcrumbs for the last visited page. + def _redirect(self, newparams): + """Raise a redirect. We use __redirectpath if it specified, else we + return to the home page. """ if '__redirectpath' in self._cw.form: # if redirect path was explicitly specified in the form, use it @@ -213,7 +211,7 @@ url = self._cw.build_url(path) url = append_url_params(url, self._cw.form.get('__redirectparams')) else: - url = self._cw.last_visited_page() + url = self._cw.base_url() # The newparams must update the params in all cases url = self._cw.rebuild_url(url, **newparams) raise Redirect(url) @@ -221,4 +219,3 @@ from cubicweb import set_log_methods set_log_methods(Controller, LOGGER) - diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/cors.py --- a/web/cors.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/cors.py Thu Jun 16 14:19:20 2016 +0200 @@ -14,7 +14,7 @@ """ -import urlparse +from six.moves.urllib.parse import urlsplit from cubicweb.web import LOGGER info = LOGGER.info @@ -37,7 +37,7 @@ In case of non-compliance, no CORS-related header is set. 
""" - base_url = urlparse.urlsplit(req.base_url()) + base_url = urlsplit(req.base_url()) expected_host = '://'.join((base_url.scheme, base_url.netloc)) if not req.get_header('Origin') or req.get_header('Origin') == expected_host: # not a CORS request, nothing to do @@ -50,7 +50,7 @@ process_preflight(req, config) else: # Simple CORS or actual request process_simple(req, config) - except CORSFailed, exc: + except CORSFailed as exc: info('Cross origin resource sharing failed: %s' % exc) except CORSPreflight: info('Cross origin resource sharing: valid Preflight request %s') @@ -101,7 +101,7 @@ if '*' not in allowed_origins and origin not in allowed_origins: raise CORSFailed('Origin is not allowed') # bit of sanity check; see "6.3 Security" - myhost = urlparse.urlsplit(req.base_url()).netloc + myhost = urlsplit(req.base_url()).netloc host = req.get_header('Host') if host != myhost: info('cross origin resource sharing detected possible ' @@ -111,4 +111,3 @@ # include "Vary: Origin" header (see 6.4) req.headers_out.addHeader('Vary', 'Origin') return origin - diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/data/cubicweb.edition.js --- a/web/data/cubicweb.edition.js Thu Mar 24 09:43:25 2016 +0100 +++ b/web/data/cubicweb.edition.js Thu Jun 16 14:19:20 2016 +0200 @@ -537,6 +537,24 @@ } /** + * Cancel the operations done on the given form. + * + */ +$(function () { + $(document).on('click', '.cwjs-edition-cancel', function (evt) { + var $mynode = $(evt.currentTarget), + $form = $mynode.closest('form'), + $error = $form.find(':input[name="__errorurl"]'), + errorurl = $error.attr('value'), + args = ajaxFuncArgs('cancel_edition', null, errorurl); + loadRemote(AJAX_BASE_URL, args, 'POST', true); + history.back(); + return false; + }); +}); + + +/** * .. function:: validateForm(formid, action, onsuccess, onfailure) * * called on traditionnal form submission : the idea is to try diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/data/cubicweb.goa.js --- a/web/data/cubicweb.goa.js Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,16 +0,0 @@ -/** - * functions specific to cubicweb on google appengine - * - * :organization: Logilab - * :copyright: 2008-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. - * :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr - */ - -/** - * .. function:: rql_for_eid(eid) - * - * overrides rql_for_eid function from htmlhelpers.hs - */ -function rql_for_eid(eid) { - return 'Any X WHERE X eid "' + eid + '"'; -} diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/data/cubicweb.widgets.js --- a/web/data/cubicweb.widgets.js Thu Mar 24 09:43:25 2016 +0100 +++ b/web/data/cubicweb.widgets.js Thu Jun 16 14:19:20 2016 +0200 @@ -25,6 +25,23 @@ return null; } +function renderJQueryDatePicker(subject, button_image, date_format, min_date, max_date){ + $widget = cw.jqNode(subject); + $widget.datepicker({buttonImage: button_image, dateFormat: date_format, + firstDay: 1, showOn: "button", buttonImageOnly: true, + minDate: min_date, maxDate: max_date}); + $widget.change(function(ev) { + maxOfId = $(this).data('max-of'); + if (maxOfId) { + cw.jqNode(maxOfId).datepicker("option", "maxDate", this.value); + } + minOfId = $(this).data('min-of'); + if (minOfId) { + cw.jqNode(minOfId).datepicker("option", "minDate", this.value); + } + }); +} + /** * .. 
function:: buildWidgets(root) * diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/facet.py --- a/web/facet.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/facet.py Thu Jun 16 14:19:20 2016 +0200 @@ -50,13 +50,15 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from functools import reduce from warnings import warn from copy import deepcopy from datetime import datetime, timedelta +from six import text_type, string_types + from logilab.mtconverter import xml_escape from logilab.common.graph import has_path from logilab.common.decorators import cached, cachedproperty @@ -80,7 +82,7 @@ ptypes = facet.cw_rset.column_types(0) if len(ptypes) == 1: return display_name(facet._cw, facet.rtype, form=facet.role, - context=iter(ptypes).next()) + context=next(iter(ptypes))) return display_name(facet._cw, facet.rtype, form=facet.role) def get_facet(req, facetid, select, filtered_variable): @@ -133,7 +135,7 @@ or the first variable selected in column 0 """ if mainvar is None: - vref = select.selection[0].iget_nodes(nodes.VariableRef).next() + vref = next(select.selection[0].iget_nodes(nodes.VariableRef)) return vref.variable return select.defined_vars[mainvar] @@ -156,7 +158,7 @@ for term in select.selection[:]: select.remove_selected(term) # remove unbound variables which only have some type restriction - for dvar in list(select.defined_vars.itervalues()): + for dvar in list(select.defined_vars.values()): if not (dvar is filtered_variable or dvar.stinfo['relations']): select.undefine_variable(dvar) # global tree config: DISTINCT, LIMIT, OFFSET @@ -305,7 +307,7 @@ # optional relation return ovar if all(rdef.cardinality[cardidx] in '1+' - for rdef in rschema.rdefs.itervalues()): + for rdef in rschema.rdefs.values()): # mandatory relation without any restriction on the other variable for orel in ovar.stinfo['relations']: if rel is orel: @@ -670,7 +672,7 @@ insert_attr_select_relation( select, self.filtered_variable, self.rtype, self.role, self.target_attr, select_target_entity=False) - values = [unicode(x) for x, in self.rqlexec(select.as_string())] + values = [text_type(x) for x, in self.rqlexec(select.as_string())] except Exception: self.exception('while computing values for %s', self) return [] @@ -719,14 +721,14 @@ def rset_vocabulary(self, rset): if self.i18nable: - _ = self._cw._ + tr = self._cw._ else: - _ = unicode + tr = text_type if self.rql_sort: - values = [(_(label), eid) for eid, label in rset] + values = [(tr(label), eid) for eid, label in rset] else: if self.label_vid is None: - values = [(_(label), eid) for eid, label in rset] + values = [(tr(label), eid) for eid, label in rset] else: values = [(entity.view(self.label_vid), entity.eid) for entity in rset.entities()] @@ -754,7 +756,7 @@ # XXX handle rel is None case in RQLPathFacet? 
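facet.py rewrites every `it.next()` call as `next(it)`: Python 3 renamed the iterator method to `__next__`, while the `next()` builtin (available since Python 2.6) works unchanged on both, and it optionally takes a default instead of raising `StopIteration`. A short sketch:

    ptypes = set([u'CWUser'])
    # py2-only: iter(ptypes).next() -- portable: next(iter(ptypes))
    assert next(iter(ptypes)) == u'CWUser'
    # an explicit default avoids StopIteration on an empty iterable
    assert next(iter([]), None) is None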
if self.restr_attr != 'eid': self.select.set_distinct(True) - if isinstance(value, basestring): + if isinstance(value, string_types): # only one value selected if value: self.select.add_constant_restriction( @@ -808,7 +810,7 @@ rschema = self._cw.vreg.schema.rschema(self.rtype) # XXX when called via ajax, no rset to compute possible types possibletypes = self.cw_rset and self.cw_rset.column_types(0) - for rdef in rschema.rdefs.itervalues(): + for rdef in rschema.rdefs.values(): if possibletypes is not None: if self.role == 'subject': if rdef.subject not in possibletypes: @@ -829,13 +831,13 @@ if self._cw.vreg.schema.rschema(self.rtype).final: return False if self.role == 'object': - subj = utils.rqlvar_maker(defined=self.select.defined_vars, - aliases=self.select.aliases).next() + subj = next(utils.rqlvar_maker(defined=self.select.defined_vars, + aliases=self.select.aliases)) obj = self.filtered_variable.name else: subj = self.filtered_variable.name - obj = utils.rqlvar_maker(defined=self.select.defined_vars, - aliases=self.select.aliases).next() + obj = next(utils.rqlvar_maker(defined=self.select.defined_vars, + aliases=self.select.aliases)) restrictions = [] if self.select.where: restrictions.append(self.select.where.as_string()) @@ -916,15 +918,13 @@ def rset_vocabulary(self, rset): if self.i18nable: - _ = self._cw._ + tr = self._cw._ else: - _ = unicode + tr = text_type if self.rql_sort: - return [(_(value), value) for value, in rset] - values = [(_(value), value) for value, in rset] - if self.sortasc: - return sorted(values) - return reversed(sorted(values)) + return [(tr(value), value) for value, in rset] + values = [(tr(value), value) for value, in rset] + return sorted(values, reverse=not self.sortasc) class AttributeFacet(RelationAttributeFacet): @@ -1073,7 +1073,7 @@ assert self.path and isinstance(self.path, (list, tuple)), \ 'path should be a list of 3-uples, not %s' % self.path for part in self.path: - if isinstance(part, basestring): + if isinstance(part, string_types): part = part.split() assert len(part) == 3, \ 'path should be a list of 3-uples, not %s' % part @@ -1126,7 +1126,7 @@ cleanup_select(select, self.filtered_variable) varmap, restrvar = self.add_path_to_select(skiplabel=True) select.append_selected(nodes.VariableRef(restrvar)) - values = [unicode(x) for x, in self.rqlexec(select.as_string())] + values = [text_type(x) for x, in self.rqlexec(select.as_string())] except Exception: self.exception('while computing values for %s', self) return [] @@ -1149,7 +1149,7 @@ varmap = {'X': self.filtered_variable} actual_filter_variable = None for part in self.path: - if isinstance(part, basestring): + if isinstance(part, string_types): part = part.split() subject, rtype, object = part if skiplabel and object == self.label_variable: @@ -1165,7 +1165,7 @@ if len(attrtypes) > 1: raise Exception('ambigous attribute %s, specify attrtype on %s' % (rtype, self.__class__)) - self.restr_attr_type = iter(attrtypes).next() + self.restr_attr_type = next(iter(attrtypes)) if skipattrfilter: actual_filter_variable = subject continue @@ -1253,7 +1253,7 @@ rset = self._range_rset() if rset: minv, maxv = rset[0] - return [(unicode(minv), minv), (unicode(maxv), maxv)] + return [(text_type(minv), minv), (text_type(maxv), maxv)] return [] def possible_values(self): @@ -1272,7 +1272,7 @@ def formatvalue(self, value): """format `value` before in order to insert it in the RQL query""" - return unicode(value) + return text_type(value) def infvalue(self, min=False): if min: @@ -1373,7 +1373,7 @@ # 
*list* (see rqlexec implementation) if rset: minv, maxv = rset[0] - return [(unicode(minv), minv), (unicode(maxv), maxv)] + return [(text_type(minv), minv), (text_type(maxv), maxv)] return [] @@ -1392,7 +1392,7 @@ skiplabel=True, skipattrfilter=True) restrel = None for part in self.path: - if isinstance(part, basestring): + if isinstance(part, string_types): part = part.split() subject, rtype, object = part if object == self.filter_variable: @@ -1516,7 +1516,7 @@ if not val or val & mask]) def possible_values(self): - return [unicode(val) for label, val in self.vocabulary()] + return [text_type(val) for label, val in self.vocabulary()] ## html widgets ################################################################ @@ -1595,7 +1595,7 @@ if selected: cssclass += ' facetValueSelected' w(u'
\n' - % (cssclass, xml_escape(unicode(value)))) + % (cssclass, xml_escape(text_type(value)))) # If it is overflowed one must add padding to compensate for the vertical # scrollbar; given current css values, 4 blanks work perfectly ... padding = u' ' * self.scrollbar_padding_factor if overflow else u'' @@ -1754,7 +1754,7 @@ imgsrc = self._cw.data_url(self.unselected_img) imgalt = self._cw._('not selected') w(u'
\n' - % (cssclass, xml_escape(unicode(self.value)))) + % (cssclass, xml_escape(text_type(self.value)))) w(u'
') w(u'%s ' % (imgsrc, imgalt)) w(u'' diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/form.py --- a/web/form.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/form.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,6 +20,8 @@ from warnings import warn +from six import add_metaclass + from logilab.common.decorators import iclassmethod from logilab.common.deprecation import deprecated @@ -74,8 +76,8 @@ found """ +@add_metaclass(metafieldsform) class Form(AppObject): - __metaclass__ = metafieldsform __registry__ = 'forms' parent_form = None @@ -120,7 +122,7 @@ extrakw = {} # search for navigation parameters and customization of existing # attributes; remaining stuff goes in extrakwargs - for key, val in kwargs.iteritems(): + for key, val in kwargs.items(): if key in controller.NAV_FORM_PARAMETERS: hiddens.append( (key, val) ) elif key == 'redirect_path': @@ -280,4 +282,3 @@ def remaining_errors(self): return sorted(self.form_valerror.errors.items()) - diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/formfields.py --- a/web/formfields.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/formfields.py Thu Jun 16 14:19:20 2016 +0200 @@ -66,6 +66,8 @@ from warnings import warn from datetime import datetime, timedelta +from six import PY2, text_type, string_types + from logilab.mtconverter import xml_escape from logilab.common import nullobject from logilab.common.date import ustrftime @@ -159,7 +161,7 @@ :attr:`order` key used by automatic forms to sort fields :attr:`ignore_req_params` - when true, this field won't consider value potentialy specified using + when true, this field won't consider value potentially specified using request's form parameters (eg you won't be able to specify a value using for instance url like http://mywebsite.com/form?field=value) @@ -231,11 +233,14 @@ def __unicode__(self): return self.as_string(False) - def __str__(self): - return self.as_string(False).encode('UTF8') + if PY2: + def __str__(self): + return self.as_string(False).encode('UTF8') + else: + __str__ = __unicode__ def __repr__(self): - return self.as_string(True).encode('UTF8') + return self.as_string(True) def init_widget(self, widget): if widget is not None: @@ -279,7 +284,7 @@ return u'' if value is True: return u'1' - return unicode(value) + return text_type(value) def get_widget(self, form): """return the widget instance associated to this field""" @@ -381,7 +386,7 @@ assert self.choices is not None if callable(self.choices): # pylint: disable=E1102 - if getattr(self.choices, 'im_self', None) is self: + if getattr(self.choices, '__self__', None) is self: vocab = self.choices(form=form, **kwargs) else: vocab = self.choices(form=form, field=self, **kwargs) @@ -508,7 +513,7 @@ class StringField(Field): """Use this field to edit unicode string (`String` yams type). This field - additionaly support a `max_length` attribute that specify a maximum size for + additionally support a `max_length` attribute that specify a maximum size for the string (`None` meaning no limit). Unless explicitly specified, the widget for this field will be: @@ -780,7 +785,7 @@ If the stream format is one of text/plain, text/html, text/rest, text/markdown - then a :class:`~cubicweb.web.formwidgets.TextArea` will be additionaly + then a :class:`~cubicweb.web.formwidgets.TextArea` will be additionally displayed, allowing to directly the file's content when desired, instead of choosing a file from user's file system. 
""" @@ -794,7 +799,7 @@ if data: encoding = self.encoding(form) try: - form.formvalues[(self, form)] = unicode(data.getvalue(), encoding) + form.formvalues[(self, form)] = data.getvalue().decode(encoding) except UnicodeError: pass else: @@ -815,7 +820,7 @@ def _process_form_value(self, form): value = form._cw.form.get(self.input_name(form)) - if isinstance(value, unicode): + if isinstance(value, text_type): # file modified using a text widget return Binary(value.encode(self.encoding(form))) return super(EditableFileField, self)._process_form_value(form) @@ -823,7 +828,7 @@ class BigIntField(Field): """Use this field to edit big integers (`BigInt` yams type). This field - additionaly support `min` and `max` attributes that specify a minimum and/or + additionally support `min` and `max` attributes that specify a minimum and/or maximum value for the integer (`None` meaning no boundary). Unless explicitly specified, the widget for this field will be a @@ -842,7 +847,7 @@ self.widget.attrs.setdefault('size', self.default_text_input_size) def _ensure_correctly_typed(self, form, value): - if isinstance(value, basestring): + if isinstance(value, string_types): value = value.strip() if not value: return None @@ -907,7 +912,7 @@ class FloatField(IntField): - """Use this field to edit floats (`Float` yams type). This field additionaly + """Use this field to edit floats (`Float` yams type). This field additionally support `min` and `max` attributes as the :class:`~cubicweb.web.formfields.IntField`. @@ -924,7 +929,7 @@ return self.format_single_value(req, 1.234) def _ensure_correctly_typed(self, form, value): - if isinstance(value, basestring): + if isinstance(value, string_types): value = value.strip() if not value: return None @@ -946,7 +951,7 @@ def format_single_value(self, req, value): if value: value = format_time(value.days * 24 * 3600 + value.seconds) - return unicode(value) + return text_type(value) return u'' def example_format(self, req): @@ -956,7 +961,7 @@ return u'20s, 10min, 24h, 4d' def _ensure_correctly_typed(self, form, value): - if isinstance(value, basestring): + if isinstance(value, string_types): value = value.strip() if not value: return None @@ -986,14 +991,14 @@ return self.format_single_value(req, datetime.now()) def _ensure_correctly_typed(self, form, value): - if isinstance(value, basestring): + if isinstance(value, string_types): value = value.strip() if not value: return None try: value = form._cw.parse_datetime(value, self.etype) except ValueError as ex: - raise ProcessFormError(unicode(ex)) + raise ProcessFormError(text_type(ex)) return value @@ -1083,7 +1088,7 @@ linkedto = form.linked_to.get((self.name, self.role)) if linkedto: buildent = form._cw.entity_from_eid - return [(buildent(eid).view('combobox'), unicode(eid)) + return [(buildent(eid).view('combobox'), text_type(eid)) for eid in linkedto] return [] @@ -1095,7 +1100,7 @@ # vocabulary doesn't include current values, add them if form.edited_entity.has_eid(): rset = form.edited_entity.related(self.name, self.role) - vocab += [(e.view('combobox'), unicode(e.eid)) + vocab += [(e.view('combobox'), text_type(e.eid)) for e in rset.entities()] return vocab @@ -1129,11 +1134,11 @@ if entity.eid in done: continue done.add(entity.eid) - res.append((entity.view('combobox'), unicode(entity.eid))) + res.append((entity.view('combobox'), text_type(entity.eid))) return res def format_single_value(self, req, value): - return unicode(value) + return text_type(value) def process_form_value(self, form): """process posted form and 
return correctly typed value""" diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/formwidgets.py --- a/web/formwidgets.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/formwidgets.py Thu Jun 16 14:19:20 2016 +0200 @@ -97,10 +97,10 @@ from functools import reduce from datetime import date -from warnings import warn + +from six import text_type, string_types from logilab.mtconverter import xml_escape -from logilab.common.deprecation import deprecated from logilab.common.date import todatetime from cubicweb import tags, uilib @@ -208,7 +208,7 @@ attrs = dict(self.attrs) if self.setdomid: attrs['id'] = field.dom_id(form, self.suffix) - if self.settabindex and not 'tabindex' in attrs: + if self.settabindex and 'tabindex' not in attrs: attrs['tabindex'] = form._cw.next_tabindex() if 'placeholder' in attrs: attrs['placeholder'] = form._cw._(attrs['placeholder']) @@ -282,7 +282,7 @@ """ posted = form._cw.form val = posted.get(field.input_name(form, self.suffix)) - if isinstance(val, basestring): + if isinstance(val, string_types): val = val.strip() return val @@ -385,7 +385,7 @@ string. """ type = 'hidden' - setdomid = False # by default, don't set id attribute on hidden input + setdomid = False # by default, don't set id attribute on hidden input settabindex = False @@ -416,7 +416,7 @@ lines = value.splitlines() linecount = len(lines) for line in lines: - linecount += len(line) / self._columns + linecount += len(line) // self._columns attrs.setdefault('cols', self._columns) attrs.setdefault('rows', min(self._maxrows, linecount + self._minrows)) return tags.textarea(value, name=field.input_name(form, self.suffix), @@ -472,9 +472,9 @@ options.append(tags.option(label, value=value, **oattrs)) if optgroup_opened: options.append(u'') - if not 'size' in attrs: + if 'size' not in attrs: if self._multiple: - size = unicode(min(self.default_size, len(vocab) or 1)) + size = text_type(min(self.default_size, len(vocab) or 1)) else: size = u'1' attrs['size'] = size @@ -530,12 +530,11 @@ options.append(tags.option(label, value=value)) if 'size' not in attrs: attrs['size'] = self.default_size - if 'id' in attrs : + if 'id' in attrs: attrs.pop('id') return tags.select(name=name, multiple=self._multiple, id=name, options=options, **attrs) + '\n'.join(inputs) - def _render(self, form, field, renderer): domid = field.dom_id(form) jsnodes = {'widgetid': domid, @@ -547,10 +546,10 @@ return (self.template % {'widgetid': jsnodes['widgetid'], # helpinfo select tag - 'inoutinput' : self.render_select(form, field, jsnodes['from']), + 'inoutinput': self.render_select(form, field, jsnodes['from']), # select tag with resultats - 'resinput' : self.render_select(form, field, jsnodes['to'], selected=True), - 'addinput' : self.add_button % jsnodes, + 'resinput': self.render_select(form, field, jsnodes['to'], selected=True), + 'addinput': self.add_button % jsnodes, 'removeinput': self.remove_button % jsnodes }) @@ -616,7 +615,7 @@ iattrs['checked'] = u'checked' tag = tags.input(name=field.input_name(form, self.suffix), type=self.type, value=value, **iattrs) - options.append(u'%s %s' % (tag, label)) + options.append(u'' % (tag, xml_escape(label))) return sep.join(options) @@ -671,21 +670,43 @@ """ % (helperid, inputid, year, month, form._cw.uiprops['CALENDAR_ICON'], - form._cw._('calendar'), helperid) ) + form._cw._('calendar'), helperid)) class JQueryDatePicker(FieldWidget): """Use jquery.ui.datepicker to define a date picker. Will return the date as a unicode string. + + You can couple DatePickers by using the min_of and/or max_of parameters. 
+ The DatePicker identified by the value of min_of(/max_of) will force the user to + choose a date anterior(/posterior) to this DatePicker. + + example: + start and end are two JQueryDatePicker and start must always be before end + affk.set_field_kwargs(etype, 'start_date', widget=JQueryDatePicker(min_of='end_date')) + affk.set_field_kwargs(etype, 'end_date', widget=JQueryDatePicker(max_of='start_date')) + That way, on change of end(/start) value a new max(/min) will be set for start(/end) + The invalid dates will be gray colored in the datepicker """ needs_js = ('jquery.ui.js', ) needs_css = ('jquery.ui.css',) default_size = 10 - def __init__(self, datestr=None, **kwargs): + def __init__(self, datestr=None, min_of=None, max_of=None, **kwargs): super(JQueryDatePicker, self).__init__(**kwargs) + self.min_of = min_of + self.max_of = max_of self.value = datestr + def attributes(self, form, field): + form._cw.add_js('cubicweb.widgets.js') + attrs = super(JQueryDatePicker, self).attributes(form, field) + if self.max_of: + attrs['data-max-of'] = '%s-subject:%s' % (self.max_of, form.edited_entity.eid) + if self.min_of: + attrs['data-min-of'] = '%s-subject:%s' % (self.min_of, form.edited_entity.eid) + return attrs + def _render(self, form, field, renderer): req = form._cw if req.lang != 'en': @@ -693,11 +714,19 @@ domid = field.dom_id(form, self.suffix) # XXX find a way to understand every format fmt = req.property_value('ui.date-format') - fmt = fmt.replace('%Y', 'yy').replace('%m', 'mm').replace('%d', 'dd') - req.add_onload(u'cw.jqNode("%s").datepicker(' - '{buttonImage: "%s", dateFormat: "%s", firstDay: 1,' - ' showOn: "button", buttonImageOnly: true})' % ( - domid, req.uiprops['CALENDAR_ICON'], fmt)) + picker_fmt = fmt.replace('%Y', 'yy').replace('%m', 'mm').replace('%d', 'dd') + max_date = min_date = None + if self.min_of: + current = getattr(form.edited_entity, self.min_of) + if current is not None: + max_date = current.strftime(fmt) + if self.max_of: + current = getattr(form.edited_entity, self.max_of) + if current is not None: + min_date = current.strftime(fmt) + req.add_onload(u'renderJQueryDatePicker("%s", "%s", "%s", %s, %s);' + % (domid, req.uiprops['CALENDAR_ICON'], picker_fmt, json_dumps(min_date), + json_dumps(max_date))) return self._render_input(form, field) def _render_input(self, form, field): @@ -706,7 +735,7 @@ else: value = self.value attrs = self.attributes(form, field) - attrs.setdefault('size', unicode(self.default_size)) + attrs.setdefault('size', text_type(self.default_size)) return tags.input(name=field.input_name(form, self.suffix), value=value, type='text', **attrs) @@ -727,7 +756,7 @@ def _render(self, form, field, renderer): domid = field.dom_id(form, self.suffix) form._cw.add_onload(u'cw.jqNode("%s").timePicker({step: %s, separator: "%s"})' % ( - domid, self.timesteps, self.separator)) + domid, self.timesteps, self.separator)) return self._render_input(form, field) @@ -767,8 +796,8 @@ timepicker = JQueryTimePicker(timestr=timestr, timesteps=self.timesteps, suffix='time') return u'
<div id="%s">%s%s</div>
' % (field.dom_id(form), - datepicker.render(form, field, renderer), - timepicker.render(form, field, renderer)) + datepicker.render(form, field, renderer), + timepicker.render(form, field, renderer)) def process_field_data(self, form, field): req = form._cw @@ -779,13 +808,13 @@ try: date = todatetime(req.parse_datetime(datestr, 'Date')) except ValueError as exc: - raise ProcessFormError(unicode(exc)) + raise ProcessFormError(text_type(exc)) if timestr is None: return date try: time = req.parse_datetime(timestr, 'Time') except ValueError as exc: - raise ProcessFormError(unicode(exc)) + raise ProcessFormError(text_type(exc)) return date.replace(hour=time.hour, minute=time.minute, second=time.second) @@ -855,7 +884,6 @@ pageid=entity._cw.pageid) - class StaticFileAutoCompletionWidget(AutoCompletionWidget): """XXX describe me""" wdgtype = 'StaticFileSuggestField' @@ -874,10 +902,11 @@ def values_and_attributes(self, form, field): """override values_and_attributes to handle initial displayed values""" - values, attrs = super(LazyRestrictedAutoCompletionWidget, self).values_and_attributes(form, field) + values, attrs = super(LazyRestrictedAutoCompletionWidget, self).values_and_attributes( + form, field) assert len(values) == 1, "multiple selection is not supported yet by LazyWidget" if not values[0]: - values = form.cw_extra_kwargs.get(field.name,'') + values = form.cw_extra_kwargs.get(field.name, '') if not isinstance(values, (tuple, list)): values = (values,) try: @@ -917,7 +946,7 @@ actual_fields[0].render(form, renderer), form._cw._('to_interval_end'), actual_fields[1].render(form, renderer), - ) + ) class HorizontalLayoutWidget(FieldWidget): @@ -928,7 +957,7 @@ if self.attrs.get('display_label', True): subst = self.attrs.get('label_input_substitution', '%(label)s %(input)s') fields = [subst % {'label': renderer.render_label(form, f), - 'input': f.render(form, renderer)} + 'input': f.render(form, renderer)} for f in field.subfields(form)] else: fields = [f.render(form, renderer) for f in field.subfields(form)] @@ -946,7 +975,7 @@ assert self.suffix is None, 'not supported' req = form._cw pathqname = field.input_name(form, 'path') - fqsqname = field.input_name(form, 'fqs') # formatted query string + fqsqname = field.input_name(form, 'fqs') # formatted query string if pathqname in form.form_previous_values: path = form.form_previous_values[pathqname] fqs = form.form_previous_values[fqsqname] @@ -967,7 +996,7 @@ attrs = dict(self.attrs) if self.setdomid: attrs['id'] = field.dom_id(form) - if self.settabindex and not 'tabindex' in attrs: + if self.settabindex and 'tabindex' not in attrs: attrs['tabindex'] = req.next_tabindex() # ensure something is rendered inputs = [u'') self.w(u'') for column in self.columns: - attrs = ('%s="%s"' % (name, value) for name, value in column.cell_attrs.iteritems()) + attrs = ('%s="%s"' % (name, value) for name, value in column.cell_attrs.items()) self.w(u'' % (' '.join(attrs), column.name or u'')) self.w(u'') self.w(u'') - for rowindex in xrange(len(self.model.get_rows())): + for rowindex in range(len(self.model.get_rows())): klass = (rowindex%2==1) and 'odd' or 'even' self.w(u'' % (klass, self.highlight)) for column, sortvalue in self.itercols(rowindex): attrs = dict(column.cell_attrs) attrs["cubicweb:sortvalue"] = sortvalue - attrs = ('%s="%s"' % (name, value) for name, value in attrs.iteritems()) + attrs = ('%s="%s"' % (name, value) for name, value in attrs.items()) self.w(u'') - for rownum in xrange(self.view.table_size): + for rownum in 
range(self.view.table_size): self.render_row(w, rownum, colrenderers) w(u'') @@ -284,7 +287,7 @@ attrs = renderer.attributes.copy() if renderer.sortable: sortvalue = renderer.sortvalue(rownum) - if isinstance(sortvalue, basestring): + if isinstance(sortvalue, string_types): sortvalue = sortvalue[:self.sortvalue_limit] if sortvalue is not None: attrs[u'cubicweb:sortvalue'] = js_dumps(sortvalue) @@ -646,10 +649,10 @@ # compute displayed columns if self.displaycols is None: if headers is not None: - displaycols = range(len(headers)) + displaycols = list(range(len(headers))) else: rqlst = self.cw_rset.syntax_tree() - displaycols = range(len(rqlst.children[0].selection)) + displaycols = list(range(len(rqlst.children[0].selection))) else: displaycols = self.displaycols # compute table headers @@ -723,7 +726,7 @@ for aname, member in[('renderfunc', renderfunc), ('sortfunc', sortfunc)]: if isinstance(member, MethodType): - member = MethodType(member.im_func, acopy, acopy.__class__) + member = create_bound_method(member.__func__, acopy) setattr(acopy, aname, member) return acopy finally: @@ -918,13 +921,13 @@ ################################################################################ +@add_metaclass(class_deprecated) class TableView(AnyRsetView): """The table view accepts any non-empty rset. It uses introspection on the result set to compute column names and the proper way to display the cells. It is however highly configurable and accepts a wealth of options. """ - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.14] %(cls)s is deprecated' __regid__ = 'table' title = _('table') @@ -977,9 +980,9 @@ if 'displaycols' in self._cw.form: displaycols = [int(idx) for idx in self._cw.form['displaycols']] elif headers is not None: - displaycols = range(len(headers)) + displaycols = list(range(len(headers))) else: - displaycols = range(len(self.cw_rset.syntax_tree().children[0].selection)) + displaycols = list(range(len(self.cw_rset.syntax_tree().children[0].selection))) return displaycols def _setup_tablesorter(self, divid): @@ -1143,7 +1146,7 @@ else: column.append_renderer(subvid or 'incontext', colindex) if cellattrs and colindex in cellattrs: - for name, value in cellattrs[colindex].iteritems(): + for name, value in cellattrs[colindex].items(): column.add_attr(name, value) # add column columns.append(column) @@ -1184,8 +1187,8 @@ title = _('editable-table') +@add_metaclass(class_deprecated) class CellView(EntityView): - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.14] %(cls)s is deprecated' __regid__ = 'cell' __select__ = nonempty_rset() @@ -1271,6 +1274,7 @@ finalview = 'editable-final' +@add_metaclass(class_deprecated) class EntityAttributesTableView(EntityView): """This table displays entity attributes in a table and allow to set a specific method to help building cell content for each attribute as well as @@ -1282,7 +1286,6 @@ Table will render column header using the method header_for_COLNAME if defined otherwise COLNAME will be used. """ - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.14] %(cls)s is deprecated' __abstract__ = True columns = () @@ -1298,7 +1301,7 @@ self.w(u'
', @@ -993,12 +1022,12 @@ req = form._cw values = {} path = req.form.get(field.input_name(form, 'path')) - if isinstance(path, basestring): + if isinstance(path, string_types): path = path.strip() if path is None: path = u'' fqs = req.form.get(field.input_name(form, 'fqs')) - if isinstance(fqs, basestring): + if isinstance(fqs, string_types): fqs = fqs.strip() or None if fqs: for i, line in enumerate(fqs.split('\n')): @@ -1007,9 +1036,10 @@ try: key, val = line.split('=', 1) except ValueError: - raise ProcessFormError(req._("wrong query parameter line %s") % (i+1)) + msg = req._("wrong query parameter line %s") % (i + 1) + raise ProcessFormError(msg) # value will be url quoted by build_url_params - values.setdefault(key.encode(req.encoding), []).append(val) + values.setdefault(key, []).append(val) if not values: return path return u'%s?%s' % (path, req.build_url_params(**values)) @@ -1055,7 +1085,7 @@ attrs['name'] = self.name if self.setdomid: attrs['id'] = self.name - if self.settabindex and not 'tabindex' in attrs: + if self.settabindex and 'tabindex' not in attrs: attrs['tabindex'] = form._cw.next_tabindex() if self.icon: img = tags.img(src=form._cw.uiprops[self.icon], alt=self.icon) @@ -1092,6 +1122,5 @@ imgsrc = form._cw.uiprops[self.imgressource] return ''\ '%(label)s%(label)s' % { - 'label': label, 'imgsrc': imgsrc, - 'domid': self.domid, 'href': self.href} - + 'label': label, 'imgsrc': imgsrc, + 'domid': self.domid, 'href': self.href} diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/htmlwidgets.py --- a/web/htmlwidgets.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/htmlwidgets.py Thu Jun 16 14:19:20 2016 +0200 @@ -24,6 +24,9 @@ import random from math import floor +from six import add_metaclass +from six.moves import range + from logilab.mtconverter import xml_escape from logilab.common.deprecation import class_deprecated @@ -115,9 +118,9 @@ self.w(u'') +@add_metaclass(class_deprecated) class SideBoxWidget(BoxWidget): """default CubicWeb's sidebox widget""" - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.10] class %(cls)s is deprecated' title_class = u'sideBoxTitle' @@ -207,9 +210,9 @@ self.w(u'') +@add_metaclass(class_deprecated) class BoxField(HTMLWidget): """couples label / value meant to be displayed in a box""" - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.10] class %(cls)s is deprecated' def __init__(self, label, value): self.label = label @@ -220,18 +223,19 @@ u'%s' % (self.label, self.value)) + +@add_metaclass(class_deprecated) class BoxSeparator(HTMLWidget): """a menu separator""" - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.10] class %(cls)s is deprecated' def _render(self): self.w(u'
    ') +@add_metaclass(class_deprecated) class BoxLink(HTMLWidget): """a link in a box""" - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.10] class %(cls)s is deprecated' def __init__(self, href, label, _class='', title='', ident='', escape=False): self.href = href @@ -252,9 +256,9 @@ self.w(u'
<li class="%s">%s</li>\n' % (self._class, link)) +@add_metaclass(class_deprecated) class BoxHtml(HTMLWidget): """a form in a box""" - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.10] class %(cls)s is deprecated' def __init__(self, rawhtml): self.rawhtml = rawhtml @@ -339,17 +343,17 @@ self.w(u'
%s
' % (' '.join(attrs))) for cellvid, colindex in column.cellrenderers: self.model.render_cell(cellvid, rowindex, colindex, w=self.w) @@ -361,5 +365,3 @@ def itercols(self, rowindex): for column in self.columns: yield column, self.model.sortvalue(rowindex, column.rset_sortcol) - - diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/http_headers.py --- a/web/http_headers.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/http_headers.py Thu Jun 16 14:19:20 2016 +0200 @@ -2,11 +2,14 @@ # http://twistedmatrix.com/trac/wiki/TwistedWeb2 -import types, time +import time from calendar import timegm import base64 import re -import urlparse + +from six import string_types +from six.moves.urllib.parse import urlparse + def dashCapitalize(s): ''' Capitalize a string, making sure to treat - as a word seperator ''' @@ -295,9 +298,9 @@ cur = cur+1 if qpair: - raise ValueError, "Missing character after '\\'" + raise ValueError("Missing character after '\\'") if quoted: - raise ValueError, "Missing end quote" + raise ValueError("Missing end quote") if start != cur: if foldCase: @@ -347,7 +350,7 @@ ##### parser utilities: def checkSingleToken(tokens): if len(tokens) != 1: - raise ValueError, "Expected single token, not %s." % (tokens,) + raise ValueError("Expected single token, not %s." % (tokens,)) return tokens[0] def parseKeyValue(val): @@ -355,11 +358,11 @@ return val[0], None elif len(val) == 3 and val[1] == Token('='): return val[0], val[2] - raise ValueError, "Expected key or key=value, but got %s." % (val,) + raise ValueError("Expected key or key=value, but got %s." % (val,)) def parseArgs(field): args = split(field, Token(';')) - val = args.next() + val = next(args) args = [parseKeyValue(arg) for arg in args] return val, args @@ -380,7 +383,7 @@ def unique(seq): '''if seq is not a string, check it's a sequence of one element and return it''' - if isinstance(seq, basestring): + if isinstance(seq, string_types): return seq if len(seq) != 1: raise ValueError('single value required, not %s' % seq) @@ -398,7 +401,7 @@ """Ensure origin is a valid URL-base stuff, or null""" if origin == 'null': return origin - p = urlparse.urlparse(origin) + p = urlparse(origin) if p.params or p.query or p.username or p.path not in ('', '/'): raise ValueError('Incorrect Accept-Control-Allow-Origin value %s' % origin) if p.scheme not in ('http', 'https'): @@ -452,14 +455,15 @@ """ if (value in (True, 1) or - isinstance(value, basestring) and value.lower() == 'true'): + isinstance(value, string_types) and value.lower() == 'true'): return 'true' if (value in (False, 0) or - isinstance(value, basestring) and value.lower() == 'false'): + isinstance(value, string_types) and value.lower() == 'false'): return 'false' raise ValueError("Invalid true/false header value: %s" % value) class MimeType(object): + @classmethod def fromString(klass, mimeTypeString): """Generate a MimeType object from the given string. @@ -469,8 +473,6 @@ """ return DefaultHTTPHandler.parse('content-type', [mimeTypeString]) - fromString = classmethod(fromString) - def __init__(self, mediaType, mediaSubtype, params={}, **kwargs): """ @type mediaType: C{str} @@ -499,14 +501,14 @@ return "MimeType(%r, %r, %r)" % (self.mediaType, self.mediaSubtype, self.params) def __hash__(self): - return hash(self.mediaType)^hash(self.mediaSubtype)^hash(tuple(self.params.iteritems())) + return hash(self.mediaType)^hash(self.mediaSubtype)^hash(tuple(self.params.items())) ##### Specific header parsers. 
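# Two constructs fixed in this module no longer parse on Python 3; a
# standalone sketch of both (the header value is hypothetical):
def generateCacheControl(args):     # was: def generateCacheControl((k, v)):
    k, v = args                     # tuple parameters were removed (PEP 3113)
    if v is None:
        return str(k)
    return '%s=%s' % (k, v)

assert generateCacheControl(('max-age', 3600)) == 'max-age=3600'
# raise statements likewise move from `raise ValueError, msg` to the call
# form `raise ValueError(msg)`, the only spelling Python 3 accepts.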
def parseAccept(field): type, args = parseArgs(field) if len(type) != 3 or type[1] != Token('/'): - raise ValueError, "MIME Type "+str(type)+" invalid." + raise ValueError("MIME Type "+str(type)+" invalid.") # okay, this spec is screwy. A 'q' parameter is used as the separator # between MIME parameters and (as yet undefined) additional HTTP @@ -569,7 +571,7 @@ type, args = parseArgs(header) if len(type) != 3 or type[1] != Token('/'): - raise ValueError, "MIME Type "+str(type)+" invalid." + raise ValueError("MIME Type "+str(type)+" invalid.") args = [(kv[0].lower(), kv[1]) for kv in args] @@ -730,7 +732,7 @@ out ="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype) if mimeType.params: - out+=';'+generateKeyValues(mimeType.params.iteritems()) + out+=';'+generateKeyValues(mimeType.params.items()) if q != 1.0: out+=(';q=%.3f' % (q,)).rstrip('0').rstrip('.') @@ -766,7 +768,8 @@ v = [field.strip().lower() for field in v.split(',')] return k, v -def generateCacheControl((k, v)): +def generateCacheControl(args): + k, v = args if v is None: return str(k) else: @@ -833,7 +836,7 @@ def generateContentType(mimeType): out = "%s/%s" % (mimeType.mediaType, mimeType.mediaSubtype) if mimeType.params: - out += ';' + generateKeyValues(mimeType.params.iteritems()) + out += ';' + generateKeyValues(mimeType.params.items()) return out def generateIfRange(dateOrETag): @@ -854,7 +857,7 @@ try: l = [] - for k, v in dict(challenge).iteritems(): + for k, v in dict(challenge).items(): l.append("%s=%s" % (k, quoteString(v))) _generated.append("%s %s" % (scheme, ", ".join(l))) @@ -864,7 +867,7 @@ return _generated def generateAuthorization(seq): - return [' '.join(seq)] + return [' '.join(str(v) for v in seq)] #### @@ -1326,10 +1329,10 @@ self._headers = {} self.handler = handler if headers is not None: - for key, value in headers.iteritems(): + for key, value in headers.items(): self.setHeader(key, value) if rawHeaders is not None: - for key, value in rawHeaders.iteritems(): + for key, value in rawHeaders.items(): self.setRawHeaders(key, value) def _setRawHeaders(self, headers): @@ -1458,7 +1461,7 @@ """Return an iterator of key, value pairs of all headers contained in this object, as strings. The keys are capitalized in canonical capitalization.""" - for k, v in self._raw_headers.iteritems(): + for k, v in self._raw_headers.items(): if v is _RecalcNeeded: v = self._toRaw(k) yield self.canonicalNameCaps(k), v @@ -1480,7 +1483,7 @@ is strictly an error, but we're nice.). 
""" -iteritems = lambda x: x.iteritems() +iteritems = lambda x: x.items() parser_general_headers = { diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/httpcache.py --- a/web/httpcache.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/httpcache.py Thu Jun 16 14:19:20 2016 +0200 @@ -31,6 +31,7 @@ def set_headers(self): self.req.set_header('Cache-control', 'no-cache') + self.req.set_header('Expires', 'Sat, 01 Jan 2000 00:00:00 GMT') class MaxAgeHTTPCacheManager(NoHTTPCacheManager): @@ -68,7 +69,7 @@ try: req.set_header('Etag', '"%s"' % self.etag()) except NoEtag: - self.req.set_header('Cache-control', 'no-cache') + super(EtagHTTPCacheManager, self).set_headers() return req.set_header('Cache-control', 'must-revalidate,max-age=%s' % self.max_age()) @@ -178,4 +179,3 @@ ('if-none-match', if_none_match), #('if-modified-since', if_modified_since), ] - diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/propertysheet.py --- a/web/propertysheet.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/propertysheet.py Thu Jun 16 14:19:20 2016 +0200 @@ -22,6 +22,8 @@ import re import os import os.path as osp +import tempfile + TYPE_CHECKS = [('STYLESHEETS', list), ('JAVASCRIPTS', list), ('STYLESHEETS_IE', list), ('STYLESHEETS_PRINT', list), @@ -51,13 +53,13 @@ self.clear() self._ordered_propfiles = [] self._propfile_mtime = {} - self._sourcefile_mtime = {} - self._cache = {} def load(self, fpath): scriptglobals = self.context.copy() scriptglobals['__file__'] = fpath - execfile(fpath, scriptglobals, self) + with open(fpath, 'rb') as fobj: + code = compile(fobj.read(), fpath, 'exec') + exec(code, scriptglobals, self) for name, type in TYPE_CHECKS: if name in self: if not isinstance(self[name], type): @@ -67,10 +69,7 @@ self._ordered_propfiles.append(fpath) def need_reload(self): - for rid, (adirectory, rdirectory, mtime) in self._cache.items(): - if os.stat(osp.join(rdirectory, rid)).st_mtime > mtime: - del self._cache[rid] - for fpath, mtime in self._propfile_mtime.iteritems(): + for fpath, mtime in self._propfile_mtime.items(): if os.stat(fpath).st_mtime > mtime: return True return False @@ -86,31 +85,29 @@ self.reload() def process_resource(self, rdirectory, rid): + cachefile = osp.join(self._cache_directory, rid) + self.debug('processing %s/%s into %s', + rdirectory, rid, cachefile) + rcachedir = osp.dirname(cachefile) + if not osp.exists(rcachedir): + os.makedirs(rcachedir) + sourcefile = osp.join(rdirectory, rid) + with open(sourcefile) as f: + content = f.read() + # XXX replace % not followed by a paren by %% to avoid having to do + # this in the source css file ? try: - return self._cache[rid][0] - except KeyError: - cachefile = osp.join(self._cache_directory, rid) - self.debug('caching processed %s/%s into %s', - rdirectory, rid, cachefile) - rcachedir = osp.dirname(cachefile) - if not osp.exists(rcachedir): - os.makedirs(rcachedir) - sourcefile = osp.join(rdirectory, rid) - content = file(sourcefile).read() - # XXX replace % not followed by a paren by %% to avoid having to do - # this in the source css file ? 
- try: - content = self.compile(content) - except ValueError as ex: - self.error("can't process %s/%s: %s", rdirectory, rid, ex) - adirectory = rdirectory - else: - stream = file(cachefile, 'w') + content = self.compile(content) + except ValueError as ex: + self.error("can't process %s/%s: %s", rdirectory, rid, ex) + adirectory = rdirectory + else: + tmpfd, tmpfile = tempfile.mkstemp(dir=rcachedir, prefix=osp.basename(cachefile)) + with os.fdopen(tmpfd, 'w') as stream: stream.write(content) - stream.close() - adirectory = self._cache_directory - self._cache[rid] = (adirectory, rdirectory, os.stat(sourcefile).st_mtime) - return adirectory + os.rename(tmpfile, cachefile) + adirectory = self._cache_directory + return adirectory def compile(self, content): return self._percent_rgx.sub('%%', content) % self diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/request.py --- a/web/request.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/request.py Thu Jun 16 14:19:20 2016 +0200 @@ -22,15 +22,16 @@ import time import random import base64 -import urllib -from StringIO import StringIO from hashlib import sha1 # pylint: disable=E0611 -from Cookie import SimpleCookie from calendar import timegm from datetime import date, datetime -from urlparse import urlsplit -import httplib from warnings import warn +from io import BytesIO + +from six import PY2, binary_type, text_type, string_types +from six.moves import http_client +from six.moves.urllib.parse import urlsplit, quote as urlquote +from six.moves.http_cookies import SimpleCookie from rql.utils import rqlvar_maker @@ -41,7 +42,7 @@ from cubicweb import AuthenticationError from cubicweb.req import RequestSessionBase from cubicweb.uilib import remove_html_tags, js -from cubicweb.utils import SizeConstrainedList, HTMLHead, make_uid +from cubicweb.utils import HTMLHead, make_uid from cubicweb.view import TRANSITIONAL_DOCTYPE_NOEXT from cubicweb.web import (INTERNAL_FIELD_VALUE, LOGGER, NothingToEdit, RequestError, StatusResponse) @@ -51,7 +52,7 @@ _MARKER = object() def build_cb_uid(seed): - sha = sha1('%s%s%s' % (time.time(), seed, random.random())) + sha = sha1(('%s%s%s' % (time.time(), seed, random.random())).encode('ascii')) return 'cb_%s' % (sha.hexdigest()) @@ -137,12 +138,12 @@ #: received headers self._headers_in = Headers() if headers is not None: - for k, v in headers.iteritems(): + for k, v in headers.items(): self._headers_in.addRawHeader(k, v) #: form parameters self.setup_params(form) #: received body - self.content = StringIO() + self.content = BytesIO() # prepare output header #: Header used for the final response self.headers_out = Headers() @@ -242,7 +243,7 @@ '__redirectvid', '__redirectrql')) def setup_params(self, params): - """WARNING: we're intentionaly leaving INTERNAL_FIELD_VALUE here + """WARNING: we're intentionally leaving INTERNAL_FIELD_VALUE here subclasses should overrides to """ @@ -250,12 +251,13 @@ if params is None: return encoding = self.encoding - for param, val in params.iteritems(): + for param, val in params.items(): if isinstance(val, (tuple, list)): - val = [unicode(x, encoding) for x in val] + if PY2: + val = [unicode(x, encoding) for x in val] if len(val) == 1: val = val[0] - elif isinstance(val, str): + elif PY2 and isinstance(val, str): val = unicode(val, encoding) if param in self.no_script_form_params and val: val = self.no_script_form_param(param, val) @@ -317,7 +319,7 @@ return None def set_message(self, msg): - assert isinstance(msg, unicode) + assert isinstance(msg, text_type) self.reset_message() self._msg = msg @@ 
-330,7 +332,7 @@ def set_redirect_message(self, msg): # TODO - this should probably be merged with append_to_redirect_message - assert isinstance(msg, unicode) + assert isinstance(msg, text_type) msgid = self.redirect_message_id() self.session.data[msgid] = msg return msgid @@ -396,26 +398,6 @@ return False return True - def update_breadcrumbs(self): - """stores the last visisted page in session data""" - searchstate = self.search_state[0] - if searchstate == 'normal': - breadcrumbs = self.session.data.get('breadcrumbs') - if breadcrumbs is None: - breadcrumbs = SizeConstrainedList(10) - self.session.data['breadcrumbs'] = breadcrumbs - breadcrumbs.append(self.url()) - else: - url = self.url() - if breadcrumbs and breadcrumbs[-1] != url: - breadcrumbs.append(url) - - def last_visited_page(self): - breadcrumbs = self.session.data.get('breadcrumbs') - if breadcrumbs: - return breadcrumbs.pop() - return self.base_url() - # web edition helpers ##################################################### @cached # so it's writed only once @@ -437,7 +419,7 @@ eids = form['eid'] except KeyError: raise NothingToEdit(self._('no selected entities')) - if isinstance(eids, basestring): + if isinstance(eids, string_types): eids = (eids,) for peid in eids: if withtype: @@ -569,18 +551,18 @@ header = [disposition] unicode_filename = None try: - ascii_filename = filename.encode('ascii') + ascii_filename = filename.encode('ascii').decode('ascii') except UnicodeEncodeError: # fallback filename for very old browser unicode_filename = filename - ascii_filename = filename.encode('ascii', 'ignore') + ascii_filename = filename.encode('ascii', 'ignore').decode('ascii') # escape " and \ # see http://greenbytes.de/tech/tc2231/#attwithfilenameandextparamescaped ascii_filename = ascii_filename.replace('\x5c', r'\\').replace('"', r'\"') header.append('filename="%s"' % ascii_filename) if unicode_filename is not None: # encoded filename according RFC5987 - urlquoted_filename = urllib.quote(unicode_filename.encode('utf-8'), '') + urlquoted_filename = urlquote(unicode_filename.encode('utf-8'), '') header.append("filename*=utf-8''" + urlquoted_filename) self.set_header('content-disposition', ';'.join(header)) @@ -596,7 +578,7 @@ :param localfile: if True, the default data dir prefix is added to the JS filename """ - if isinstance(jsfiles, basestring): + if isinstance(jsfiles, string_types): jsfiles = (jsfiles,) for jsfile in jsfiles: if localfile: @@ -616,7 +598,7 @@ the css inclusion. cf: http://msdn.microsoft.com/en-us/library/ms537512(VS.85).aspx """ - if isinstance(cssfiles, basestring): + if isinstance(cssfiles, string_types): cssfiles = (cssfiles,) if ieonly: if self.ie_browser(): @@ -702,7 +684,7 @@ return urlsplit(self.base_url())[2] def data_url(self, relpath): - """returns the absolute path for a data resouce""" + """returns the absolute path for a data resource""" return self.datadir_url + relpath @cached @@ -722,25 +704,19 @@ Some response cache headers may be set by this method. 
""" modified = True - if self.get_header('Cache-Control') not in ('max-age=0', 'no-cache'): - # Here, we search for any invalid 'not modified' condition - # see http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.3 - validators = get_validators(self._headers_in) - if validators: # if we have no - modified = any(func(val, self.headers_out) for func, val in validators) + # Here, we search for any invalid 'not modified' condition + # see http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.3 + validators = get_validators(self._headers_in) + if validators: # if we have no + modified = any(func(val, self.headers_out) for func, val in validators) # Forge expected response - if modified: - if 'Expires' not in self.headers_out: - # Expires header seems to be required by IE7 -- Are you sure ? - self.add_header('Expires', 'Sat, 01 Jan 2000 00:00:00 GMT') - # /!\ no raise, the function returns and we keep processing the request - else: + if not modified: # overwrite headers_out to forge a brand new not-modified response self.headers_out = self._forge_cached_headers() if self.http_method() in ('HEAD', 'GET'): - self.status_out = httplib.NOT_MODIFIED + self.status_out = http_client.NOT_MODIFIED else: - self.status_out = httplib.PRECONDITION_FAILED + self.status_out = http_client.PRECONDITION_FAILED # XXX replace by True once validate_cache bw compat method is dropped return self.status_out # XXX replace by False once validate_cache bw compat method is dropped @@ -770,7 +746,7 @@ 'cache-control', 'vary', # Others: 'server', 'proxy-authenticate', 'www-authenticate', 'warning'): - value = self._headers_in.getRawHeaders(header) + value = self.headers_out.getRawHeaders(header) if value is not None: headers.setRawHeaders(header, value) return headers @@ -800,7 +776,7 @@ def header_accept_language(self): """returns an ordered list of preferred languages""" acceptedlangs = self.get_header('Accept-Language', raw=False) or {} - for lang, _ in sorted(acceptedlangs.iteritems(), key=lambda x: x[1], + for lang, _ in sorted(acceptedlangs.items(), key=lambda x: x[1], reverse=True): lang = lang.split('-')[0] yield lang @@ -844,7 +820,7 @@ scheme = scheme.lower() try: assert scheme == "basic" - user, passwd = base64.decodestring(rest).split(":", 1) + user, passwd = base64.decodestring(rest.encode('ascii')).split(b":", 1) # XXX HTTP header encoding: use email.Header? 
return user.decode('UTF8'), passwd except Exception as ex: @@ -966,8 +942,10 @@ def __getattribute__(self, attr): raise AuthenticationError() - def __nonzero__(self): + def __bool__(self): return False + + __nonzero__ = __bool__ class _MockAnonymousSession(object): sessionid = 'thisisnotarealsession' @@ -1002,8 +980,6 @@ return self.cnx.transaction_data def set_cnx(self, cnx): - if 'ecache' in cnx.transaction_data: - del cnx.transaction_data['ecache'] self.cnx = cnx self.session = cnx.session self._set_user(cnx.user) @@ -1023,8 +999,8 @@ self.set_language(lang) except KeyError: # this occurs usually during test execution - self._ = self.__ = unicode - self.pgettext = lambda x, y: unicode(y) + self._ = self.__ = text_type + self.pgettext = lambda x, y: text_type(y) entity_metas = _cnx_func('entity_metas') source_defs = _cnx_func('source_defs') @@ -1043,12 +1019,21 @@ # entities cache management ############################################### - entity_cache = _cnx_func('entity_cache') - set_entity_cache = _cnx_func('set_entity_cache') - cached_entities = _cnx_func('cached_entities') - drop_entity_cache = _cnx_func('drop_entity_cache') + def entity_cache(self, eid): + return self.transaction_data['req_ecache'][eid] + + def set_entity_cache(self, entity): + ecache = self.transaction_data.setdefault('req_ecache', {}) + ecache.setdefault(entity.eid, entity) + def cached_entities(self): + return self.transaction_data.get('req_ecache', {}).values() + def drop_entity_cache(self, eid=None): + if eid is None: + self.transaction_data.pop('req_ecache', None) + else: + del self.transaction_data['req_ecache'][eid] CubicWebRequestBase = ConnectionCubicWebRequestBase diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/schemaviewer.py --- a/web/schemaviewer.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/schemaviewer.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,9 @@ """an helper class to display CubicWeb schema using ureports""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ + +from six import string_types from logilab.common.ureports import Section, Title, Table, Link, Span, Text @@ -218,7 +220,7 @@ elif prop == 'constraints': val = ', '.join([c.expression for c in val]) elif isinstance(val, dict): - for key, value in val.iteritems(): + for key, value in val.items(): if isinstance(value, (list, tuple)): val[key] = ', '.join(sorted( str(v) for v in value)) val = str(val) @@ -226,7 +228,7 @@ elif isinstance(val, (list, tuple)): val = sorted(val) val = ', '.join(str(v) for v in val) - elif val and isinstance(val, basestring): + elif val and isinstance(val, string_types): val = _(val) else: val = str(val) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/data/schema.py --- a/web/test/data/schema.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/data/schema.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,6 +20,9 @@ String, Int, Datetime, Boolean, Float) from yams.constraints import IntervalBoundConstraint +from cubicweb import _ + + class Salesterm(EntityType): described_by_test = SubjectRelation('File', cardinality='1*', composite='subject', inlined=True) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/data/static/jstests/ajax_url0.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/test/data/static/jstests/ajax_url0.html Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,3 @@ +
+

Hello

+
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/data/static/jstests/ajax_url1.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/test/data/static/jstests/ajax_url1.html Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,6 @@ +
+
+ +
+

Hello

+
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/data/static/jstests/ajaxresult.json --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/test/data/static/jstests/ajaxresult.json Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,1 @@ +["foo", "bar"] diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/data/static/jstests/test_ajax.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/test/data/static/jstests/test_ajax.html Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + +
+

cubicweb.ajax.js functions tests

+

+
    + + diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/data/static/jstests/test_ajax.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/test/data/static/jstests/test_ajax.js Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,274 @@ +$(document).ready(function() { + + QUnit.module("ajax", { + setup: function() { + this.scriptsLength = $('head script[src]').length-1; + this.cssLength = $('head link[rel=stylesheet]').length-1; + // re-initialize cw loaded cache so that each tests run in a + // clean environment, have a lookt at _loadAjaxHtmlHead implementation + // in cubicweb.ajax.js for more information. + cw.loaded_scripts = []; + cw.loaded_links = []; + }, + teardown: function() { + $('head script[src]:lt(' + ($('head script[src]').length - 1 - this.scriptsLength) + ')').remove(); + $('head link[rel=stylesheet]:gt(' + this.cssLength + ')').remove(); + } + }); + + function jsSources() { + return $.map($('head script[src]'), function(script) { + return script.getAttribute('src'); + }); + } + + QUnit.test('test simple h1 inclusion (ajax_url0.html)', function (assert) { + assert.expect(3); + assert.equal($('#qunit-fixture').children().length, 0); + var done = assert.async(); + $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET') + .addCallback(function() { + try { + assert.equal($('#qunit-fixture').children().length, 1); + assert.equal($('#qunit-fixture h1').html(), 'Hello'); + } finally { + done(); + }; + } + ); + }); + + QUnit.test('test simple html head inclusion (ajax_url1.html)', function (assert) { + assert.expect(6); + var scriptsIncluded = jsSources(); + assert.equal(jQuery.inArray('http://foo.js', scriptsIncluded), - 1); + var done = assert.async(); + $('#qunit-fixture').loadxhtml('static/jstests/ajax_url1.html', null, 'GET') + .addCallback(function() { + try { + var origLength = scriptsIncluded.length; + scriptsIncluded = jsSources(); + // check that foo.js has been prepended to + assert.equal(scriptsIncluded.length, origLength + 1); + assert.equal(scriptsIncluded.indexOf('http://foo.js'), 0); + // check that
    has been removed + assert.equal($('#qunit-fixture').children().length, 1); + assert.equal($('div.ajaxHtmlHead').length, 0); + assert.equal($('#qunit-fixture h1').html(), 'Hello'); + } finally { + done(); + }; + } + ); + }); + + QUnit.test('test addCallback', function (assert) { + assert.expect(3); + assert.equal($('#qunit-fixture').children().length, 0); + var done = assert.async(); + var d = $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET'); + d.addCallback(function() { + try { + assert.equal($('#qunit-fixture').children().length, 1); + assert.equal($('#qunit-fixture h1').html(), 'Hello'); + } finally { + done(); + }; + }); + }); + + QUnit.test('test callback after synchronous request', function (assert) { + assert.expect(1); + var deferred = new Deferred(); + var result = jQuery.ajax({ + url: 'static/jstests/ajax_url0.html', + async: false, + beforeSend: function(xhr) { + deferred._req = xhr; + }, + success: function(data, status) { + deferred.success(data); + } + }); + var done = assert.async(); + deferred.addCallback(function() { + try { + // add an assertion to ensure the callback is executed + assert.ok(true, "callback is executed"); + } finally { + done(); + }; + }); + }); + + QUnit.test('test addCallback with parameters', function (assert) { + assert.expect(3); + assert.equal($('#qunit-fixture').children().length, 0); + var done = assert.async(); + var d = $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET'); + d.addCallback(function(data, req, arg1, arg2) { + try { + assert.equal(arg1, 'Hello'); + assert.equal(arg2, 'world'); + } finally { + done(); + }; + }, + 'Hello', 'world'); + }); + + QUnit.test('test callback after synchronous request with parameters', function (assert) { + assert.expect(3); + var deferred = new Deferred(); + deferred.addCallback(function(data, req, arg1, arg2) { + // add an assertion to ensure the callback is executed + try { + assert.ok(true, "callback is executed"); + assert.equal(arg1, 'Hello'); + assert.equal(arg2, 'world'); + } finally { + done(); + }; + }, + 'Hello', 'world'); + deferred.addErrback(function() { + // throw an exception to start errback chain + try { + throw this._error; + } finally { + done(); + }; + }); + var done = assert.async(); + var result = jQuery.ajax({ + url: 'static/jstests/ajax_url0.html', + async: false, + beforeSend: function(xhr) { + deferred._req = xhr; + }, + success: function(data, status) { + deferred.success(data); + } + }); + }); + + QUnit.test('test addErrback', function (assert) { + assert.expect(1); + var done = assert.async(); + var d = $('#qunit-fixture').loadxhtml('static/jstests/nonexistent.html', null, 'GET'); + d.addCallback(function() { + // should not be executed + assert.ok(false, "callback is executed"); + }); + d.addErrback(function() { + try { + assert.ok(true, "errback is executed"); + } finally { + done(); + }; + }); + }); + + QUnit.test('test callback execution order', function (assert) { + assert.expect(3); + var counter = 0; + var done = assert.async(); + var d = $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET'); + d.addCallback(function() { + assert.equal(++counter, 1); // should be executed first + }); + d.addCallback(function() { + assert.equal(++counter, 2); + }); + d.addCallback(function() { + try { + assert.equal(++counter, 3); + } finally { + done(); + } + }); + }); + + QUnit.test('test already included resources are ignored (ajax_url1.html)', function (assert) { + assert.expect(10); + var scriptsIncluded = 
jsSources(); + // NOTE: + assert.equal(jQuery.inArray('http://foo.js', scriptsIncluded), -1); + assert.equal($('head link').length, 1); + /* use endswith because in pytest context we have an absolute path */ + assert.ok($('head link').attr('href').endswith('/qunit.css'), 'qunit.css is loaded'); + var done = assert.async(); + $('#qunit-fixture').loadxhtml('static/jstests/ajax_url1.html', null, 'GET') + .addCallback(function() { + var origLength = scriptsIncluded.length; + scriptsIncluded = jsSources(); + try { + // check that foo.js has been inserted in + assert.equal(scriptsIncluded.length, origLength + 1); + assert.equal(scriptsIncluded.indexOf('http://foo.js'), 0); + // check that
    has been removed + assert.equal($('#qunit-fixture').children().length, 1); + assert.equal($('div.ajaxHtmlHead').length, 0); + assert.equal($('#qunit-fixture h1').html(), 'Hello'); + // qunit.css is not added twice + assert.equal($('head link').length, 1); + /* use endswith because in pytest context we have an absolute path */ + assert.ok($('head link').attr('href').endswith('/qunit.css'), 'qunit.css is loaded'); + } finally { + done(); + } + } + ); + }); + + QUnit.test('test synchronous request loadRemote', function (assert) { + var res = loadRemote('static/jstests/ajaxresult.json', {}, + 'GET', true); + assert.deepEqual(res, ['foo', 'bar']); + }); + + QUnit.test('test event on CubicWeb', function (assert) { + assert.expect(1); + var done = assert.async(); + var events = null; + $(CubicWeb).bind('server-response', function() { + // check that server-response event on CubicWeb is triggered + events = 'CubicWeb'; + }); + $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET') + .addCallback(function() { + try { + assert.equal(events, 'CubicWeb'); + } finally { + done(); + }; + } + ); + }); + + QUnit.test('test event on node', function (assert) { + assert.expect(3); + var done = assert.async(); + var nodes = []; + $('#qunit-fixture').bind('server-response', function() { + nodes.push('node'); + }); + $(CubicWeb).bind('server-response', function() { + nodes.push('CubicWeb'); + }); + $('#qunit-fixture').loadxhtml('static/jstests/ajax_url0.html', null, 'GET') + .addCallback(function() { + try { + assert.equal(nodes.length, 2); + // check that server-response event on CubicWeb is triggered + // only once and event server-response on node is triggered + assert.equal(nodes[0], 'CubicWeb'); + assert.equal(nodes[1], 'node'); + } finally { + done(); + }; + } + ); + }); +}); + diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/data/static/jstests/test_htmlhelpers.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/test/data/static/jstests/test_htmlhelpers.html Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + +
    +

    cubicweb.htmlhelpers.js functions tests

    +

    +
      + + diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/data/static/jstests/test_htmlhelpers.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/test/data/static/jstests/test_htmlhelpers.js Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,36 @@ +$(document).ready(function() { + + QUnit.module("module2", { + setup: function() { + $('#qunit-fixture').append(''); + } + }); + + QUnit.test("test first selected", function (assert) { + $('#theselect').append('' + + '' + + '' + + ''); + var selected = firstSelected(document.getElementById("theselect")); + assert.equal(selected.value, 'bar'); + }); + + QUnit.test("test first selected 2", function (assert) { + $('#theselect').append('' + + '' + + '' + + ''); + var selected = firstSelected(document.getElementById("theselect")); + assert.equal(selected, null); + }); + + QUnit.module("visibilty"); + QUnit.test('toggleVisibility', function (assert) { + $('#qunit-fixture').append('
      '); + toggleVisibility('foo'); + assert.ok($('#foo').hasClass('hidden'), 'check hidden class is set'); + }); + +}); + diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/data/static/jstests/test_utils.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/test/data/static/jstests/test_utils.html Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + +
      +

      cw.utils functions tests

      +

      +
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/data/static/jstests/test_utils.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/test/data/static/jstests/test_utils.js Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,92 @@ +$(document).ready(function() { + + QUnit.module("datetime"); + + QUnit.test("test full datetime", function (assert) { + assert.equal(cw.utils.toISOTimestamp(new Date(1986, 3, 18, 10, 30, 0, 0)), + '1986-04-18 10:30:00'); + }); + + QUnit.test("test only date", function (assert) { + assert.equal(cw.utils.toISOTimestamp(new Date(1986, 3, 18)), '1986-04-18 00:00:00'); + }); + + QUnit.test("test null", function (assert) { + assert.equal(cw.utils.toISOTimestamp(null), null); + }); + + QUnit.module("parsing"); + QUnit.test("test basic number parsing", function (assert) { + var d = strptime('2008/08/08', '%Y/%m/%d'); + assert.deepEqual(datetuple(d), [2008, 8, 8, 0, 0]); + d = strptime('2008/8/8', '%Y/%m/%d'); + assert.deepEqual(datetuple(d), [2008, 8, 8, 0, 0]); + d = strptime('8/8/8', '%Y/%m/%d'); + assert.deepEqual(datetuple(d), [8, 8, 8, 0, 0]); + d = strptime('0/8/8', '%Y/%m/%d'); + assert.deepEqual(datetuple(d), [0, 8, 8, 0, 0]); + d = strptime('-10/8/8', '%Y/%m/%d'); + assert.deepEqual(datetuple(d), [-10, 8, 8, 0, 0]); + d = strptime('-35000', '%Y'); + assert.deepEqual(datetuple(d), [-35000, 1, 1, 0, 0]); + }); + + QUnit.test("test custom format parsing", function (assert) { + var d = strptime('2008-08-08', '%Y-%m-%d'); + assert.deepEqual(datetuple(d), [2008, 8, 8, 0, 0]); + d = strptime('2008 - ! 08: 08', '%Y - ! %m: %d'); + assert.deepEqual(datetuple(d), [2008, 8, 8, 0, 0]); + d = strptime('2008-08-08 12:14', '%Y-%m-%d %H:%M'); + assert.deepEqual(datetuple(d), [2008, 8, 8, 12, 14]); + d = strptime('2008-08-08 1:14', '%Y-%m-%d %H:%M'); + assert.deepEqual(datetuple(d), [2008, 8, 8, 1, 14]); + d = strptime('2008-08-08 01:14', '%Y-%m-%d %H:%M'); + assert.deepEqual(datetuple(d), [2008, 8, 8, 1, 14]); + }); + + QUnit.module("sliceList"); + QUnit.test("test slicelist", function (assert) { + var list = ['a', 'b', 'c', 'd', 'e', 'f']; + assert.deepEqual(cw.utils.sliceList(list, 2), ['c', 'd', 'e', 'f']); + assert.deepEqual(cw.utils.sliceList(list, 2, -2), ['c', 'd']); + assert.deepEqual(cw.utils.sliceList(list, -3), ['d', 'e', 'f']); + assert.deepEqual(cw.utils.sliceList(list, 0, -2), ['a', 'b', 'c', 'd']); + assert.deepEqual(cw.utils.sliceList(list), list); + }); + + QUnit.module("formContents", { + setup: function() { + $('#qunit-fixture').append('<form id="test-form"></form>'); + } + }); + // XXX test fckeditor + QUnit.test("test formContents", function (assert) { + $('#test-form').append('<input name="input-text" type="text" value="toto" />'); + $('#test-form').append('<textarea rows="10" cols="30" name="mytextarea">Hello World!</textarea> '); + $('#test-form').append('<input name="choice" type="radio" value="yes" />'); + $('#test-form').append('<input name="choice" type="radio" value="no" checked="checked" />'); + $('#test-form').append('<input name="check" type="checkbox" value="yes" />'); + $('#test-form').append('<input name="check" type="checkbox" value="no" checked="checked" />'); + $('#test-form').append('<select id="theselect" name="theselect" multiple="multiple" size="2"></select>'); + $('#theselect').append('<option selected="selected" value="foo">foo</option>' + + '<option value="bar">bar</option>'); + //Append an unchecked radio input : should not be in formContents list + $('#test-form').append('<input name="unchecked-choice" type="radio" value="one" />'); + $('#test-form').append('<input name="unchecked-choice" type="radio" value="two" />'); + assert.deepEqual(cw.utils.formContents($('#test-form')[0]), [ + ['input-text', 'mytextarea', 'choice', 'check', 'theselect'], + ['toto', 'Hello World!', 'no', 'no', 'foo'] + ]); + }); +}); +
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/data/static/jstests/utils.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/test/data/static/jstests/utils.js Thu Jun 16 14:19:20 2016 +0200 @@ -0,0 +1,29 @@ +function datetuple(d) { + return [d.getFullYear(), d.getMonth()+1, d.getDate(), + d.getHours(), d.getMinutes()]; +} + +function pprint(obj) { + print('{'); + for(k in obj) { + print(' ' + k + ' = ' + obj[k]); + } + print('}'); +} + +function arrayrepr(array) { + return '[' + array.join(', ') + ']'; +} + +function assertArrayEquals(array1, array2) { + if (array1.length != array2.length) { + throw new crosscheck.AssertionFailure(array1.join(', ') + ' != ' + array2.join(', ')); + } + for (var i=0; i<array1.length; i++) { + if (array1[i] != array2[i]) { + throw new crosscheck.AssertionFailure(arrayrepr(array1) + ' != ' + arrayrepr(array2)); + } + } +}
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/jstests/ajax_url0.html --- a/web/test/jstests/ajax_url0.html Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 [hunk header unrecoverable; the deleted fixture page contained <h1>Hello</h1>]
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/jstests/ajax_url1.html --- a/web/test/jstests/ajax_url1.html Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,6 +0,0 @@ [deleted 6-line fixture page: an ajaxHtmlHead block pulling http://foo.js plus <h1>Hello</h1>; markup unrecoverable]
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/jstests/ajax_url2.html --- a/web/test/jstests/ajax_url2.html Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ [deleted 7-line fixture page: an ajaxHtmlHead block with two head resources plus <h1>Hello</h1>; markup unrecoverable]
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/jstests/ajaxresult.json --- a/web/test/jstests/ajaxresult.json Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -["foo", "bar"]
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/jstests/test_ajax.html --- a/web/test/jstests/test_ajax.html Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,26 +0,0 @@ [deleted 26-line QUnit harness page titled "cubicweb.ajax.js functions tests"; markup unrecoverable]
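Note: the unittest_http.py hunks further down in this backport add assertions that a 304 (not modified) response must still advertise the entity's current ETag. A minimal sketch of the conditional-GET rule those tests pin down, in plain Python with illustrative names (this is not cubicweb's request API):

    # RFC 7232 semantics: '*' matches any current ETag, otherwise one of
    # the comma-separated tags must equal it.
    def etag_matches(if_none_match, current_etag):
        tags = [tag.strip() for tag in if_none_match.split(',')]
        return '*' in tags or current_etag in tags

    # A 304 short-circuits the body but, per the new assertions, keeps
    # echoing the current ETag header.
    def conditional_get(headers_in, current_etag, body=b'full response'):
        if etag_matches(headers_in.get('if-none-match', ''), current_etag):
            return 304, {'etag': current_etag}, b''
        return 200, {'etag': current_etag}, body

    assert conditional_get({'if-none-match': 'loutre, babar'}, 'babar')[0] == 304
    assert conditional_get({'if-none-match': 'rhino'}, 'babar')[0] == 200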
      - - diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/jstests/test_ajax.js --- a/web/test/jstests/test_ajax.js Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,274 +0,0 @@ -$(document).ready(function() { - - QUnit.module("ajax", { - setup: function() { - this.scriptsLength = $('head script[src]').length-1; - this.cssLength = $('head link[rel=stylesheet]').length-1; - // re-initialize cw loaded cache so that each tests run in a - // clean environment, have a lookt at _loadAjaxHtmlHead implementation - // in cubicweb.ajax.js for more information. - cw.loaded_scripts = []; - cw.loaded_links = []; - }, - teardown: function() { - $('head script[src]:lt(' + ($('head script[src]').length - 1 - this.scriptsLength) + ')').remove(); - $('head link[rel=stylesheet]:gt(' + this.cssLength + ')').remove(); - } - }); - - function jsSources() { - return $.map($('head script[src]'), function(script) { - return script.getAttribute('src'); - }); - } - - QUnit.test('test simple h1 inclusion (ajax_url0.html)', function (assert) { - assert.expect(3); - assert.equal($('#qunit-fixture').children().length, 0); - var done = assert.async(); - $('#qunit-fixture').loadxhtml(BASE_URL + 'cwsoftwareroot/web/test/jstests/ajax_url0.html') - .addCallback(function() { - try { - assert.equal($('#qunit-fixture').children().length, 1); - assert.equal($('#qunit-fixture h1').html(), 'Hello'); - } finally { - done(); - }; - } - ); - }); - - QUnit.test('test simple html head inclusion (ajax_url1.html)', function (assert) { - assert.expect(6); - var scriptsIncluded = jsSources(); - assert.equal(jQuery.inArray('http://foo.js', scriptsIncluded), - 1); - var done = assert.async(); - $('#qunit-fixture').loadxhtml(BASE_URL + 'cwsoftwareroot/web/test/jstests/ajax_url1.html') - .addCallback(function() { - try { - var origLength = scriptsIncluded.length; - scriptsIncluded = jsSources(); - // check that foo.js has been prepended to - assert.equal(scriptsIncluded.length, origLength + 1); - assert.equal(scriptsIncluded.indexOf('http://foo.js'), 0); - // check that
      has been removed - assert.equal($('#qunit-fixture').children().length, 1); - assert.equal($('div.ajaxHtmlHead').length, 0); - assert.equal($('#qunit-fixture h1').html(), 'Hello'); - } finally { - done(); - }; - } - ); - }); - - QUnit.test('test addCallback', function (assert) { - assert.expect(3); - assert.equal($('#qunit-fixture').children().length, 0); - var done = assert.async(); - var d = $('#qunit-fixture').loadxhtml(BASE_URL + 'cwsoftwareroot/web/test/jstests/ajax_url0.html'); - d.addCallback(function() { - try { - assert.equal($('#qunit-fixture').children().length, 1); - assert.equal($('#qunit-fixture h1').html(), 'Hello'); - } finally { - done(); - }; - }); - }); - - QUnit.test('test callback after synchronous request', function (assert) { - assert.expect(1); - var deferred = new Deferred(); - var result = jQuery.ajax({ - url: BASE_URL + 'cwsoftwareroot/web/test/jstests/ajax_url0.html', - async: false, - beforeSend: function(xhr) { - deferred._req = xhr; - }, - success: function(data, status) { - deferred.success(data); - } - }); - var done = assert.async(); - deferred.addCallback(function() { - try { - // add an assertion to ensure the callback is executed - assert.ok(true, "callback is executed"); - } finally { - done(); - }; - }); - }); - - QUnit.test('test addCallback with parameters', function (assert) { - assert.expect(3); - assert.equal($('#qunit-fixture').children().length, 0); - var done = assert.async(); - var d = $('#qunit-fixture').loadxhtml(BASE_URL + 'cwsoftwareroot/web/test/jstests/ajax_url0.html'); - d.addCallback(function(data, req, arg1, arg2) { - try { - assert.equal(arg1, 'Hello'); - assert.equal(arg2, 'world'); - } finally { - done(); - }; - }, - 'Hello', 'world'); - }); - - QUnit.test('test callback after synchronous request with parameters', function (assert) { - assert.expect(3); - var deferred = new Deferred(); - deferred.addCallback(function(data, req, arg1, arg2) { - // add an assertion to ensure the callback is executed - try { - assert.ok(true, "callback is executed"); - assert.equal(arg1, 'Hello'); - assert.equal(arg2, 'world'); - } finally { - done(); - }; - }, - 'Hello', 'world'); - deferred.addErrback(function() { - // throw an exception to start errback chain - try { - throw this._error; - } finally { - done(); - }; - }); - var done = assert.async(); - var result = jQuery.ajax({ - url: BASE_URL + 'cwsoftwareroot/web/test/jstests/ajax_url0.html', - async: false, - beforeSend: function(xhr) { - deferred._req = xhr; - }, - success: function(data, status) { - deferred.success(data); - } - }); - }); - - QUnit.test('test addErrback', function (assert) { - assert.expect(1); - var done = assert.async(); - var d = $('#qunit-fixture').loadxhtml(BASE_URL + 'cwsoftwareroot/web/test/jstests/nonexistent.html'); - d.addCallback(function() { - // should not be executed - assert.ok(false, "callback is executed"); - }); - d.addErrback(function() { - try { - assert.ok(true, "errback is executed"); - } finally { - done(); - }; - }); - }); - - QUnit.test('test callback execution order', function (assert) { - assert.expect(3); - var counter = 0; - var done = assert.async(); - var d = $('#qunit-fixture').loadxhtml(BASE_URL + 'cwsoftwareroot/web/test/jstests/ajax_url0.html'); - d.addCallback(function() { - assert.equal(++counter, 1); // should be executed first - }); - d.addCallback(function() { - assert.equal(++counter, 2); - }); - d.addCallback(function() { - try { - assert.equal(++counter, 3); - } finally { - done(); - } - }); - }); - - QUnit.test('test already 
included resources are ignored (ajax_url1.html)', function (assert) { - assert.expect(10); - var scriptsIncluded = jsSources(); - // NOTE: - assert.equal(jQuery.inArray('http://foo.js', scriptsIncluded), -1); - assert.equal($('head link').length, 1); - /* use endswith because in pytest context we have an absolute path */ - assert.ok($('head link').attr('href').endswith('/qunit.css'), 'qunit.css is loaded'); - var done = assert.async(); - $('#qunit-fixture').loadxhtml(BASE_URL + 'cwsoftwareroot/web/test/jstests/ajax_url1.html') - .addCallback(function() { - var origLength = scriptsIncluded.length; - scriptsIncluded = jsSources(); - try { - // check that foo.js has been inserted in - assert.equal(scriptsIncluded.length, origLength + 1); - assert.equal(scriptsIncluded.indexOf('http://foo.js'), 0); - // check that
<div class="ajaxHtmlHead"> has been removed - assert.equal($('#qunit-fixture').children().length, 1); - assert.equal($('div.ajaxHtmlHead').length, 0); - assert.equal($('#qunit-fixture h1').html(), 'Hello'); - // qunit.css is not added twice - assert.equal($('head link').length, 1); - /* use endswith because in pytest context we have an absolute path */ - assert.ok($('head link').attr('href').endswith('/qunit.css'), 'qunit.css is loaded'); - } finally { - done(); - } - } - ); - }); - - QUnit.test('test synchronous request loadRemote', function (assert) { - var res = loadRemote(BASE_URL + 'cwsoftwareroot/web/test/jstests/ajaxresult.json', {}, - 'GET', true); - assert.deepEqual(res, ['foo', 'bar']); - }); - - QUnit.test('test event on CubicWeb', function (assert) { - assert.expect(1); - var done = assert.async(); - var events = null; - $(CubicWeb).bind('server-response', function() { - // check that server-response event on CubicWeb is triggered - events = 'CubicWeb'; - }); - $('#qunit-fixture').loadxhtml(BASE_URL + 'cwsoftwareroot/web/test/jstests/ajax_url0.html') - .addCallback(function() { - try { - assert.equal(events, 'CubicWeb'); - } finally { - done(); - }; - } - ); - }); - - QUnit.test('test event on node', function (assert) { - assert.expect(3); - var done = assert.async(); - var nodes = []; - $('#qunit-fixture').bind('server-response', function() { - nodes.push('node'); - }); - $(CubicWeb).bind('server-response', function() { - nodes.push('CubicWeb'); - }); - $('#qunit-fixture').loadxhtml(BASE_URL + 'cwsoftwareroot/web/test/jstests/ajax_url0.html') - .addCallback(function() { - try { - assert.equal(nodes.length, 2); - // check that server-response event on CubicWeb is triggered - // only once and event server-response on node is triggered - assert.equal(nodes[0], 'CubicWeb'); - assert.equal(nodes[1], 'node'); - } finally { - done(); - }; - } - ); - }); -}); -
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/jstests/test_htmlhelpers.html --- a/web/test/jstests/test_htmlhelpers.html Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,22 +0,0 @@ [deleted 22-line QUnit harness page titled "cubicweb.htmlhelpers.js functions tests"; markup unrecoverable]
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/jstests/test_htmlhelpers.js --- a/web/test/jstests/test_htmlhelpers.js Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,36 +0,0 @@ [removes the 36-line original of the test_htmlhelpers.js file added above under web/test/data/static/jstests/]
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/jstests/test_utils.html --- a/web/test/jstests/test_utils.html Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,22 +0,0 @@ [deleted 22-line QUnit harness page titled "cw.utils functions tests"; markup unrecoverable]
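The Python test hunks that follow all apply the same Python 2/3 porting idioms introduced in cubicweb/__init__.py: unicode and str give way to six.text_type and explicit bytes literals, and base64 operates on bytes only. A hedged summary of the pattern (a standalone sketch assuming cubicweb and six are importable, with made-up credentials; not code from this patch):

    import base64
    from six import text_type
    from cubicweb import Binary

    eid = text_type(1234)                  # eids travel as text in forms
    blob = Binary(b'Babar is not dead!')   # Binary now only accepts bytes
    assert blob.getvalue() == b'Babar is not dead!'

    # HTTP basic-auth credentials: encode bytes, then decode the result
    # back to text before putting it in a header.
    authstr = base64.encodestring(b'login:password')
    header = 'basic %s' % authstr.decode('ascii')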
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/jstests/test_utils.js --- a/web/test/jstests/test_utils.js Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,92 +0,0 @@ [removes the 92-line original of the test_utils.js file added above under web/test/data/static/jstests/]
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/jstests/utils.js --- a/web/test/jstests/utils.js Thu Mar 24 09:43:25 2016 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,29 +0,0 @@ [removes the 29-line original of the utils.js file added above under web/test/data/static/jstests/]
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_application.py --- a/web/test/unittest_application.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_application.py Thu Jun 16 14:19:20 2016 +0200 [first hunk header and the LGPL boilerplate context lines are unrecoverable] # with CubicWeb. If not, see <http://www.gnu.org/licenses/>. """unit tests for cubicweb.web.application""" -import base64, Cookie -import httplib +import base64 + +from six import text_type +from six.moves import http_client +from six.moves.http_cookies import SimpleCookie from logilab.common.testlib import TestCase, unittest_main from logilab.common.decorators import clear_cache, classproperty @@ -178,8 +181,8 @@ def test_publish_validation_error(self): with self.admin_access.web_request() as req: - user = self.user(req) - eid = unicode(user.eid) + user = req.user + eid = text_type(user.eid) req.form = { 'eid': eid, '__type:'+eid: 'CWUser', '_cw_entity_fields:'+eid: 'login-subject', @@ -519,14 +522,14 @@ with self.admin_access.web_request(vid='test.ajax.error') as req: req.ajax_request = True page = app.handle_request(req, '') - self.assertEqual(httplib.INTERNAL_SERVER_ERROR, + self.assertEqual(http_client.INTERNAL_SERVER_ERROR, req.status_out) def _test_cleaned(self, kwargs, injected, cleaned): with self.admin_access.web_request(**kwargs) as req: page = self.app_handle_request(req, 'view') - self.assertNotIn(injected, page) - self.assertIn(cleaned, page) + self.assertNotIn(injected.encode('ascii'), page) + self.assertIn(cleaned.encode('ascii'), page) def test_nonregr_script_kiddies(self): """test against current script injection""" @@ -566,8 +569,8 @@ self.app.handle_request(req, 'login') self.assertEqual(401, req.status_out) clear_cache(req, 'get_authorization') - authstr = base64.encodestring('%s:%s' % (self.admlogin, self.admpassword)) - req.set_request_header('Authorization', 'basic %s' % authstr) + authstr = base64.encodestring(('%s:%s' % (self.admlogin, self.admpassword)).encode('ascii')) + req.set_request_header('Authorization', 'basic %s' % authstr.decode('ascii')) self.assertAuthSuccess(req, origsession) self.assertRaises(LogOut, self.app_handle_request, req, 'logout') self.assertEqual(len(self.open_sessions), 0) @@ -580,8 +583,8 @@ except Redirect as redir: self.fail('anonymous user should get login form') clear_cache(req, 'get_authorization') - self.assertIn('__login', form) - self.assertIn('__password', form) + self.assertIn(b'__login', form) + self.assertIn(b'__password', form)
self.assertFalse(req.cnx) # Mock cnx are False req.form['__login'] = self.admlogin req.form['__password'] = self.admpassword @@ -613,7 +616,7 @@ def _reset_cookie(self, req): # preparing the suite of the test # set session id in cookie - cookie = Cookie.SimpleCookie() + cookie = SimpleCookie() sessioncookie = self.app.session_handler.session_cookie(req) cookie[sessioncookie] = req.session.sessionid req.set_request_header('Cookie', cookie[sessioncookie].OutputString(), @@ -642,11 +645,11 @@ def test_http_auth_anon_allowed(self): req, origsession = self.init_authentication('http', 'anon') self._test_auth_anon(req) - authstr = base64.encodestring('toto:pouet') - req.set_request_header('Authorization', 'basic %s' % authstr) + authstr = base64.encodestring(b'toto:pouet') + req.set_request_header('Authorization', 'basic %s' % authstr.decode('ascii')) self._test_anon_auth_fail(req) - authstr = base64.encodestring('%s:%s' % (self.admlogin, self.admpassword)) - req.set_request_header('Authorization', 'basic %s' % authstr) + authstr = base64.encodestring(('%s:%s' % (self.admlogin, self.admpassword)).encode('ascii')) + req.set_request_header('Authorization', 'basic %s' % authstr.decode('ascii')) self.assertAuthSuccess(req, origsession) self.assertRaises(LogOut, self.app_handle_request, req, 'logout') self.assertEqual(len(self.open_sessions), 0) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_facet.py --- a/web/test/unittest_facet.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_facet.py Thu Jun 16 14:19:20 2016 +0200 @@ -70,8 +70,8 @@ def test_relation_optional_rel(self): with self.admin_access.web_request() as req: - rset = req.cnx.execute('Any X,GROUP_CONCAT(GN) GROUPBY X ' - 'WHERE X in_group G?, G name GN, NOT G name "users"') + rset = req.cnx.execute(u'Any X,GROUP_CONCAT(GN) GROUPBY X ' + 'WHERE X in_group G?, G name GN, NOT G name "users"') rqlst = rset.syntax_tree().copy() select = rqlst.children[0] filtered_variable, baserql = facet.init_facets(rset, select) @@ -87,18 +87,18 @@ self.assertEqual(f.vocabulary(), [(u'guests', guests), (u'managers', managers)]) # ensure rqlst is left unmodified - self.assertEqual(rqlst.as_string(), "DISTINCT Any WHERE X in_group G?, G name GN, NOT G name 'users'") + self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X in_group G?, G name GN, NOT G name "users"') #rqlst = rset.syntax_tree() self.assertEqual(sorted(f.possible_values()), [str(guests), str(managers)]) # ensure rqlst is left unmodified - self.assertEqual(rqlst.as_string(), "DISTINCT Any WHERE X in_group G?, G name GN, NOT G name 'users'") + self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X in_group G?, G name GN, NOT G name "users"') req.form[f.__regid__] = str(guests) f.add_rql_restrictions() # selection is cluttered because rqlst has been prepared for facet (it # is not in real life) self.assertEqual(f.select.as_string(), - "DISTINCT Any WHERE X in_group G?, G name GN, NOT G name 'users', X in_group D, D eid %s" % guests) + 'DISTINCT Any WHERE X in_group G?, G name GN, NOT G name "users", X in_group D, D eid %s' % guests) def test_relation_no_relation_1(self): with self.admin_access.web_request() as req: @@ -141,12 +141,12 @@ ['guests', 'managers']) # ensure rqlst is left unmodified self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser') - f._cw.form[f.__regid__] = 'guests' + f._cw.form[f.__regid__] = u'guests' f.add_rql_restrictions() # selection is cluttered because rqlst has been prepared for facet (it # is not in real life) 
self.assertEqual(f.select.as_string(), - "DISTINCT Any WHERE X is CWUser, X in_group E, E name 'guests'") + 'DISTINCT Any WHERE X is CWUser, X in_group E, E name "guests"') def test_hasrelation(self): with self.admin_access.web_request() as req: @@ -207,12 +207,12 @@ ['admin', 'anon']) # ensure rqlst is left unmodified self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') - req.form[f.__regid__] = 'admin' + req.form[f.__regid__] = u'admin' f.add_rql_restrictions() # selection is cluttered because rqlst has been prepared for facet (it # is not in real life) self.assertEqual(f.select.as_string(), - "DISTINCT Any WHERE X is CWUser, X login 'admin'") + 'DISTINCT Any WHERE X is CWUser, X login "admin"') def test_bitfield(self): with self.admin_access.web_request() as req: @@ -310,12 +310,12 @@ self.assertEqual(f.possible_values(), ['admin',]) # ensure rqlst is left unmodified self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser') - req.form[f.__regid__] = 'admin' + req.form[f.__regid__] = u'admin' f.add_rql_restrictions() # selection is cluttered because rqlst has been prepared for facet (it # is not in real life) self.assertEqual(f.select.as_string(), - "DISTINCT Any WHERE X is CWUser, X created_by G, G owned_by H, H login 'admin'") + 'DISTINCT Any WHERE X is CWUser, X created_by G, G owned_by H, H login "admin"') def test_rql_path_check_filter_label_variable(self): with self.admin_access.web_request() as req: @@ -359,13 +359,13 @@ def prepareg_aggregat_rqlst(self, req): return self.prepare_rqlst(req, - 'Any 1, COUNT(X) WHERE X is CWUser, X creation_date XD, ' - 'X modification_date XM, Y creation_date YD, Y is CWGroup ' - 'HAVING DAY(XD)>=DAY(YD) AND DAY(XM)<=DAY(YD)', 'X', - expected_baserql='Any 1,COUNT(X) WHERE X is CWUser, X creation_date XD, ' + u'Any 1, COUNT(X) WHERE X is CWUser, X creation_date XD, ' + 'X modification_date XM, Y creation_date YD, Y is CWGroup ' + 'HAVING DAY(XD)>=DAY(YD) AND DAY(XM)<=DAY(YD)', 'X', + expected_baserql=u'Any 1,COUNT(X) WHERE X is CWUser, X creation_date XD, ' 'X modification_date XM, Y creation_date YD, Y is CWGroup ' 'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)', - expected_preparedrql='DISTINCT Any WHERE X is CWUser, X creation_date XD, ' + expected_preparedrql=u'DISTINCT Any WHERE X is CWUser, X creation_date XD, ' 'X modification_date XM, Y creation_date YD, Y is CWGroup ' 'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)') @@ -390,13 +390,13 @@ filtered_variable=filtered_variable) self.assertEqual(f.vocabulary(), [(u'admin', u'admin')]) self.assertEqual(f.possible_values(), ['admin']) - req.form[f.__regid__] = 'admin' + req.form[f.__regid__] = u'admin' f.add_rql_restrictions() self.assertEqual(f.select.as_string(), - "DISTINCT Any WHERE X is CWUser, X creation_date XD, " - "X modification_date XM, Y creation_date YD, Y is CWGroup, " - "X created_by G, G owned_by H, H login 'admin' " - "HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)") + 'DISTINCT Any WHERE X is CWUser, X creation_date XD, ' + 'X modification_date XM, Y creation_date YD, Y is CWGroup, ' + 'X created_by G, G owned_by H, H login "admin" ' + 'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)') def test_aggregat_query_attribute(self): with self.admin_access.web_request() as req: @@ -409,12 +409,12 @@ [(u'admin', u'admin'), (u'anon', u'anon')]) self.assertEqual(f.possible_values(), ['admin', 'anon']) - req.form[f.__regid__] = 'admin' + req.form[f.__regid__] = u'admin' f.add_rql_restrictions() self.assertEqual(f.select.as_string(), - "DISTINCT Any WHERE X is 
CWUser, X creation_date XD, " - "X modification_date XM, Y creation_date YD, Y is CWGroup, X login 'admin' " - "HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)") + 'DISTINCT Any WHERE X is CWUser, X creation_date XD, ' + 'X modification_date XM, Y creation_date YD, Y is CWGroup, X login "admin" ' + 'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)') if __name__ == '__main__': from logilab.common.testlib import unittest_main diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_form.py --- a/web/test/unittest_form.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_form.py Thu Jun 16 14:19:20 2016 +0200 @@ -21,9 +21,12 @@ from xml.etree.ElementTree import fromstring from lxml import html +from six import text_type + from logilab.common.testlib import unittest_main from cubicweb import Binary, ValidationError +from cubicweb.mttransforms import HAS_TAL from cubicweb.devtools.testlib import CubicWebTC from cubicweb.web.formfields import (IntField, StringField, RichTextField, PasswordField, DateTimeField, @@ -65,19 +68,19 @@ t = req.create_entity('Tag', name=u'x') form1 = self.vreg['forms'].select('edition', req, entity=t) choices = [reid for rview, reid in form1.field_by_name('tags', 'subject', t.e_schema).choices(form1)] - self.assertIn(unicode(b.eid), choices) + self.assertIn(text_type(b.eid), choices) form2 = self.vreg['forms'].select('edition', req, entity=b) choices = [reid for rview, reid in form2.field_by_name('tags', 'object', t.e_schema).choices(form2)] - self.assertIn(unicode(t.eid), choices) + self.assertIn(text_type(t.eid), choices) b.cw_clear_all_caches() t.cw_clear_all_caches() req.cnx.execute('SET X tags Y WHERE X is Tag, Y is BlogEntry') choices = [reid for rview, reid in form1.field_by_name('tags', 'subject', t.e_schema).choices(form1)] - self.assertIn(unicode(b.eid), choices) + self.assertIn(text_type(b.eid), choices) choices = [reid for rview, reid in form2.field_by_name('tags', 'object', t.e_schema).choices(form2)] - self.assertIn(unicode(t.eid), choices) + self.assertIn(text_type(t.eid), choices) def test_form_field_choices_new_entity(self): with self.admin_access.web_request() as req: @@ -193,8 +196,9 @@ with self.admin_access.web_request() as req: req.use_fckeditor = lambda: False self._test_richtextfield(req, ''' @@ -241,7 +245,7 @@ eidparam=True, role='subject') with self.admin_access.web_request() as req: file = req.create_entity('File', data_name=u"pouet.txt", data_encoding=u'UTF-8', - data=Binary('new widgets system')) + data=Binary(b'new widgets system')) form = EFFForm(req, redirect_path='perdu.com', entity=file) self.assertMultiLineEqual(self._render_entity_field(req, 'data', form), ''' diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_formfields.py --- a/web/test/unittest_formfields.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_formfields.py Thu Jun 16 14:19:20 2016 +0200 @@ -21,6 +21,7 @@ from yams.constraints import StaticVocabularyConstraint, SizeConstraint +import cubicweb from cubicweb.devtools import TestServerConfiguration from cubicweb.devtools.testlib import CubicWebTC from cubicweb.web.formwidgets import PasswordInput, TextArea, Select, Radio @@ -127,7 +128,7 @@ self.assertIsInstance(field, BooleanField) self.assertEqual(field.required, False) self.assertIsInstance(field.widget, Radio) - self.assertEqual(field.vocabulary(mock(_cw=mock(_=unicode))), + self.assertEqual(field.vocabulary(mock(_cw=mock(_=cubicweb._))), [(u'yes', '1'), (u'no', '')]) def test_bool_field_explicit_choices(self): @@ -135,7 +136,7 @@ field = 
guess_field(schema['CWAttribute'], schema['indexed'], choices=[(u'maybe', '1'), (u'no', '')], req=req) self.assertIsInstance(field.widget, Radio) - self.assertEqual(field.vocabulary(mock(req=mock(_=unicode))), + self.assertEqual(field.vocabulary(mock(req=mock(_=cubicweb._))), [(u'maybe', '1'), (u'no', '')]) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_formwidgets.py --- a/web/test/unittest_formwidgets.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_formwidgets.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,23 +17,18 @@ # with CubicWeb. If not, see . """unittests for cw.web.formwidgets""" -from logilab.common.testlib import TestCase, unittest_main, mock_object as mock - -from cubicweb.devtools import TestServerConfiguration, fake -from cubicweb.web import formwidgets, formfields - -from cubes.file.entities import File +from logilab.common.testlib import unittest_main, mock_object as mock -def setUpModule(*args): - global schema - config = TestServerConfiguration('data', apphome=WidgetsTC.datadir) - config.bootstrap_cubes() - schema = config.load_schema() +from cubicweb.devtools import fake +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.web import formwidgets, formfields +from cubicweb.web.views.forms import FieldsForm -class WidgetsTC(TestCase): + +class WidgetsTC(CubicWebTC): def test_editableurl_widget(self): - field = formfields.guess_field(schema['Bookmark'], schema['path']) + field = formfields.guess_field(self.schema['Bookmark'], self.schema['path']) widget = formwidgets.EditableURLWidget() req = fake.FakeRequest(form={'path-subjectfqs:A': 'param=value&vid=view'}) form = mock(_cw=req, formvalues={}, edited_entity=mock(eid='A')) @@ -41,7 +36,7 @@ '?param=value%26vid%3Dview') def test_bitselect_widget(self): - field = formfields.guess_field(schema['CWAttribute'], schema['ordernum']) + field = formfields.guess_field(self.schema['CWAttribute'], self.schema['ordernum']) field.choices = [('un', '1',), ('deux', '2',)] widget = formwidgets.BitSelect(settabindex=False) req = fake.FakeRequest(form={'ordernum-subject:A': ['1', '2']}) @@ -56,5 +51,21 @@ self.assertEqual(widget.process_field_data(form, field), 3) + def test_xml_escape_checkbox(self): + class TestForm(FieldsForm): + bool = formfields.BooleanField(ignore_req_params=True, + choices=[('python >> others', '1')], + widget=formwidgets.CheckBox()) + with self.admin_access.web_request() as req: + form = TestForm(req, None) + form.build_context() + field = form.field_by_name('bool') + widget = field.widget + self.assertMultiLineEqual(widget._render(form, field, None), + '') + + if __name__ == '__main__': unittest_main() diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_http.py --- a/web/test/unittest_http.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_http.py Thu Jun 16 14:19:20 2016 +0200 @@ -152,6 +152,7 @@ ] req = _test_cache(hin, hout) self.assertCache(304, req.status_out, 'etag match') + self.assertEqual(req.headers_out.getRawHeaders('etag'), ['babar']) # etag match in multiple hin = [('if-none-match', 'loutre'), ('if-none-match', 'babar'), @@ -160,6 +161,7 @@ ] req = _test_cache(hin, hout) self.assertCache(304, req.status_out, 'etag match in multiple') + self.assertEqual(req.headers_out.getRawHeaders('etag'), ['babar']) # client use "*" as etag hin = [('if-none-match', '*'), ] @@ -167,6 +169,7 @@ ] req = _test_cache(hin, hout) self.assertCache(304, req.status_out, 'client use "*" as etag') + self.assertEqual(req.headers_out.getRawHeaders('etag'), ['babar']) @tag('etag', 'last_modified') 
def test_both(self): @@ -216,6 +219,7 @@ ] req = _test_cache(hin, hout) self.assertCache(304, req.status_out, 'both ok') + self.assertEqual(req.headers_out.getRawHeaders('etag'), ['babar']) @tag('etag', 'HEAD') def test_head_verb(self): @@ -235,6 +239,7 @@ ] req = _test_cache(hin, hout, method='HEAD') self.assertCache(304, req.status_out, 'not modifier HEAD verb') + self.assertEqual(req.headers_out.getRawHeaders('etag'), ['babar']) @tag('etag', 'POST') def test_post_verb(self): @@ -253,46 +258,6 @@ req = _test_cache(hin, hout, method='POST') self.assertCache(412, req.status_out, 'not modifier HEAD verb') - @tag('expires') - def test_expires_added(self): - #: Check that Expires header is added: - #: - when the page is modified - #: - when none was already present - hin = [('if-none-match', 'babar'), - ] - hout = [('etag', 'rhino/really-not-babar'), - ] - req = _test_cache(hin, hout) - self.assertCache(None, req.status_out, 'modifier HEAD verb') - value = req.headers_out.getHeader('expires') - self.assertIsNotNone(value) - - @tag('expires') - def test_expires_not_added(self): - #: Check that Expires header is not added if NOT-MODIFIED - hin = [('if-none-match', 'babar'), - ] - hout = [('etag', 'babar'), - ] - req = _test_cache(hin, hout) - self.assertCache(304, req.status_out, 'not modifier HEAD verb') - value = req.headers_out.getHeader('expires') - self.assertIsNone(value) - - @tag('expires') - def test_expires_no_overwrite(self): - #: Check that cache does not overwrite existing Expires header - hin = [('if-none-match', 'babar'), - ] - DATE = 'Sat, 13 Apr 2012 14:39:32 GM' - hout = [('etag', 'rhino/really-not-babar'), - ('expires', DATE), - ] - req = _test_cache(hin, hout) - self.assertCache(None, req.status_out, 'not modifier HEAD verb') - value = req.headers_out.getRawHeaders('expires') - self.assertEqual(value, [DATE]) - alloworig = 'access-control-allow-origin' allowmethods = 'access-control-allow-methods' diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_idownloadable.py --- a/web/test/unittest_idownloadable.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_idownloadable.py Thu Jun 16 14:19:20 2016 +0200 @@ -42,7 +42,7 @@ return self.entity.name() + '.txt' def download_data(self): - return 'Babar is not dead!' + return b'Babar is not dead!' 
class BrokenIDownloadableGroup(IDownloadableUser): @@ -72,7 +72,7 @@ get('content-disposition')) self.assertEqual(['text/plain;charset=ascii'], get('content-type')) - self.assertEqual('Babar is not dead!', data) + self.assertEqual(b'Babar is not dead!', data) def test_header_with_space(self): with self.admin_access.web_request() as req: @@ -87,13 +87,13 @@ get('content-disposition')) self.assertEqual(['text/plain;charset=ascii'], get('content-type')) - self.assertEqual('Babar is not dead!', data) + self.assertEqual(b'Babar is not dead!', data) def test_header_with_space_and_comma(self): with self.admin_access.web_request() as req: - self.create_user(req, login=ur'c " l\ a', password='babar') + self.create_user(req, login=u'c " l\\ a', password='babar') req.cnx.commit() - with self.new_access(ur'c " l\ a').web_request() as req: + with self.new_access(u'c " l\\ a').web_request() as req: req.form['vid'] = 'download' req.form['eid'] = str(req.user.eid) data = self.ctrl_publish(req,'view') @@ -102,7 +102,7 @@ get('content-disposition')) self.assertEqual(['text/plain;charset=ascii'], get('content-type')) - self.assertEqual('Babar is not dead!', data) + self.assertEqual(b'Babar is not dead!', data) def test_header_unicode_filename(self): with self.admin_access.web_request() as req: diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_magicsearch.py --- a/web/test/unittest_magicsearch.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_magicsearch.py Thu Jun 16 14:19:20 2016 +0200 @@ -21,6 +21,8 @@ import sys from contextlib import contextmanager +from six.moves import range + from logilab.common.testlib import TestCase, unittest_main from rql import BadRQLQuery, RQLSyntaxError @@ -62,19 +64,19 @@ def test_basic_translations(self): """tests basic translations (no ambiguities)""" with self.proc() as proc: - rql = "Any C WHERE C is Adresse, P adel C, C adresse 'Logilab'" + rql = u"Any C WHERE C is Adresse, P adel C, C adresse 'Logilab'" rql, = proc.preprocess_query(rql) - self.assertEqual(rql, "Any C WHERE C is EmailAddress, P use_email C, C address 'Logilab'") + self.assertEqual(rql, 'Any C WHERE C is EmailAddress, P use_email C, C address "Logilab"') def test_ambiguous_translations(self): """tests possibly ambiguous translations""" with self.proc() as proc: - rql = "Any P WHERE P adel C, C is EmailAddress, C nom 'Logilab'" + rql = u"Any P WHERE P adel C, C is EmailAddress, C nom 'Logilab'" rql, = proc.preprocess_query(rql) - self.assertEqual(rql, "Any P WHERE P use_email C, C is EmailAddress, C alias 'Logilab'") - rql = "Any P WHERE P is Utilisateur, P adel C, P nom 'Smith'" + self.assertEqual(rql, 'Any P WHERE P use_email C, C is EmailAddress, C alias "Logilab"') + rql = u"Any P WHERE P is Utilisateur, P adel C, P nom 'Smith'" rql, = proc.preprocess_query(rql) - self.assertEqual(rql, "Any P WHERE P is CWUser, P use_email C, P surname 'Smith'") + self.assertEqual(rql, 'Any P WHERE P is CWUser, P use_email C, P surname "Smith"') class QSPreProcessorTC(CubicWebTC): @@ -330,7 +332,7 @@ # suggestions should contain any possible value for # a given attribute (limited to 10) with self.admin_access.web_request() as req: - for i in xrange(15): + for i in range(15): req.create_entity('Personne', nom=u'n%s' % i, prenom=u'p%s' % i) req.cnx.commit() self.assertListEqual(['Any X WHERE X is Personne, X nom "n0"', diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_propertysheet.py --- a/web/test/unittest_propertysheet.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_propertysheet.py Thu Jun 
16 14:19:20 2016 +0200 @@ -49,19 +49,14 @@ 'a {bgcolor: #FFFFFF; size: 1%;}') self.assertEqual(ps.process_resource(DATADIR, 'pouet.css'), self.cachedir) - self.assertIn('pouet.css', ps._cache) self.assertFalse(ps.need_reload()) os.utime(self.data('sheet1.py'), None) - self.assertIn('pouet.css', ps._cache) self.assertTrue(ps.need_reload()) - self.assertIn('pouet.css', ps._cache) ps.reload() - self.assertNotIn('pouet.css', ps._cache) self.assertFalse(ps.need_reload()) ps.process_resource(DATADIR, 'pouet.css') # put in cache os.utime(self.data('pouet.css'), None) self.assertFalse(ps.need_reload()) - self.assertNotIn('pouet.css', ps._cache) if __name__ == '__main__': diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_urlpublisher.py --- a/web/test/unittest_urlpublisher.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_urlpublisher.py Thu Jun 16 14:19:20 2016 +0200 @@ -25,7 +25,7 @@ from cubicweb.rset import ResultSet from cubicweb.devtools.testlib import CubicWebTC from cubicweb.devtools.fake import FakeRequest -from cubicweb.web import NotFound, Redirect +from cubicweb.web import NotFound, Redirect, views from cubicweb.web.views.urlrewrite import SimpleReqRewriter @@ -69,6 +69,7 @@ self.assertEqual("Any X,AA,AB ORDERBY AB WHERE X is_instance_of CWEType, " "X modification_date AA, X name AB", rset.printable_rql()) + self.assertEqual(req.form['vid'], 'sameetypelist') def test_rest_path_by_attr(self): with self.admin_access.web_request() as req: @@ -91,10 +92,11 @@ 'X firstname AA, X login AB, X modification_date AC, ' 'X surname AD, X login "admin"', rset.printable_rql()) + self.assertEqual(req.form['vid'], 'primary') def test_rest_path_eid(self): with self.admin_access.web_request() as req: - ctrl, rset = self.process(req, 'cwuser/eid/%s' % self.user(req).eid) + ctrl, rset = self.process(req, 'cwuser/eid/%s' % req.user.eid) self.assertEqual(ctrl, 'view') self.assertEqual(len(rset), 1) self.assertEqual(rset.description[0][0], 'CWUser') @@ -125,6 +127,15 @@ 'X title "hell\'o"', rset.printable_rql()) + def test_rest_path_use_vid_from_rset(self): + with self.admin_access.web_request(headers={'Accept': 'application/rdf+xml'}) as req: + views.VID_BY_MIMETYPE['application/rdf+xml'] = 'rdf' + try: + ctrl, rset = self.process(req, 'CWEType') + finally: + views.VID_BY_MIMETYPE.pop('application/rdf+xml') + self.assertEqual(req.form['vid'], 'rdf') + def test_rest_path_errors(self): with self.admin_access.web_request() as req: self.assertRaises(NotFound, self.process, req, 'CWUser/eid/30000') @@ -141,25 +152,24 @@ self.assertRaises(NotFound, self.process, req, '1/non_action') self.assertRaises(NotFound, self.process, req, 'CWUser/login/admin/non_action') - def test_regexp_path(self): """tests the regexp path resolution""" with self.admin_access.web_request() as req: ctrl, rset = self.process(req, 'add/Task') self.assertEqual(ctrl, 'view') self.assertEqual(rset, None) - self.assertEqual(req.form, {'etype' : "Task", 'vid' : "creation"}) + self.assertEqual(req.form, {'etype': "Task", 'vid': "creation"}) self.assertRaises(NotFound, self.process, req, 'add/foo/bar') def test_nonascii_path(self): oldrules = SimpleReqRewriter.rules - SimpleReqRewriter.rules = [(re.compile('/\w+', re.U), dict(vid='foo')),] + SimpleReqRewriter.rules = [(re.compile('/\w+', re.U), dict(vid='foo'))] with self.admin_access.web_request() as req: try: path = str(FakeRequest().url_quote(u'été')) ctrl, rset = self.process(req, path) self.assertEqual(rset, None) - self.assertEqual(req.form, {'vid' : "foo"}) + 
self.assertEqual(req.form, {'vid': "foo"}) finally: SimpleReqRewriter.rules = oldrules diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_urlrewrite.py --- a/web/test/unittest_urlrewrite.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_urlrewrite.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,6 +16,8 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . +from six import text_type + from logilab.common import tempattr from cubicweb.devtools.testlib import CubicWebTC @@ -137,8 +139,8 @@ rgx_action(r'Any X WHERE X surname %(sn)s, ' 'X firstname %(fn)s', argsgroups=('sn', 'fn'), - transforms={'sn' : unicode.capitalize, - 'fn' : unicode.lower,})), + transforms={'sn' : text_type.capitalize, + 'fn' : text_type.lower,})), ] with self.admin_access.web_request() as req: rewriter = TestSchemaBasedRewriter(req) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_views_basecontrollers.py --- a/web/test/unittest_views_basecontrollers.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_views_basecontrollers.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,22 +17,18 @@ # with CubicWeb. If not, see . """cubicweb.web.views.basecontrollers unit tests""" -from urlparse import urlsplit, urlunsplit, urljoin -# parse_qs is deprecated in cgi and has been moved to urlparse in Python 2.6 -try: - from urlparse import parse_qs as url_parse_query -except ImportError: - from cgi import parse_qs as url_parse_query +from six import text_type +from six.moves.urllib.parse import urlsplit, urlunsplit, urljoin, parse_qs import lxml from logilab.common.testlib import unittest_main - from logilab.common.decorators import monkeypatch from cubicweb import Binary, NoSelectableObject, ValidationError from cubicweb.schema import RRQLExpression from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.devtools.webtest import CubicWebTestTC from cubicweb.utils import json_dumps from cubicweb.uilib import rql_for_eid from cubicweb.web import Redirect, RemoteCallFailed @@ -45,6 +41,19 @@ from cubicweb.server.hook import Hook, Operation from cubicweb.predicates import is_instance + +class ViewControllerTC(CubicWebTestTC): + def test_view_ctrl_with_valid_cache_headers(self): + resp = self.webapp.get('/manage') + self.assertEqual(resp.etag, 'manage/guests') + self.assertEqual(resp.status_code, 200) + cache_headers = {'if-modified-since': resp.headers['Last-Modified'], + 'if-none-match': resp.etag} + resp = self.webapp.get('/manage', headers=cache_headers) + self.assertEqual(resp.status_code, 304) + self.assertEqual(len(resp.body), 0) + + def req_form(user): return {'eid': [str(user.eid)], '_cw_entity_fields:%s' % user.eid: '_cw_generic_field', @@ -82,7 +91,7 @@ } with self.assertRaises(ValidationError) as cm: self.ctrl_publish(req) - cm.exception.translate(unicode) + cm.exception.translate(text_type) self.assertEqual({'login-subject': 'the value "admin" is already used, use another one'}, cm.exception.errors) @@ -136,12 +145,12 @@ user = req.user groupeids = [eid for eid, in req.execute('CWGroup G WHERE G name ' 'in ("managers", "users")')] - groups = [unicode(eid) for eid in groupeids] - eid = unicode(user.eid) + groups = [text_type(eid) for eid in groupeids] + eid = text_type(user.eid) req.form = { 'eid': eid, '__type:'+eid: 'CWUser', '_cw_entity_fields:'+eid: 'login-subject,firstname-subject,surname-subject,in_group-subject', - 'login-subject:'+eid: unicode(user.login), + 'login-subject:'+eid: text_type(user.login), 'surname-subject:'+eid: u'Th\xe9nault', 
'firstname-subject:'+eid: u'Sylvain', 'in_group-subject:'+eid: groups, @@ -159,7 +168,7 @@ self.create_user(cnx, u'user') cnx.commit() with self.new_access(u'user').web_request() as req: - eid = unicode(req.user.eid) + eid = text_type(req.user.eid) req.form = { 'eid': eid, '__maineid' : eid, '__type:'+eid: 'CWUser', @@ -179,12 +188,12 @@ with self.admin_access.web_request() as req: user = req.user groupeids = [g.eid for g in user.in_group] - eid = unicode(user.eid) + eid = text_type(user.eid) req.form = { 'eid': eid, '__type:'+eid: 'CWUser', '_cw_entity_fields:'+eid: 'login-subject,firstname-subject,surname-subject', - 'login-subject:'+eid: unicode(user.login), + 'login-subject:'+eid: text_type(user.login), 'firstname-subject:'+eid: u'Th\xe9nault', 'surname-subject:'+eid: u'Sylvain', } @@ -207,7 +216,7 @@ 'login-subject:X': u'adim', 'upassword-subject:X': u'toto', 'upassword-subject-confirm:X': u'toto', 'surname-subject:X': u'Di Mascio', - 'in_group-subject:X': unicode(gueid), + 'in_group-subject:X': text_type(gueid), '__type:Y': 'EmailAddress', '_cw_entity_fields:Y': 'address-subject,use_email-object', @@ -231,7 +240,7 @@ '__type:Y': 'File', '_cw_entity_fields:Y': 'data-subject,described_by_test-object', - 'data-subject:Y': (u'coucou.txt', Binary('coucou')), + 'data-subject:Y': (u'coucou.txt', Binary(b'coucou')), 'described_by_test-object:Y': 'X', } path, _params = self.expect_redirect_handle_request(req, 'edit') @@ -256,7 +265,7 @@ '__type:Y': 'File', '_cw_entity_fields:Y': 'data-subject', - 'data-subject:Y': (u'coucou.txt', Binary('coucou')), + 'data-subject:Y': (u'coucou.txt', Binary(b'coucou')), } path, _params = self.expect_redirect_handle_request(req, 'edit') self.assertTrue(path.startswith('salesterm/'), path) @@ -274,7 +283,7 @@ # non regression test for #3120495. 
Without the fix, leads to # "unhashable type: 'list'" error with self.admin_access.web_request() as req: - cwrelation = unicode(req.execute('CWEType X WHERE X name "CWSource"')[0][0]) + cwrelation = text_type(req.execute('CWEType X WHERE X name "CWSource"')[0][0]) req.form = {'eid': [cwrelation], '__maineid' : cwrelation, '__type:'+cwrelation: 'CWEType', @@ -287,7 +296,7 @@ def test_edit_multiple_linked(self): with self.admin_access.web_request() as req: - peid = unicode(self.create_user(req, u'adim').eid) + peid = text_type(self.create_user(req, u'adim').eid) req.form = {'eid': [peid, 'Y'], '__maineid': peid, '__type:'+peid: u'CWUser', @@ -307,7 +316,7 @@ self.assertEqual(email.address, 'dima@logilab.fr') # with self.admin_access.web_request() as req: - emaileid = unicode(email.eid) + emaileid = text_type(email.eid) req.form = {'eid': [peid, emaileid], '__type:'+peid: u'CWUser', @@ -329,7 +338,7 @@ with self.admin_access.web_request() as req: user = req.user req.form = {'eid': 'X', - '__cloned_eid:X': unicode(user.eid), '__type:X': 'CWUser', + '__cloned_eid:X': text_type(user.eid), '__type:X': 'CWUser', '_cw_entity_fields:X': 'login-subject,upassword-subject', 'login-subject:X': u'toto', 'upassword-subject:X': u'toto', @@ -338,7 +347,7 @@ self.ctrl_publish(req) self.assertEqual({'upassword-subject': u'password and confirmation don\'t match'}, cm.exception.errors) - req.form = {'__cloned_eid:X': unicode(user.eid), + req.form = {'__cloned_eid:X': text_type(user.eid), 'eid': 'X', '__type:X': 'CWUser', '_cw_entity_fields:X': 'login-subject,upassword-subject', 'login-subject:X': u'toto', @@ -354,7 +363,7 @@ def test_interval_bound_constraint_success(self): with self.admin_access.repo_cnx() as cnx: feid = cnx.execute('INSERT File X: X data_name "toto.txt", X data %(data)s', - {'data': Binary('yo')})[0][0] + {'data': Binary(b'yo')})[0][0] cnx.commit() with self.admin_access.web_request(rollbackfirst=True) as req: @@ -362,11 +371,11 @@ '__type:X': 'Salesterm', '_cw_entity_fields:X': 'amount-subject,described_by_test-subject', 'amount-subject:X': u'-10', - 'described_by_test-subject:X': unicode(feid), + 'described_by_test-subject:X': text_type(feid), } with self.assertRaises(ValidationError) as cm: self.ctrl_publish(req) - cm.exception.translate(unicode) + cm.exception.translate(text_type) self.assertEqual({'amount-subject': 'value -10 must be >= 0'}, cm.exception.errors) @@ -375,11 +384,11 @@ '__type:X': 'Salesterm', '_cw_entity_fields:X': 'amount-subject,described_by_test-subject', 'amount-subject:X': u'110', - 'described_by_test-subject:X': unicode(feid), + 'described_by_test-subject:X': text_type(feid), } with self.assertRaises(ValidationError) as cm: self.ctrl_publish(req) - cm.exception.translate(unicode) + cm.exception.translate(text_type) self.assertEqual(cm.exception.errors, {'amount-subject': 'value 110 must be <= 100'}) with self.admin_access.web_request(rollbackfirst=True) as req: @@ -387,7 +396,7 @@ '__type:X': 'Salesterm', '_cw_entity_fields:X': 'amount-subject,described_by_test-subject', 'amount-subject:X': u'10', - 'described_by_test-subject:X': unicode(feid), + 'described_by_test-subject:X': text_type(feid), } self.expect_redirect_handle_request(req, 'edit') # should be redirected on the created @@ -400,31 +409,31 @@ constrained attributes""" with self.admin_access.repo_cnx() as cnx: feid = cnx.execute('INSERT File X: X data_name "toto.txt", X data %(data)s', - {'data': Binary('yo')})[0][0] + {'data': Binary(b'yo')})[0][0] seid = cnx.create_entity('Salesterm', amount=0, 
described_by_test=feid).eid cnx.commit() # ensure a value that violate a constraint is properly detected with self.admin_access.web_request(rollbackfirst=True) as req: - req.form = {'eid': [unicode(seid)], + req.form = {'eid': [text_type(seid)], '__type:%s'%seid: 'Salesterm', '_cw_entity_fields:%s'%seid: 'amount-subject', 'amount-subject:%s'%seid: u'-10', } self.assertMultiLineEqual(''''''%seid, self.ctrl_publish(req, 'validateform')) +'''%seid, self.ctrl_publish(req, 'validateform').decode('ascii')) # ensure a value that comply a constraint is properly processed with self.admin_access.web_request(rollbackfirst=True) as req: - req.form = {'eid': [unicode(seid)], + req.form = {'eid': [text_type(seid)], '__type:%s'%seid: 'Salesterm', '_cw_entity_fields:%s'%seid: 'amount-subject', 'amount-subject:%s'%seid: u'20', } self.assertMultiLineEqual('''''', self.ctrl_publish(req, 'validateform')) +''', self.ctrl_publish(req, 'validateform').decode('ascii')) self.assertEqual(20, req.execute('Any V WHERE X amount V, X eid %(eid)s', {'eid': seid})[0][0]) @@ -433,7 +442,7 @@ '__type:X': 'Salesterm', '_cw_entity_fields:X': 'amount-subject,described_by_test-subject', 'amount-subject:X': u'0', - 'described_by_test-subject:X': unicode(feid), + 'described_by_test-subject:X': text_type(feid), } # ensure a value that is modified in an operation on a modify @@ -452,11 +461,11 @@ with self.temporary_appobjects(ValidationErrorInOpAfterHook): self.assertMultiLineEqual('''''', self.ctrl_publish(req, 'validateform')) +''', self.ctrl_publish(req, 'validateform').decode('ascii')) self.assertMultiLineEqual('''''', self.ctrl_publish(req, 'validateform')) +''', self.ctrl_publish(req, 'validateform').decode('ascii')) def test_req_pending_insert(self): """make sure req's pending insertions are taken into account""" @@ -541,7 +550,7 @@ def test_redirect_delete_button(self): with self.admin_access.web_request() as req: eid = req.create_entity('BlogEntry', title=u'hop', content=u'hop').eid - req.form = {'eid': unicode(eid), '__type:%s'%eid: 'BlogEntry', + req.form = {'eid': text_type(eid), '__type:%s'%eid: 'BlogEntry', '__action_delete': ''} path, params = self.expect_redirect_handle_request(req, 'edit') self.assertEqual(path, 'blogentry') @@ -550,14 +559,14 @@ req.execute('SET X use_email E WHERE E eid %(e)s, X eid %(x)s', {'x': req.user.eid, 'e': eid}) req.cnx.commit() - req.form = {'eid': unicode(eid), '__type:%s'%eid: 'EmailAddress', + req.form = {'eid': text_type(eid), '__type:%s'%eid: 'EmailAddress', '__action_delete': ''} path, params = self.expect_redirect_handle_request(req, 'edit') self.assertEqual(path, 'cwuser/admin') self.assertIn('_cwmsgid', params) eid1 = req.create_entity('BlogEntry', title=u'hop', content=u'hop').eid eid2 = req.create_entity('EmailAddress', address=u'hop@logilab.fr').eid - req.form = {'eid': [unicode(eid1), unicode(eid2)], + req.form = {'eid': [text_type(eid1), text_type(eid2)], '__type:%s'%eid1: 'BlogEntry', '__type:%s'%eid2: 'EmailAddress', '__action_delete': ''} @@ -607,13 +616,13 @@ groupeids = sorted(eid for eid, in req.execute('CWGroup G ' 'WHERE G name in ("managers", "users")')) - groups = [unicode(eid) for eid in groupeids] + groups = [text_type(eid) for eid in groupeids] cwetypeeid = req.execute('CWEType X WHERE X name "CWEType"')[0][0] - basegroups = [unicode(eid) + basegroups = [text_type(eid) for eid, in req.execute('CWGroup G ' 'WHERE X read_permission G, X eid %(x)s', {'x': cwetypeeid})] - cwetypeeid = unicode(cwetypeeid) + cwetypeeid = text_type(cwetypeeid) req.form = { 'eid': 
cwetypeeid, '__type:'+cwetypeeid: 'CWEType', @@ -662,7 +671,7 @@ '_cw_entity_fields:X': 'login-subject,upassword-subject,in_group-subject', 'login-subject:X': u'adim', 'upassword-subject:X': u'toto', 'upassword-subject-confirm:X': u'toto', - 'in_group-subject:X': `gueid`, + 'in_group-subject:X': repr(gueid), '__type:Y': 'EmailAddress', '_cw_entity_fields:Y': 'address-subject,alias-subject,use_email-object', @@ -737,7 +746,7 @@ '__type:Y': 'File', '_cw_entity_fields:Y': 'data-subject', - 'data-subject:Y': (u'coucou.txt', Binary('coucou')), + 'data-subject:Y': (u'coucou.txt', Binary(b'coucou')), } values_by_eid = dict((eid, req.extract_entity_params(eid, minparams=2)) for eid in req.edited_eids()) @@ -783,7 +792,7 @@ rset = self.john.as_rset() rset.req = req source = ctrl.publish() - self.assertTrue(source.startswith('
          ')) + self.assertTrue(source.startswith(b'
          ')) # def test_json_exec(self): # rql = 'Any T,N WHERE T is Tag, T name N' @@ -824,7 +833,7 @@ rset.req = req source = ctrl.publish() # maydel jscall - self.assertIn('ajaxBoxRemoveLinkedEntity', source) + self.assertIn(b'ajaxBoxRemoveLinkedEntity', source) def test_pending_insertion(self): with self.remote_calling('add_pending_inserts', [['12', 'tags', '13']]) as (_, req): @@ -887,16 +896,16 @@ # silly tests def test_external_resource(self): with self.remote_calling('external_resource', 'RSS_LOGO') as (res, _): - self.assertEqual(json_dumps(self.config.uiprops['RSS_LOGO']), + self.assertEqual(json_dumps(self.config.uiprops['RSS_LOGO']).encode('ascii'), res) def test_i18n(self): with self.remote_calling('i18n', ['bimboom']) as (res, _): - self.assertEqual(json_dumps(['bimboom']), res) + self.assertEqual(json_dumps(['bimboom']).encode('ascii'), res) def test_format_date(self): with self.remote_calling('format_date', '2007-01-01 12:00:00') as (res, _): - self.assertEqual(json_dumps('2007/01/01'), res) + self.assertEqual(json_dumps('2007/01/01').encode('ascii'), res) def test_ajaxfunc_noparameter(self): @ajaxfunc @@ -968,7 +977,7 @@ def js_foo(self): return u'hello' with self.remote_calling('foo') as (res, _): - self.assertEqual(res, u'hello') + self.assertEqual(res, b'hello') def test_monkeypatch_jsoncontroller_xhtmlize(self): with self.assertRaises(RemoteCallFailed): @@ -979,7 +988,7 @@ def js_foo(self): return u'hello' with self.remote_calling('foo') as (res, _): - self.assertEqual(u'
          hello
          ', res) + self.assertEqual(b'
          hello
          ', res) def test_monkeypatch_jsoncontroller_jsonize(self): with self.assertRaises(RemoteCallFailed): @@ -990,7 +999,7 @@ def js_foo(self): return 12 with self.remote_calling('foo') as (res, _): - self.assertEqual(res, '12') + self.assertEqual(res, b'12') def test_monkeypatch_jsoncontroller_stdfunc(self): @monkeypatch(JSonController) @@ -998,7 +1007,7 @@ def js_reledit_form(self): return 12 with self.remote_calling('reledit_form') as (res, _): - self.assertEqual(res, '12') + self.assertEqual(res, b'12') class UndoControllerTC(CubicWebTC): @@ -1042,7 +1051,7 @@ """ with self.admin_access.web_request() as req: scheme, netloc, path, query, fragment = urlsplit(url) - query_dict = url_parse_query(query) + query_dict = parse_qs(query) expected_url = urljoin(req.base_url(), expected_path) self.assertEqual( urlunsplit((scheme, netloc, path, None, None)), expected_url) @@ -1058,17 +1067,6 @@ result = controller.publish(rset=None) self.assertURLPath(cm.exception.location, rpath) - def test_redirect_default(self): - with self.admin_access.web_request() as req: - txuuid = self.txuuid_toto_email - req.form['txuuid'] = txuuid - req.session.data['breadcrumbs'] = [ urljoin(req.base_url(), path) - for path in ('tata', 'toto',)] - controller = self.vreg['controllers'].select('undo', req) - with self.assertRaises(Redirect) as cm: - result = controller.publish(rset=None) - self.assertURLPath(cm.exception.location, 'toto') - class LoginControllerTC(CubicWebTC): diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_views_baseviews.py --- a/web/test/unittest_views_baseviews.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_views_baseviews.py Thu Jun 16 14:19:20 2016 +0200 @@ -129,8 +129,8 @@ source_lines = [line.strip() for line in html_source.splitlines(False) if line.strip()] - self.assertListEqual(['', - ''], + self.assertListEqual([b'', + b''], source_lines[:2]) def test_set_doctype_no_reset_xmldecl(self): @@ -151,9 +151,9 @@ source_lines = [line.strip() for line in html_source.splitlines(False) if line.strip()] - self.assertListEqual([html_doctype, - '', - ''], + self.assertListEqual([html_doctype.encode('ascii'), + b'', + b''], source_lines[:3]) if __name__ == '__main__': diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_views_csv.py --- a/web/test/unittest_views_csv.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_views_csv.py Thu Jun 16 14:19:20 2016 +0200 @@ -30,19 +30,19 @@ self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['text/comma-separated-values;charset=UTF-8']) expected_data = "String;COUNT(CWUser)\nguests;1\nmanagers;1" - self.assertMultiLineEqual(expected_data, data) + self.assertMultiLineEqual(expected_data, data.decode('utf-8')) def test_csvexport_on_empty_rset(self): """Should return the CSV header. 
""" with self.admin_access.web_request() as req: - rset = req.execute('Any GN,COUNT(X) GROUPBY GN ORDERBY GN ' - 'WHERE X in_group G, G name GN, X login "Miles"') + rset = req.execute(u'Any GN,COUNT(X) GROUPBY GN ORDERBY GN ' + 'WHERE X in_group G, G name GN, X login "Miles"') data = self.view('csvexport', rset, req=req) self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['text/comma-separated-values;charset=UTF-8']) expected_data = "String;COUNT(CWUser)" - self.assertMultiLineEqual(expected_data, data) + self.assertMultiLineEqual(expected_data, data.decode('utf-8')) if __name__ == '__main__': diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_views_editforms.py --- a/web/test/unittest_views_editforms.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_views_editforms.py Thu Jun 16 14:19:20 2016 +0200 @@ -15,7 +15,9 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . + from logilab.common.testlib import unittest_main, mock_object +from logilab.common import tempattr from cubicweb.devtools.testlib import CubicWebTC from cubicweb.web.views import uicfg @@ -181,6 +183,29 @@ autoform = self.vreg['forms'].select('edition', req, entity=req.user) self.assertEqual(list(autoform.inlined_form_views()), []) + def test_inlined_form_views(self): + # when some relation has + cardinality, and some already linked entities which are not + # updatable, a link to optionally add a new sub-entity should be displayed, not a sub-form + # forcing creation of a sub-entity + from cubicweb.web.views import autoform + with self.admin_access.web_request() as req: + req.create_entity('EmailAddress', address=u'admin@cubicweb.org', + reverse_use_email=req.user.eid) + use_email_schema = self.vreg.schema['CWUser'].rdef('use_email') + with tempattr(use_email_schema, 'cardinality', '+1'): + with self.temporary_permissions(EmailAddress={'update': ()}): + form = self.vreg['forms'].select('edition', req, entity=req.user) + formviews = list(form.inlined_form_views()) + self.assertEqual(len(formviews), 1, formviews) + self.assertIsInstance(formviews[0], autoform.InlineAddNewLinkView) + # though do not introduce regression on entity creation with 1 cardinality relation + with tempattr(use_email_schema, 'cardinality', '11'): + user = self.vreg['etypes'].etype_class('CWUser')(req) + form = self.vreg['forms'].select('edition', req, entity=user) + formviews = list(form.inlined_form_views()) + self.assertEqual(len(formviews), 1, formviews) + self.assertIsInstance(formviews[0], autoform.InlineEntityCreationFormView) + def test_check_inlined_rdef_permissions(self): # try to check permissions when creating an entity ('user' below is a # fresh entity without an eid) @@ -255,4 +280,3 @@ if __name__ == '__main__': unittest_main() - diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_views_errorform.py --- a/web/test/unittest_views_errorform.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_views_errorform.py Thu Jun 16 14:19:20 2016 +0200 @@ -50,8 +50,8 @@ req.data['excinfo'] = sys.exc_info() req.data['ex'] = e html = self.view('error', req=req) - self.failUnless(re.search(r'^$', + self.assertTrue(re.search(b'^$', html.source, re.M)) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_views_json.py --- a/web/test/unittest_views_json.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_views_json.py Thu Jun 16 14:19:20 2016 +0200 @@ -16,12 +16,14 @@ # # You should have received a copy of the GNU Lesser General Public License along # with 
CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+from six import binary_type
+
 from cubicweb.devtools.testlib import CubicWebTC


 class JsonViewsTC(CubicWebTC):
     anonymize = True
-    res_jsonp_data = '[["guests", 1]]'
+    res_jsonp_data = b'[["guests", 1]]'

     def setUp(self):
         super(JsonViewsTC, self).setUp()
@@ -29,14 +31,15 @@

     def test_json_rsetexport(self):
         with self.admin_access.web_request() as req:
-            rset = req.execute('Any GN,COUNT(X) GROUPBY GN ORDERBY GN WHERE X in_group G, G name GN')
+            rset = req.execute(
+                'Any GN,COUNT(X) GROUPBY GN ORDERBY GN WHERE X in_group G, G name GN')
             data = self.view('jsonexport', rset, req=req)
             self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json'])
             self.assertListEqual(data, [["guests", 1], ["managers", 1]])

     def test_json_rsetexport_empty_rset(self):
         with self.admin_access.web_request() as req:
-            rset = req.execute('Any X WHERE X is CWUser, X login "foobarbaz"')
+            rset = req.execute(u'Any X WHERE X is CWUser, X login "foobarbaz"')
             data = self.view('jsonexport', rset, req=req)
             self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json'])
             self.assertListEqual(data, [])
@@ -47,21 +50,24 @@
                              'rql': u'Any GN,COUNT(X) GROUPBY GN ORDERBY GN '
                              'WHERE X in_group G, G name GN'})
             data = self.ctrl_publish(req, ctrl='jsonp')
-            self.assertIsInstance(data, str)
-            self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/javascript'])
+            self.assertIsInstance(data, binary_type)
+            self.assertEqual(req.headers_out.getRawHeaders('content-type'),
+                             ['application/javascript'])
             # because jsonp anonymizes data, only 'guests' group should be found
-            self.assertEqual(data, 'foo(%s)' % self.res_jsonp_data)
+            self.assertEqual(data, b'foo(' + self.res_jsonp_data + b')')

     def test_json_rsetexport_with_jsonp_and_bad_vid(self):
         with self.admin_access.web_request() as req:
             req.form.update({'callback': 'foo',
-                             'vid': 'table', # <-- this parameter should be ignored by jsonp controller
+                             # "vid" parameter should be ignored by jsonp controller
+                             'vid': 'table',
                              'rql': 'Any GN,COUNT(X) GROUPBY GN ORDERBY GN '
                              'WHERE X in_group G, G name GN'})
             data = self.ctrl_publish(req, ctrl='jsonp')
-            self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/javascript'])
+            self.assertEqual(req.headers_out.getRawHeaders('content-type'),
+                             ['application/javascript'])
             # result should be plain json, not the table view
-            self.assertEqual(data, 'foo(%s)' % self.res_jsonp_data)
+            self.assertEqual(data, b'foo(' + self.res_jsonp_data + b')')

     def test_json_ersetexport(self):
         with self.admin_access.web_request() as req:
@@ -71,7 +77,7 @@
             self.assertEqual(data[0]['name'], 'guests')
             self.assertEqual(data[1]['name'], 'managers')

-            rset = req.execute('Any G WHERE G is CWGroup, G name "foo"')
+            rset = req.execute(u'Any G WHERE G is CWGroup, G name "foo"')
             data = self.view('ejsonexport', rset, req=req)
             self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json'])
             self.assertEqual(data, [])
@@ -79,7 +85,8 @@

 class NotAnonymousJsonViewsTC(JsonViewsTC):
     anonymize = False
-    res_jsonp_data = '[["guests", 1], ["managers", 1]]'
+    res_jsonp_data = b'[["guests", 1], ["managers", 1]]'
+

 if __name__ == '__main__':
     from logilab.common.testlib import unittest_main
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_views_searchrestriction.py
--- a/web/test/unittest_views_searchrestriction.py	Thu Mar 24 09:43:25 2016 +0100
+++ b/web/test/unittest_views_searchrestriction.py	Thu Jun 16 14:19:20 2016 +0200
@@ -37,62 +37,62 @@

     @property
     def
select(self): - return self.parse('Any B,(NOW - CD),S,V,U,GROUP_CONCAT(TN),VN,P,CD,BMD ' - 'GROUPBY B,CD,S,V,U,VN,P,BMD ' - 'WHERE B in_state S, B creation_date CD, ' - 'B modification_date BMD, T? tags B, T name TN, ' - 'V? bookmarked_by B, V title VN, B created_by U?, ' - 'B in_group P, P name "managers"') + return self.parse(u'Any B,(NOW - CD),S,V,U,GROUP_CONCAT(TN),VN,P,CD,BMD ' + 'GROUPBY B,CD,S,V,U,VN,P,BMD ' + 'WHERE B in_state S, B creation_date CD, ' + 'B modification_date BMD, T? tags B, T name TN, ' + 'V? bookmarked_by B, V title VN, B created_by U?, ' + 'B in_group P, P name "managers"') def test_1(self): self.assertEqual(self._generate(self.select, 'in_state', 'subject', 'name'), - "DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name 'managers', " - "B in_state A, B is CWUser, A name C") + 'DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name "managers", ' + 'B in_state A, B is CWUser, A name C') def test_2(self): self.assertEqual(self._generate(self.select, 'tags', 'object', 'name'), - "DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name 'managers', " - "A tags B, B is CWUser, A name C") + 'DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name "managers", ' + 'A tags B, B is CWUser, A name C') def test_3(self): self.assertEqual(self._generate(self.select, 'created_by', 'subject', 'login'), - "DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name 'managers', " - "B created_by A, B is CWUser, A login C") + 'DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name "managers", ' + 'B created_by A, B is CWUser, A login C') def test_4(self): - self.assertEqual(self._generate(self.parse('Any X WHERE X is CWUser'), 'created_by', 'subject', 'login'), + self.assertEqual(self._generate(self.parse(u'Any X WHERE X is CWUser'), 'created_by', 'subject', 'login'), "DISTINCT Any A,B ORDERBY B WHERE X is CWUser, X created_by A, A login B") def test_5(self): - self.assertEqual(self._generate(self.parse('Any X,L WHERE X is CWUser, X login L'), 'created_by', 'subject', 'login'), + self.assertEqual(self._generate(self.parse(u'Any X,L WHERE X is CWUser, X login L'), 'created_by', 'subject', 'login'), "DISTINCT Any A,B ORDERBY B WHERE X is CWUser, X created_by A, A login B") def test_nonregr1(self): - select = self.parse('Any T,V WHERE T bookmarked_by V?, ' - 'V in_state VS, VS name "published", T created_by U') + select = self.parse(u'Any T,V WHERE T bookmarked_by V?, ' + 'V in_state VS, VS name "published", T created_by U') self.assertEqual(self._generate(select, 'created_by', 'subject', 'login'), "DISTINCT Any A,B ORDERBY B WHERE T created_by U, " "T created_by A, T is Bookmark, A login B") def test_nonregr2(self): #'DISTINCT Any X,TMP,N WHERE P name TMP, X version_of P, P is Project, X is Version, not X in_state S,S name "published", X num N ORDERBY TMP,N' - select = self.parse('DISTINCT Any V,TN,L ORDERBY TN,L WHERE T nom TN, V connait T, T is Personne, V is CWUser,' - 'NOT V in_state VS, VS name "published", V login L') + select = self.parse(u'DISTINCT Any V,TN,L ORDERBY TN,L WHERE T nom TN, V connait T, T is Personne, V is CWUser,' + 'NOT V in_state VS, VS name "published", V login L') rschema = self.schema['connait'] - for rdefs in rschema.rdefs.itervalues(): + for rdefs in rschema.rdefs.values(): rdefs.cardinality = '++' try: self.assertEqual(self._generate(select, 'in_state', 'subject', 'name'), - "DISTINCT Any A,B ORDERBY B WHERE V is CWUser, " - "NOT EXISTS(V in_state VS), VS name 'published', " - "V in_state A, A name B") + 'DISTINCT Any A,B ORDERBY B WHERE V is CWUser, ' + 'NOT 
EXISTS(V in_state VS), VS name "published", ' + 'V in_state A, A name B') finally: - for rdefs in rschema.rdefs.itervalues(): + for rdefs in rschema.rdefs.values(): rdefs.cardinality = '**' def test_nonregr3(self): #'DISTINCT Any X,TMP,N WHERE P name TMP, X version_of P, P is Project, X is Version, not X in_state S,S name "published", X num N ORDERBY TMP,N' - select = self.parse('DISTINCT Any X, MAX(Y) GROUPBY X WHERE X is CWUser, Y is Bookmark, X in_group A') + select = self.parse(u'DISTINCT Any X, MAX(Y) GROUPBY X WHERE X is CWUser, Y is Bookmark, X in_group A') self.assertEqual(self._generate(select, 'in_group', 'subject', 'name'), "DISTINCT Any B,C ORDERBY C WHERE X is CWUser, X in_group B, B name C") diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_views_staticcontrollers.py --- a/web/test/unittest_views_staticcontrollers.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_views_staticcontrollers.py Thu Jun 16 14:19:20 2016 +0200 @@ -70,12 +70,21 @@ with self._publish_static_files(fname) as req: self.assertEqual(200, req.status_out) self.assertIn('last-modified', req.headers_out) + self.assertIn('expires', req.headers_out) + self.assertEqual(req.get_response_header('cache-control'), + {'max-age': 604800}) next_headers = { 'if-modified-since': req.get_response_header('last-modified', raw=True), } with self._publish_static_files(fname, next_headers) as req: self.assertEqual(304, req.status_out) + def _check_datafile_redirect(self, fname, expected): + with self._publish_static_files(fname) as req: + self.assertEqual(302, req.status_out) + self.assertEqual(req.get_response_header('location'), + req.base_url() + expected) + def _check_no_datafile(self, fname): with self._publish_static_files(fname) as req: self.assertEqual(404, req.status_out) @@ -90,10 +99,12 @@ self._check_no_datafile('data/%s/cubicweb.css' % ('0'*len(hash))) with tempattr(self.vreg.config, 'mode', 'notest'): - self._check_datafile_ok('data/cubicweb.css') + self.config._init_base_url() # reset config.datadir_url + self._check_datafile_redirect('data/cubicweb.css', 'data/%s/cubicweb.css' % hash) self._check_datafile_ok('data/%s/cubicweb.css' % hash) - self._check_no_datafile('data/does/not/exist') - self._check_no_datafile('data/%s/cubicweb.css' % ('0'*len(hash))) + self._check_no_datafile('data/%s/does/not/exist' % hash) + self._check_datafile_redirect('data/%s/does/not/exist' % ('0'*len(hash)), + 'data/%s/%s/does/not/exist' % (hash, '0'*len(hash))) class ConcatFilesTC(CubicWebTC): @@ -120,12 +131,12 @@ yield res, req def expected_content(self, js_files): - content = u'' + content = b'' for js_file in js_files: dirpath, rid = self.config.locate_resource(js_file) if dirpath is not None: # ignore resources not found - with open(osp.join(dirpath, rid)) as f: - content += f.read() + '\n' + with open(osp.join(dirpath, rid), 'rb') as f: + content += f.read() + b'\n' return content def test_cache(self): @@ -162,4 +173,3 @@ if __name__ == '__main__': from logilab.common.testlib import unittest_main unittest_main() - diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/test/unittest_viewselector.py --- a/web/test/unittest_viewselector.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/test/unittest_viewselector.py Thu Jun 16 14:19:20 2016 +0200 @@ -17,6 +17,7 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
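
# The test hunks above keep changing '...' to b'...' and appending
# .decode('ascii'): under Python 3 rendered controller output is bytes, not
# str.  A minimal self-contained sketch of that rule, using the real six
# names; publish() and its literal are hypothetical stand-ins for the
# controllers exercised in these tests:
from six import binary_type, text_type

def publish():
    # stand-in for ctrl_publish()/ctrl.publish(): views emit encoded bytes
    return u'<div>hello</div>'.encode('ascii')

body = publish()
assert isinstance(body, binary_type)                 # bytes on py2 and py3
assert isinstance(body.decode('ascii'), text_type)   # decode before comparing text
assert body == b'<div>hello</div>'
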
"""XXX rename, split, reorganize this""" +from __future__ import print_function from logilab.common.testlib import unittest_main @@ -76,9 +77,9 @@ try: self.assertSetEqual(list(content), expected) except Exception: - print registry, sorted(expected), sorted(content) - print 'no more', [v for v in expected if not v in content] - print 'missing', [v for v in content if not v in expected] + print(registry, sorted(expected), sorted(content)) + print('no more', [v for v in expected if not v in content]) + print('missing', [v for v in content if not v in expected]) raise def setUp(self): @@ -421,7 +422,7 @@ def test_interface_selector(self): with self.admin_access.web_request() as req: - req.create_entity('File', data_name=u'bim.png', data=Binary('bim')) + req.create_entity('File', data_name=u'bim.png', data=Binary(b'bim')) # image primary view priority rset = req.execute('File X WHERE X data_name "bim.png"') self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset), @@ -430,21 +431,21 @@ def test_score_entity_selector(self): with self.admin_access.web_request() as req: - req.create_entity('File', data_name=u'bim.png', data=Binary('bim')) + req.create_entity('File', data_name=u'bim.png', data=Binary(b'bim')) # image/ehtml primary view priority rset = req.execute('File X WHERE X data_name "bim.png"') self.assertIsInstance(self.vreg['views'].select('image', req, rset=rset), idownloadable.ImageView) self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'ehtml', req, rset=rset) - fileobj = req.create_entity('File', data_name=u'bim.html', data=Binary('bambam 1: self.w(u"\n") @@ -314,7 +316,7 @@ self.w(u'
            \n') else: self.w(u'\n' % (listid, klass or 'section')) - for i in xrange(self.cw_rset.rowcount): + for i in range(self.cw_rset.rowcount): self.cell_call(row=i, col=0, vid=subvid, klass=klass, **kwargs) self.w(u'
          \n') if title: @@ -393,7 +395,7 @@ @property def title(self): - etype = iter(self.cw_rset.column_types(0)).next() + etype = next(iter(self.cw_rset.column_types(0))) return display_name(self._cw, etype, form='plural') def call(self, **kwargs): @@ -427,7 +429,7 @@ def call(self, subvid=None, **kwargs): kwargs['vid'] = subvid rset = self.cw_rset - for i in xrange(len(rset)): + for i in range(len(rset)): self.cell_call(i, 0, **kwargs) if i < rset.rowcount-1: self.w(self.separator) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/bookmark.py --- a/web/views/bookmark.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/bookmark.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,7 @@ """Primary view for bookmarks + user's bookmarks box""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from logilab.mtconverter import xml_escape diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/boxes.py --- a/web/views/boxes.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/boxes.py Thu Jun 16 14:19:20 2016 +0200 @@ -26,10 +26,12 @@ * startup views box """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from warnings import warn +from six import text_type, add_metaclass + from logilab.mtconverter import xml_escape from logilab.common.deprecation import class_deprecated @@ -93,7 +95,7 @@ etypes = self.cw_rset.column_types(0) if len(etypes) == 1: plural = self.cw_rset.rowcount > 1 and 'plural' or '' - etypelabel = display_name(self._cw, iter(etypes).next(), plural) + etypelabel = display_name(self._cw, next(iter(etypes)), plural) title = u'%s - %s' % (title, etypelabel.lower()) w(title) @@ -216,7 +218,7 @@ @property def domid(self): - return super(RsetBox, self).domid + unicode(abs(id(self))) + unicode(abs(id(self.cw_rset))) + return super(RsetBox, self).domid + text_type(abs(id(self))) + text_type(abs(id(self.cw_rset))) def render_title(self, w): w(self.cw_extra_kwargs['title']) @@ -231,9 +233,9 @@ # helper classes ############################################################## +@add_metaclass(class_deprecated) class SideBoxView(EntityView): """helper view class to display some entities in a sidebox""" - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.10] SideBoxView is deprecated, use RsetBox instead (%(cls)s)' __regid__ = 'sidebox' diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/calendar.py --- a/web/views/calendar.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/calendar.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,7 @@ """html calendar views""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ import copy from datetime import timedelta diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/csvexport.py --- a/web/views/csvexport.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/csvexport.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,10 @@ """csv export views""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ + +from six import PY2 +from six.moves import range from cubicweb.schema import display_name from cubicweb.predicates import any_rset, empty_rset @@ -29,7 +32,7 @@ """mixin class for CSV views""" templatable = False content_type = "text/comma-separated-values" - binary = True # avoid unicode assertion + binary = PY2 # python csv module is unicode aware in py3k csv_params = {'dialect': 'excel', 'quotechar': '"', 'delimiter': ';', @@ -88,7 +91,7 @@ rows_by_type = {} writer = self.csvwriter() rowdef_by_type = {} - for index in xrange(len(self.cw_rset)): + for index in range(len(self.cw_rset)): entity 
= self.cw_rset.complete_entity(index) if entity.e_schema not in rows_by_type: rowdef_by_type[entity.e_schema] = [rs for rs, at in entity.e_schema.attribute_definitions() @@ -98,8 +101,7 @@ rows = rows_by_type[entity.e_schema] rows.append([entity.printable_value(rs.type, format='text/plain') for rs in rowdef_by_type[entity.e_schema]]) - for rows in rows_by_type.itervalues(): + for rows in rows_by_type.values(): writer.writerows(rows) # use two empty lines as separator writer.writerows([[], []]) - diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/cwproperties.py --- a/web/views/cwproperties.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/cwproperties.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,7 @@ """Specific views for CWProperty (eg site/user preferences""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from logilab.mtconverter import xml_escape @@ -119,10 +119,10 @@ _ = self._cw._ self.w(u'

          %s

          \n' % _(self.title)) for label, group, form in sorted((_(g), g, f) - for g, f in mainforms.iteritems()): + for g, f in mainforms.items()): self.wrap_main_form(group, label, form) for label, group, objects in sorted((_(g), g, o) - for g, o in groupedforms.iteritems()): + for g, o in groupedforms.items()): self.wrap_grouped_form(group, label, objects) @property @@ -171,7 +171,7 @@ entity = self.cwprops_rset.get_entity(values[key], 0) else: entity = self._cw.vreg['etypes'].etype_class('CWProperty')(self._cw) - entity.eid = self._cw.varmaker.next() + entity.eid = next(self._cw.varmaker) entity.cw_attr_cache['pkey'] = key entity.cw_attr_cache['value'] = self._cw.vreg.property_value(key) return entity @@ -224,7 +224,7 @@ (make_togglable_link('fieldset_' + group, label))) self.w(u'
          ' % (group, status)) sorted_objects = sorted((self._cw.__('%s_%s' % (group, o)), o, f) - for o, f in objects.iteritems()) + for o, f in objects.items()) for label, oid, form in sorted_objects: self.wrap_object_form(group, oid, label, form) self.w(u'
          ') diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/cwsources.py --- a/web/views/cwsources.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/cwsources.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,20 +20,23 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ import logging from itertools import repeat + +from six.moves import range + from logilab.mtconverter import xml_escape from logilab.common.decorators import cachedproperty from cubicweb import Unauthorized, tags from cubicweb.utils import make_uid from cubicweb.predicates import (is_instance, score_entity, has_related_entities, - match_user_groups, match_kwargs, match_view) + match_user_groups, match_kwargs, match_view, one_line_rset) from cubicweb.view import EntityView, StartupView from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES, display_name -from cubicweb.web import formwidgets as wdgs, facet +from cubicweb.web import Redirect, formwidgets as wdgs, facet, action from cubicweb.web.views import add_etype_button from cubicweb.web.views import (uicfg, tabs, actions, ibreadcrumbs, navigation, tableview, pyviews) @@ -95,7 +98,7 @@ if hostconfig: self.w(u'

          %s

          ' % self._cw._('CWSourceHostConfig_plural')) self._cw.view('table', hostconfig, w=self.w, - displaycols=range(2), + displaycols=list(range(2)), cellvids={1: 'editable-final'}) @@ -186,7 +189,7 @@ warning(_('relation %(rtype)s with %(etype)s as %(role)s is ' 'supported but no target type supported') % {'rtype': rschema, 'role': role, 'etype': etype}) - for rtype, rdefs in self.srelations.iteritems(): + for rtype, rdefs in self.srelations.items(): if rdefs is None: rschema = self.schema[rtype] for subj, obj in rschema.rdefs: @@ -223,6 +226,36 @@ layout_args = {'display_filter': 'top'} +class CWSourceSyncAction(action.Action): + __regid__ = 'cw.source-sync' + __select__ = (action.Action.__select__ & match_user_groups('managers') + & one_line_rset() & is_instance('CWSource') + & score_entity(lambda x: x.name != 'system')) + + title = _('synchronize') + category = 'mainactions' + order = 20 + + def url(self): + entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0) + return entity.absolute_url(vid=self.__regid__) + + +class CWSourceSyncView(EntityView): + __regid__ = 'cw.source-sync' + __select__ = (match_user_groups('managers') + & one_line_rset() & is_instance('CWSource') + & score_entity(lambda x: x.name != 'system')) + + title = _('synchronize') + + def entity_call(self, entity): + self._cw.call_service('source-sync', source_eid=entity.eid) + msg = self._cw._('Source has been synchronized') + url = entity.absolute_url(tab='cwsource-imports', __message=msg) + raise Redirect(url) + + # sources management view ###################################################### diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/cwuser.py --- a/web/views/cwuser.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/cwuser.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,10 +18,13 @@ """Specific views for users and groups""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from hashlib import sha1 # pylint: disable=E0611 +from six import text_type +from six.moves import range + from logilab.mtconverter import xml_escape from cubicweb import tags @@ -64,7 +67,7 @@ '''% self._cw.encoding) - for i in xrange(self.cw_rset.rowcount): + for i in range(self.cw_rset.rowcount): self.cell_call(i, 0) self.w(u'\n') @@ -250,6 +253,6 @@ 'group': tableview.MainEntityColRenderer(), 'nb_users': tableview.EntityTableColRenderer( header=_('num. users'), - renderfunc=lambda w,x: w(unicode(x.num_users())), + renderfunc=lambda w,x: w(text_type(x.num_users())), sortfunc=lambda x: x.num_users()), } diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/debug.py --- a/web/views/debug.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/debug.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,10 +18,12 @@ """management and error screens""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from time import strftime, localtime +from six import text_type + from logilab.mtconverter import xml_escape from cubicweb.predicates import none_rset, match_user_groups @@ -33,7 +35,7 @@ if dict: w(u'
            ') for key in sorted(dict): - w(u'
          • %s: %s
          • ' % ( + w(u'
          • %s: %s
          • ' % ( xml_escape(str(key)), xml_escape(repr(dict[key])))) w(u'
          ') @@ -71,31 +73,23 @@ dtformat = req.property_value('ui.datetime-format') _ = req._ w = self.w + repo = req.cnx.repo # generic instance information w(u'

          %s

          ' % _('Instance')) - w(u'') - w(u'' % ( - _('config type'), self._cw.vreg.config.name)) - w(u'' % ( - _('config mode'), self._cw.vreg.config.mode)) - w(u'' % ( - _('instance home'), self._cw.vreg.config.apphome)) - w(u'
          %s%s
          %s%s
          %s%s
          ') - vcconf = req.vreg.config.vc_config() + pyvalue = ((_('config type'), self._cw.vreg.config.name), + (_('config mode'), self._cw.vreg.config.mode), + (_('instance home'), self._cw.vreg.config.apphome)) + self.wview('pyvaltable', pyvalue=pyvalue, header_column_idx=0) + vcconf = repo.get_versions() w(u'

          %s

          ' % _('versions configuration')) - w(u'') - w(u'' % ( - 'CubicWeb', vcconf.get('cubicweb', _('no version information')))) - for cube in sorted(self._cw.vreg.config.cubes()): - cubeversion = vcconf.get(cube, _('no version information')) - w(u'' % ( - cube, cubeversion)) - w(u'
          %s%s
          %s%s
          ') + missing = _('no version information') + pyvalue = [('CubicWeb', vcconf.get('cubicweb', missing))] + pyvalue += [(cube, vcconf.get(cube, missing)) + for cube in sorted(self._cw.vreg.config.cubes())] + self.wview('pyvaltable', pyvalue=pyvalue, header_column_idx=0) # repository information - repo = req.vreg.config.repository(None) w(u'

          %s

          ' % _('Repository')) w(u'

          %s

          ' % _('resources usage')) - w(u'') stats = self._cw.call_service('repo_stats') stats['looping_tasks'] = ', '.join('%s (%s seconds)' % (n, i) for n, i in stats['looping_tasks']) stats['threads'] = ', '.join(sorted(stats['threads'])) @@ -104,11 +98,13 @@ continue if k.endswith('_cache_size'): stats[k] = '%s / %s' % (stats[k]['size'], stats[k]['maxsize']) - for element in sorted(stats): - w(u'' - % (element, xml_escape(unicode(stats[element])), - element.endswith('percent') and '%' or '' )) - w(u'
          %s%s %s
          ') + def format_stat(sname, sval): + return '%s %s' % (xml_escape(text_type(sval)), + sname.endswith('percent') and '%' or '') + pyvalue = [(sname, format_stat(sname, sval)) + for sname, sval in sorted(stats.items())] + self.wview('pyvaltable', pyvalue=pyvalue, header_column_idx=0) + # open repo sessions if req.cnx.is_repo_in_memory and req.user.is_in_group('managers'): w(u'

          %s

          ' % _('opened sessions')) sessions = repo._sessions.values() @@ -116,7 +112,7 @@ w(u'
            ') for session in sessions: w(u'
          • %s (%s: %s)
            ' % ( - xml_escape(unicode(session)), + xml_escape(text_type(session)), _('last usage'), strftime(dtformat, localtime(session.timestamp)))) dict_to_html(w, session.data) @@ -126,12 +122,9 @@ w(u'

            %s

            ' % _('no repository sessions found')) # web server information w(u'

            %s

            ' % _('Web server')) - w(u'') - w(u'' % ( - _('base url'), req.base_url())) - w(u'' % ( - _('data directory url'), req.datadir_url)) - w(u'
            %s%s
            %s%s
            ') + pyvalue = ((_('base url'), req.base_url()), + (_('data directory url'), req.datadir_url)) + self.wview('pyvaltable', pyvalue=pyvalue, header_column_idx=0) from cubicweb.web.application import SESSION_MANAGER if SESSION_MANAGER is not None and req.user.is_in_group('managers'): sessions = SESSION_MANAGER.current_sessions() @@ -170,7 +163,7 @@ continue self.w(u'

            %s

            ' % (key, key)) if self._cw.vreg[key]: - values = sorted(self._cw.vreg[key].iteritems()) + values = sorted(self._cw.vreg[key].items()) self.wview('pyvaltable', pyvalue=[(key, xml_escape(repr(val))) for key, val in values]) else: diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/dotgraphview.py --- a/web/views/dotgraphview.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/dotgraphview.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,7 @@ """some basic stuff to build dot generated graph images""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ import tempfile import os diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/editcontroller.py --- a/web/views/editcontroller.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/editcontroller.py Thu Jun 16 14:19:20 2016 +0200 @@ -24,12 +24,14 @@ from datetime import datetime +from six import text_type + from logilab.common.deprecation import deprecated from logilab.common.graph import ordered_nodes from rql.utils import rqlvar_maker -from cubicweb import Binary, ValidationError, UnknownEid +from cubicweb import _, Binary, ValidationError, UnknownEid from cubicweb.view import EntityAdapter from cubicweb.predicates import is_instance from cubicweb.web import (INTERNAL_FIELD_VALUE, RequestError, NothingToEdit, @@ -96,9 +98,9 @@ def update_query(self, eid): assert not self.canceled varmaker = rqlvar_maker() - var = varmaker.next() + var = next(varmaker) while var in self.kwargs: - var = varmaker.next() + var = next(varmaker) rql = 'SET %s WHERE X eid %%(%s)s' % (','.join(self.edited), var) if self.restrictions: rql += ', %s' % ','.join(self.restrictions) @@ -146,7 +148,7 @@ values_by_eid = dict((eid, req.extract_entity_params(eid, minparams=2)) for eid in req.edited_eids()) # iterate over all the edited entities - for eid, values in values_by_eid.iteritems(): + for eid, values in values_by_eid.items(): # add eid to the dependency graph graph.setdefault(eid, set()) # search entity's edited fields for mandatory inlined relation @@ -201,7 +203,7 @@ if '__linkto' in req.form and 'eid' in req.form: self.execute_linkto() elif not ('__delete' in req.form or '__insert' in req.form): - raise ValidationError(None, {None: unicode(ex)}) + raise ValidationError(None, {None: text_type(ex)}) # all pending inlined relations to newly created entities have been # treated now (pop to ensure there are no attempt to add new ones) pending_inlined = req.data.pop('pending_inlined') @@ -222,7 +224,7 @@ autoform.delete_relations(self._cw, todelete) self._cw.remove_pending_operations() if self.errors: - errors = dict((f.name, unicode(ex)) for f, ex in self.errors) + errors = dict((f.name, text_type(ex)) for f, ex in self.errors) raise ValidationError(valerror_eid(form.get('__maineid')), errors) def _insert_entity(self, etype, eid, rqlquery): @@ -273,7 +275,7 @@ rqlquery.set_inlined(field.name, form_.edited_entity.eid) if not rqlquery.canceled: if self.errors: - errors = dict((f.role_name(), unicode(ex)) for f, ex in self.errors) + errors = dict((f.role_name(), text_type(ex)) for f, ex in self.errors) raise ValidationError(valerror_eid(entity.eid), errors) if eid is None: # creation or copy entity.eid = eid = self._insert_entity(etype, formparams['eid'], rqlquery) @@ -376,7 +378,7 @@ """handle edition for the (rschema, x) relation of the given entity """ if values: - rqlquery.set_inlined(field.name, iter(values).next()) + rqlquery.set_inlined(field.name, next(iter(values))) elif form.edited_entity.has_eid(): self.handle_relation(form, field, values, 
origvalues) @@ -415,13 +417,13 @@ for eid, etype in eidtypes: entity = self._cw.entity_from_eid(eid, etype) path, params = entity.cw_adapt_to('IEditControl').after_deletion_path() - redirect_info.add( (path, tuple(params.iteritems())) ) + redirect_info.add( (path, tuple(params.items())) ) entity.cw_delete() if len(redirect_info) > 1: # In the face of ambiguity, refuse the temptation to guess. self._after_deletion_path = 'view', () else: - self._after_deletion_path = iter(redirect_info).next() + self._after_deletion_path = next(iter(redirect_info)) if len(eidtypes) > 1: self._cw.set_message(self._cw._('entities deleted')) else: @@ -431,7 +433,7 @@ def check_concurrent_edition(self, formparams, eid): req = self._cw try: - form_ts = datetime.fromtimestamp(float(formparams['__form_generation_time'])) + form_ts = datetime.utcfromtimestamp(float(formparams['__form_generation_time'])) except KeyError: # Backward and tests compatibility : if no timestamp consider edition OK return @@ -448,13 +450,6 @@ self._default_publish() self.reset() - def _action_cancel(self): - errorurl = self._cw.form.get('__errorurl') - if errorurl: - self._cw.cancel_edition(errorurl) - self._cw.set_message(self._cw._('edit canceled')) - return self.reset() - def _action_delete(self): self.delete_entities(self._cw.edited_eids(withtype=True)) return self.reset() diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/editforms.py --- a/web/views/editforms.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/editforms.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,10 +20,12 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from copy import copy +from six.moves import range + from logilab.mtconverter import xml_escape from logilab.common.decorators import cached from logilab.common.registry import yes @@ -145,7 +147,7 @@ # selector etype = kwargs.pop('etype', self._cw.form.get('etype')) entity = self._cw.vreg['etypes'].etype_class(etype)(self._cw) - entity.eid = self._cw.varmaker.next() + entity.eid = next(self._cw.varmaker) self.render_form(entity) def form_title(self, entity): @@ -197,7 +199,7 @@ entity.complete() self.newentity = copy(entity) self.copying = entity - self.newentity.eid = self._cw.varmaker.next() + self.newentity.eid = next(self._cw.varmaker) self.w(u'\n' % self._cw._(self.warning_message)) super(CopyFormView, self).render_form(self.newentity) @@ -230,7 +232,7 @@ def __init__(self, req, rset, **kwargs): kwargs.setdefault('__redirectrql', rset.printable_rql()) super(TableEditForm, self).__init__(req, rset=rset, **kwargs) - for row in xrange(len(self.cw_rset)): + for row in range(len(self.cw_rset)): form = self._cw.vreg['forms'].select('edition', self._cw, rset=self.cw_rset, row=row, formtype='muledit', diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/editviews.py --- a/web/views/editviews.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/editviews.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,7 @@ """Some views used to help to the edition process""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from logilab.common.decorators import cached from logilab.mtconverter import xml_escape diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/facets.py --- a/web/views/facets.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/facets.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,7 @@ """the facets box and some basic facets""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from warnings import warn @@ -168,7 +168,7 @@ DeprecationWarning, stacklevel=2) else: 
vidargs = {} - vidargs = dict((k, v) for k, v in vidargs.iteritems() if v) + vidargs = dict((k, v) for k, v in vidargs.items() if v) facetargs = xml_escape(json_dumps([divid, vid, paginate, vidargs])) w(u'
            ' % (divid, cssclass, facetargs)) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/formrenderers.py --- a/web/views/formrenderers.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/formrenderers.py Thu Jun 16 14:19:20 2016 +0200 @@ -33,10 +33,12 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from warnings import warn +from six import text_type + from logilab.mtconverter import xml_escape from logilab.common.registry import yes @@ -119,7 +121,7 @@ data.insert(0, errormsg) # NOTE: we call unicode because `tag` objects may be found within data # e.g. from the cwtags library - w(''.join(unicode(x) for x in data)) + w(''.join(text_type(x) for x in data)) def render_content(self, w, form, values): if self.display_progress_div: @@ -241,7 +243,7 @@ if form.fieldsets_in_order: fieldsets = form.fieldsets_in_order else: - fieldsets = byfieldset.iterkeys() + fieldsets = byfieldset for fieldset in list(fieldsets): try: fields = byfieldset.pop(fieldset) @@ -542,4 +544,3 @@ self._render_fields(fields, w, form) self.render_child_forms(w, form, values) w(u'') - diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/forms.py --- a/web/views/forms.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/forms.py Thu Jun 16 14:19:20 2016 +0200 @@ -45,20 +45,19 @@ __docformat__ = "restructuredtext en" -from warnings import warn +import time +import inspect -import time +from six import text_type from logilab.common import dictattr, tempattr from logilab.common.decorators import iclassmethod, cached from logilab.common.textutils import splitstrip -from logilab.common.deprecation import deprecated from cubicweb import ValidationError, neg_role -from cubicweb.utils import support_args from cubicweb.predicates import non_final_entity, match_kwargs, one_line_rset from cubicweb.web import RequestError, ProcessFormError -from cubicweb.web import form, formwidgets as fwdgs +from cubicweb.web import form from cubicweb.web.views import uicfg from cubicweb.web.formfields import guess_field @@ -257,7 +256,7 @@ editedfields = self._cw.form['_cw_fields'] except KeyError: raise RequestError(self._cw._('no edited fields specified')) - entityform = entity and self.field_by_name.im_func.func_code.co_argcount == 4 # XXX + entityform = entity and len(inspect.getargspec(self.field_by_name)) == 4 # XXX for editedfield in splitstrip(editedfields): try: name, role = editedfield.split('-') @@ -286,7 +285,7 @@ except ProcessFormError as exc: errors.append((field, exc)) if errors: - errors = dict((f.role_name(), unicode(ex)) for f, ex in errors) + errors = dict((f.role_name(), text_type(ex)) for f, ex in errors) raise ValidationError(None, errors) return processed @@ -366,8 +365,8 @@ self.add_hidden('_cwmsgid', msgid) def add_generation_time(self): - # NB repr is critical to avoid truncation of the timestamp - self.add_hidden('__form_generation_time', repr(time.time()), + # use %f to prevent (unlikely) display in exponential format + self.add_hidden('__form_generation_time', '%.6f' % time.time(), eidparam=True) def add_linkto_hidden(self): @@ -377,7 +376,7 @@ Warning: this method must be called only when all form fields are setup """ - for (rtype, role), eids in self.linked_to.iteritems(): + for (rtype, role), eids in self.linked_to.items(): # if the relation is already setup by a form field, do not add it # in a __linkto hidden to avoid setting it twice in the controller try: diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/ibreadcrumbs.py --- a/web/views/ibreadcrumbs.py Thu Mar 24 09:43:25 2016 +0100 +++ 
b/web/views/ibreadcrumbs.py	Thu Jun 16 14:19:20 2016 +0200
@@ -18,10 +18,12 @@
 """breadcrumbs components definition for CubicWeb web client"""

 __docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _

 from warnings import warn

+from six import text_type
+
 from logilab.mtconverter import xml_escape

 from cubicweb import tags, uilib
@@ -141,7 +143,7 @@
                 xml_escape(url), xml_escape(uilib.cut(title, textsize))))
         else:
             textsize = self._cw.property_value('navigation.short-line-size')
-            w(xml_escape(uilib.cut(unicode(part), textsize)))
+            w(xml_escape(uilib.cut(text_type(part), textsize)))


 class BreadCrumbETypeVComponent(BreadCrumbEntityVComponent):
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/idownloadable.py
--- a/web/views/idownloadable.py	Thu Mar 24 09:43:25 2016 +0100
+++ b/web/views/idownloadable.py	Thu Jun 16 14:19:20 2016 +0200
@@ -20,7 +20,9 @@
 =====================================================
 """
 __docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
+
+from six.moves import range

 from logilab.mtconverter import BINARY_ENCODINGS, TransformError, xml_escape
 from logilab.common.deprecation import class_renamed, deprecated
@@ -166,7 +168,7 @@

     def call(self, **kwargs):
         rset = self.cw_rset
-        for i in xrange(len(rset)):
+        for i in range(len(rset)):
             self.w(u'
            ') self.wview(self.__regid__, rset, row=i, col=0, **kwargs) self.w(u'
            ') @@ -199,6 +201,3 @@ title = _('embedded html') _embedding_tag = tags.iframe - - - diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/json.py --- a/web/views/json.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/json.py Thu Jun 16 14:19:20 2016 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -18,7 +18,7 @@ """json export views""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from cubicweb.uilib import rest_traceback @@ -28,6 +28,7 @@ from cubicweb.web.application import anonymized_request from cubicweb.web.views import basecontrollers, management + class JsonpController(basecontrollers.ViewController): """The jsonp controller is the same as a ViewController but : @@ -49,7 +50,7 @@ self.warning("vid %s can't be used with jsonp controller, " "falling back to jsonexport", vid) self._cw.form['vid'] = 'jsonexport' - else: # if no vid is specified, use jsonexport + else: # if no vid is specified, use jsonexport self._cw.form['vid'] = 'jsonexport' if self._cw.vreg.config['anonymize-jsonp-queries']: with anonymized_request(self._cw): @@ -59,12 +60,12 @@ def _get_json_data(self, rset): json_data = super(JsonpController, self).publish(rset) - if 'callback' in self._cw.form: # jsonp + if 'callback' in self._cw.form: # jsonp json_padding = self._cw.form['callback'].encode('ascii') # use ``application/javascript`` if ``callback`` parameter is # provided, keep ``application/json`` otherwise self._cw.set_content_type('application/javascript') - json_data = b'%s(%s)' % (json_padding, json_data) + json_data = json_padding + b'(' + json_data + b')' return json_data @@ -85,13 +86,14 @@ indent = int(self._cw.form['_indent']) else: indent = None - self.w(json_dumps(data, indent=indent)) + # python's json.dumps escapes non-ascii characters + self.w(json_dumps(data, indent=indent).encode('ascii')) class JsonRsetView(JsonMixIn, AnyRsetView): """dumps raw result set in JSON format""" __regid__ = 'jsonexport' - __select__ = any_rset() # means rset might be empty or have any shape + __select__ = any_rset() # means rset might be empty or have any shape title = _('json-export-view') def call(self): @@ -105,7 +107,8 @@ The following additional metadata is added to each row : - - ``__cwetype__`` : entity type + - ``cw_etype`` : entity type + - ``cw_source`` : source url """ __regid__ = 'ejsonexport' __select__ = EntityView.__select__ | empty_rset() @@ -114,12 +117,8 @@ def call(self): entities = [] for entity in self.cw_rset.entities(): - entity.complete() # fetch all attributes - # hack to add extra metadata - entity.cw_attr_cache.update({ - '__cwetype__': entity.cw_etype, - }) - entities.append(entity) + serializer = entity.cw_adapt_to('ISerializable') + entities.append(serializer.serialize()) self.wdata(entities) @@ -148,4 +147,4 @@ 'errmsg': errmsg, 'exclass': exclass, 'traceback': rest_traceback(excinfo, errmsg), - }) + }) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/magicsearch.py --- a/web/views/magicsearch.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/magicsearch.py Thu Jun 16 14:19:20 2016 +0200 @@ -23,6 +23,8 @@ import re from logging import getLogger +from six import text_type + from yams.interfaces import IVocabularyConstraint from rql import RQLSyntaxError, BadRQLQuery, parse @@ -86,7 +88,7 @@ else: # Only one possible translation, no ambiguity 
if len(translation_set) == 1: - relation.r_type = iter(translations[rtype]).next() + relation.r_type = next(iter(translations[rtype])) # More than 1 possible translation => resolve it later else: ambiguous_nodes[relation] = (lhs.name, translation_set) @@ -386,7 +388,7 @@ self.processors = sorted(processors, key=lambda x: x.priority) def process_query(self, uquery): - assert isinstance(uquery, unicode) + assert isinstance(uquery, text_type) try: procname, query = uquery.split(':', 1) proc = self.by_name[procname.strip().lower()] @@ -589,7 +591,7 @@ """ schema = self._cw.vreg.schema relations = set() - untyped_dest_var = rqlvar_maker(defined=select.defined_vars).next() + untyped_dest_var = next(rqlvar_maker(defined=select.defined_vars)) # for each solution # 1. find each possible relation # 2. for each relation: @@ -643,7 +645,7 @@ vocab_kwargs = {} if rtype_incomplete_value: vocab_rql += ', X %s LIKE %%(value)s' % user_rtype - vocab_kwargs['value'] = '%s%%' % rtype_incomplete_value + vocab_kwargs['value'] = u'%s%%' % rtype_incomplete_value vocab += [value for value, in self._cw.execute(vocab_rql, vocab_kwargs)] return sorted(set(vocab)) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/management.py --- a/web/views/management.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/management.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,7 @@ """security management and error screens""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from logilab.mtconverter import xml_escape @@ -137,7 +137,7 @@ # if excinfo is not None, it's probably not a bug if excinfo is None: return - vcconf = self._cw.vreg.config.vc_config() + vcconf = self._cw.cnx.repo.get_versions() w(u"
            ") eversion = vcconf.get('cubicweb', self._cw._('no version information')) # NOTE: tuple wrapping needed since eversion is itself a tuple @@ -169,7 +169,7 @@ binfo += u'\n\n:URL: %s\n' % req.url() if not '__bugreporting' in req.form: binfo += u'\n:form params:\n' - binfo += u'\n'.join(u' * %s = %s' % (k, v) for k, v in req.form.iteritems()) + binfo += u'\n'.join(u' * %s = %s' % (k, v) for k, v in req.form.items()) binfo += u'\n\n:CubicWeb version: %s\n' % (eversion,) for pkg, pkgversion in cubes: binfo += u":Cube %s version: %s\n" % (pkg, pkgversion) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/navigation.py --- a/web/views/navigation.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/navigation.py Thu Jun 16 14:19:20 2016 +0200 @@ -46,10 +46,12 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from datetime import datetime +from six import text_type + from rql.nodes import VariableRef, Constant from logilab.mtconverter import xml_escape @@ -192,10 +194,10 @@ return entity.printable_value(attrname, format='text/plain') elif col is None: # smart links disabled. def index_display(row): - return unicode(row) + return text_type(row) elif self._cw.vreg.schema.eschema(rset.description[0][col]).final: def index_display(row): - return unicode(rset[row][col]) + return text_type(rset[row][col]) else: def index_display(row): return rset.get_entity(row, col).view('text') diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/owl.py --- a/web/views/owl.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/owl.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,7 +19,9 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ + +from six.moves import range from logilab.mtconverter import TransformError, xml_escape @@ -166,7 +168,7 @@ def call(self): self.w(OWL_OPENING_ROOT % {'appid': self._cw.vreg.schema.name}) - for i in xrange(self.cw_rset.rowcount): + for i in range(self.cw_rset.rowcount): self.cell_call(i, 0) self.w(OWL_CLOSING_ROOT) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/plots.py --- a/web/views/plots.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/plots.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,10 @@ """basic plot views""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ + +from six import add_metaclass +from six.moves import range from logilab.common.date import datetime2ticks from logilab.common.deprecation import class_deprecated @@ -83,9 +86,10 @@ def _render(self, *args, **kwargs): raise NotImplementedError + +@add_metaclass(class_deprecated) class FlotPlotWidget(PlotWidget): """PlotRenderer widget using Flot""" - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.14] cubicweb.web.views.plots module is deprecated, use the jqplot cube instead' onload = u""" var fig = jQuery('#%(figid)s'); @@ -117,7 +121,7 @@ if req.ie_browser(): req.add_js('excanvas.js') req.add_js(('jquery.flot.js', 'cubicweb.flot.js')) - figid = u'figure%s' % req.varmaker.next() + figid = u'figure%s' % next(req.varmaker) plotdefs = [] plotdata = [] self.w(u'
            ' % @@ -137,8 +141,8 @@ 'dateformat': '"%s"' % fmt}) +@add_metaclass(class_deprecated) class PlotView(baseviews.AnyRsetView): - __metaclass__ = class_deprecated __deprecation_warning__ = '[3.14] cubicweb.web.views.plots module is deprecated, use the jqplot cube instead' __regid__ = 'plot' title = _('generic plot') @@ -154,7 +158,7 @@ abscissa = [row[0] for row in self.cw_rset] plots = [] nbcols = len(self.cw_rset.rows[0]) - for col in xrange(1, nbcols): + for col in range(1, nbcols): data = [row[col] for row in self.cw_rset] plots.append(filterout_nulls(abscissa, data)) plotwidget = FlotPlotWidget(varnames, plots, timemode=self.timemode) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/primary.py --- a/web/views/primary.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/primary.py Thu Jun 16 14:19:20 2016 +0200 @@ -38,7 +38,7 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from warnings import warn diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/pyviews.py --- a/web/views/pyviews.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/pyviews.py Thu Jun 16 14:19:20 2016 +0200 @@ -19,6 +19,9 @@ """ __docformat__ = "restructuredtext en" +from six import text_type +from six.moves import range + from cubicweb.view import View from cubicweb.predicates import match_kwargs from cubicweb.web.views import tableview @@ -38,7 +41,7 @@ w(self.empty_cell_content) def render_cell(self, w, rownum): - w(unicode(self.data[rownum][self.colid])) + w(text_type(self.data[rownum][self.colid])) class PyValTableView(tableview.TableMixIn, View): @@ -100,7 +103,7 @@ def build_column_renderers(self): return [self.column_renderer(colid) - for colid in xrange(len(self.pyvalue[0]))] + for colid in range(len(self.pyvalue[0]))] def facets_form(self, mainvar=None): return None # not supported diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/rdf.py --- a/web/views/rdf.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/rdf.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,9 @@ """base xml and rss views""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ + +from six.moves import range from yams import xy @@ -56,7 +58,7 @@ graph.bind('cw', CW) for prefix, xmlns in xy.XY.prefixes.items(): graph.bind(prefix, rdflib.Namespace(xmlns)) - for i in xrange(self.cw_rset.rowcount): + for i in range(self.cw_rset.rowcount): entity = self.cw_rset.complete_entity(i, 0) self.entity2graph(graph, entity) self.w(graph.serialize(format=self.format)) diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/reledit.py --- a/web/views/reledit.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/reledit.py Thu Jun 16 14:19:20 2016 +0200 @@ -20,7 +20,7 @@ """ __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ import copy from warnings import warn @@ -259,7 +259,7 @@ elif action == 'add': add_etype = self._compute_ttypes(rschema, role)[0] _new_entity = self._cw.vreg['etypes'].etype_class(add_etype)(self._cw) - _new_entity.eid = self._cw.varmaker.next() + _new_entity.eid = next(self._cw.varmaker) edit_entity = _new_entity # XXX see forms.py ~ 276 and entities.linked_to method # is there another way? 
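
# The hunks above and below replace py2-only bound-method calls such as
# req.varmaker.next() and iter(x).next() with the next() builtin, available
# since Python 2.6, so six needs no shim here.  A small sketch under that
# assumption; make_varmaker() is a hypothetical stand-in for the rql/request
# variable factories used in the patch:
from itertools import count

def make_varmaker():
    return (u'X%d' % i for i in count())

varmaker = make_varmaker()
assert next(varmaker) == u'X0'             # py2 spelling was varmaker.next()
assert next(iter([u'a', u'b'])) == u'a'    # replaces iter(...).next()
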
@@ -292,7 +292,7 @@ cwtarget='eformframe', cssclass='releditForm', **formargs) # pass reledit arguments - for pname, pvalue in event_args.iteritems(): + for pname, pvalue in event_args.items(): form.add_hidden('__reledit|' + pname, pvalue) # handle buttons if form.form_buttons: # edition, delete @@ -402,4 +402,3 @@ assert args['reload'].startswith('http') view = req.vreg['views'].select('reledit', req, rset=rset, rtype=args['rtype']) return self._call_view(view, **args) - diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/schema.py --- a/web/views/schema.py Thu Mar 24 09:43:25 2016 +0100 +++ b/web/views/schema.py Thu Jun 16 14:19:20 2016 +0200 @@ -18,7 +18,7 @@ """Specific views for schema related entities""" __docformat__ = "restructuredtext en" -_ = unicode +from cubicweb import _ from itertools import cycle @@ -26,6 +26,8 @@ import os, os.path as osp import codecs +from six import text_type + from logilab.common.graph import GraphGenerator, DotBackend from logilab.common.ureports import Section, Table from logilab.common.registry import yes @@ -114,7 +116,7 @@ def grouped_permissions_table(self, rschema): # group relation definitions with identical permissions perms = {} - for rdef in rschema.rdefs.itervalues(): + for rdef in rschema.rdefs.values(): rdef_perms = [] for action in rdef.ACTIONS: groups = sorted(rdef.get_groups(action)) @@ -131,7 +133,7 @@ _ = self._cw._ w(u'
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/schema.py
--- a/web/views/schema.py	Thu Mar 24 09:43:25 2016 +0100
+++ b/web/views/schema.py	Thu Jun 16 14:19:20 2016 +0200
@@ -18,7 +18,7 @@
 """Specific views for schema related entities"""
 __docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
 
 from itertools import cycle
@@ -26,6 +26,8 @@
 import os, os.path as osp
 import codecs
 
+from six import text_type
+
 from logilab.common.graph import GraphGenerator, DotBackend
 from logilab.common.ureports import Section, Table
 from logilab.common.registry import yes
@@ -114,7 +116,7 @@
     def grouped_permissions_table(self, rschema):
         # group relation definitions with identical permissions
         perms = {}
-        for rdef in rschema.rdefs.itervalues():
+        for rdef in rschema.rdefs.values():
             rdef_perms = []
             for action in rdef.ACTIONS:
                 groups = sorted(rdef.get_groups(action))
@@ -131,7 +133,7 @@
         _ = self._cw._
         w(u'')
         tmpl = u'%s %s %s'
-        for perm, rdefs in perms.iteritems():
+        for perm, rdefs in perms.items():
             w(u'%s' % u', '.join(
                 tmpl % (_(s.type), _(rschema.type), _(o.type))
                 for s, o in rdefs))
         # accessing rdef from previous loop by design: only used to get
@@ -279,7 +281,7 @@
     def cell_call(self, row, col):
         defaultval = self.cw_rset.rows[row][col]
         if defaultval is not None:
-            self.w(unicode(self.cw_rset.rows[row][col].unzpickle()))
+            self.w(text_type(self.cw_rset.rows[row][col].unzpickle()))
 
 
 class CWETypeRelationCardinalityCell(baseviews.FinalView):
     __regid__ = 'etype-rel-cardinality-cell'
@@ -487,7 +489,7 @@
         entity = self.cw_rset.get_entity(row, col)
         rschema = self._cw.vreg.schema.rschema(entity.rtype.name)
         rdef = rschema.rdefs[(entity.stype.name, entity.otype.name)]
-        constraints = [xml_escape(unicode(c)) for c in getattr(rdef, 'constraints')]
+        constraints = [xml_escape(text_type(c)) for c in getattr(rdef, 'constraints')]
         self.w(u'<br/>'.join(constraints))
 
 
 class CWAttributeOptionsCell(EntityView):
@@ -557,8 +559,9 @@
     def __init__(self, visitor, cw):
         self.visitor = visitor
         self.cw = cw
-        self.nextcolor = cycle( ('#ff7700', '#000000',
-                                 '#ebbc69', '#888888') ).next
+        self._cycle = iter(cycle(('#ff7700', '#000000', '#ebbc69', '#888888')))
+        self.nextcolor = lambda: next(self._cycle)
+
         self.colors = {}
 
     def node_properties(self, eschema):
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/sessions.py
--- a/web/views/sessions.py	Thu Mar 24 09:43:25 2016 +0100
+++ b/web/views/sessions.py	Thu Jun 16 14:19:20 2016 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
 # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
 #
 # This file is part of CubicWeb.
@@ -19,20 +19,27 @@
 __docformat__ = "restructuredtext en"
 
 from time import time
+from logging import getLogger
 
-from cubicweb import RepositoryError, Unauthorized, BadConnectionId
-from cubicweb.web import InvalidSession, component
+from logilab.common.registry import RegistrableObject, yes
+
+from cubicweb import RepositoryError, Unauthorized, set_log_methods
+from cubicweb.web import InvalidSession
+
+from cubicweb.web.views import authentication
 
 
-class AbstractSessionManager(component.Component):
+class AbstractSessionManager(RegistrableObject):
     """manage session data associated to a session identifier"""
     __abstract__ = True
+    __select__ = yes()
+    __registry__ = 'sessions'
     __regid__ = 'sessionmanager'
 
     def __init__(self, repo):
         vreg = repo.vreg
         self.session_time = vreg.config['http-session-time'] or None
-        self.authmanager = vreg['components'].select('authmanager', repo=repo)
+        self.authmanager = authentication.RepositoryAuthenticationManager(repo)
         interval = (self.session_time or 0) / 2.
         if vreg.config.anonymous_user()[0] is not None:
             self.cleanup_anon_session_time = vreg.config['cleanup-anonymous-session-time'] or 5 * 60
@@ -53,15 +60,7 @@
         closed, total = 0, 0
         for session in self.current_sessions():
             total += 1
-            try:
-                last_usage_time = session.cnx.check()
-            except AttributeError:
-                last_usage_time = session.mtime
-            except BadConnectionId:
-                self.close_session(session)
-                closed += 1
-                continue
-
+            last_usage_time = session.mtime
             no_use_time = (time() - last_usage_time)
             if session.anonymous_session:
                 if no_use_time >= self.cleanup_anon_session_time:
@@ -95,11 +94,14 @@
         raise NotImplementedError()
 
 
+set_log_methods(AbstractSessionManager, getLogger('cubicweb.sessionmanager'))
+
+
 class InMemoryRepositorySessionManager(AbstractSessionManager):
     """manage session data associated to a session identifier"""
 
     def __init__(self, *args, **kwargs):
-        AbstractSessionManager.__init__(self, *args, **kwargs)
+        super(InMemoryRepositorySessionManager, self).__init__(*args, **kwargs)
         # XXX require a RepositoryAuthenticationManager which violates
         #     authenticate interface by returning a session instead of a user
         #assert isinstance(self.authmanager, RepositoryAuthenticationManager)
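With the `cnx.check()` round-trip gone, the session manager's periodic cleanup above decides expiry from the session's own `mtime` alone. The decision reduces to something like the sketch below; `session`, `cleanup_session_time` and `cleanup_anon_session_time` stand in for the real objects and attributes:

    from time import time

    def should_close(session, cleanup_session_time, cleanup_anon_session_time):
        """return True if the session has been idle past its allowed lifetime"""
        no_use_time = time() - session.mtime
        if session.anonymous_session:
            return no_use_time >= cleanup_anon_session_time
        return (cleanup_session_time is not None
                and no_use_time >= cleanup_session_time)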
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/sparql.py
--- a/web/views/sparql.py	Thu Mar 24 09:43:25 2016 +0100
+++ b/web/views/sparql.py	Thu Jun 16 14:19:20 2016 +0200
@@ -18,7 +18,9 @@
 """SPARQL integration"""
 __docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
+
+from six.moves import range
 
 from yams import xy
 from rql import TypeResolverException
@@ -111,7 +113,7 @@
         rqlst = self.cw_rset.syntax_tree().children[0]
         varnames = [var.name for var in rqlst.selection]
         results = E.results()
-        for rowidx in xrange(len(self.cw_rset)):
+        for rowidx in range(len(self.cw_rset)):
             result = E.result()
             for colidx, varname in enumerate(varnames):
                 result.append(self.cell_binding(rowidx, colidx, varname))
@@ -140,4 +142,4 @@
 
 def registration_callback(vreg):
     if Sparql2rqlTranslator is not None:
-        vreg.register_all(globals().itervalues(), __name__)
+        vreg.register_all(globals().values(), __name__)
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/startup.py
--- a/web/views/startup.py	Thu Mar 24 09:43:25 2016 +0100
+++ b/web/views/startup.py	Thu Jun 16 14:19:20 2016 +0200
@@ -22,7 +22,7 @@
 """
 __docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
 
 from logilab.common.textutils import unormalize
 from logilab.common.deprecation import deprecated
@@ -106,7 +106,7 @@
 
     def entity_types_table(self, eschemas):
         infos = sorted(self.entity_types(eschemas),
-                       key=lambda (l,a,e): unormalize(l))
+                       key=lambda t: unormalize(t[0]))
         q, r = divmod(len(infos), 2)
         if r:
             infos.append( (None, ' ', ' ') )
@@ -172,4 +172,3 @@
     @deprecated('[3.11] display_folders method is deprecated, backport it if needed')
     def display_folders(self):
         return 'Folder' in self._cw.vreg.schema and self._cw.execute('Any COUNT(X) WHERE X is Folder')[0][0]
-
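The `key=lambda (l,a,e): unormalize(l)` rewrite in startup.py above is forced rather than cosmetic: tuple parameter unpacking was removed from function signatures by PEP 3113, so Python 3 rejects the old lambda as a syntax error. The replacement indexes into the single tuple argument; a standalone illustration with made-up data (`lower()` playing the role of `unormalize`):

    infos = [(u'Blog', 3, 'Blog'), (u'article', 12, 'BlogEntry')]

    # python 2 only, SyntaxError on python 3:
    #   sorted(infos, key=lambda (label, count, etype): label.lower())
    infos = sorted(infos, key=lambda t: t[0].lower())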
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/staticcontrollers.py
--- a/web/views/staticcontrollers.py	Thu Mar 24 09:43:25 2016 +0100
+++ b/web/views/staticcontrollers.py	Thu Jun 16 14:19:20 2016 +0200
@@ -33,7 +33,7 @@
 from logging import getLogger
 
 from cubicweb import Forbidden
-from cubicweb.web import NotFound
+from cubicweb.web import NotFound, Redirect
 from cubicweb.web.http_headers import generateDateTime
 from cubicweb.web.controller import Controller
 from cubicweb.web.views.urlrewrite import URLRewriter
@@ -66,9 +66,10 @@
         if not debugmode:
             # XXX: Don't provide additional resource information to error responses
             #
-            # the HTTP RFC recommands not going further than 1 year ahead
-            expires = datetime.now() + timedelta(days=6*30)
+            # the HTTP RFC recommends not going further than 1 year ahead
+            expires = datetime.now() + timedelta(seconds=self.max_age(path))
             self._cw.set_header('Expires', generateDateTime(mktime(expires.timetuple())))
+            self._cw.set_header('Cache-Control', 'max-age=%s' % self.max_age(path))
 
         # XXX system call to os.stats could be cached once and for all in
         # production mode (where static files are not expected to change)
@@ -140,7 +141,7 @@
         """return the filepath that will be used to cache concatenation of `paths`
         """
         _, ext = osp.splitext(paths[0])
-        fname = 'cache_concat_' + hashlib.md5(';'.join(paths)).hexdigest() + ext
+        fname = 'cache_concat_' + hashlib.md5((';'.join(paths)).encode('ascii')).hexdigest() + ext
         return osp.join(self.config.appdatahome, 'uicache', fname)
 
     def concat_cached_filepath(self, paths):
@@ -167,7 +168,7 @@
                     with open(osp.join(dirpath, rid), 'rb') as source:
                         for line in source:
                             f.write(line)
-                    f.write('\n')
+                    f.write(b'\n')
                 f.close()
             except:
                 os.remove(tmpfile)
@@ -200,11 +201,13 @@
             paths = relpath[len(self.data_modconcat_basepath):].split(',')
             filepath = self.concat_files_registry.concat_cached_filepath(paths)
         else:
-            # skip leading '/data/' and url params
-            if relpath.startswith(self.base_datapath):
-                prefix = self.base_datapath
-            else:
+            if not relpath.startswith(self.base_datapath):
+                # /data/foo, redirect to /data/{hash}/foo
                 prefix = 'data/'
+                relpath = relpath[len(prefix):]
+                raise Redirect(self._cw.data_url(relpath), 302)
+            # skip leading '/data/{hash}/' and url params
+            prefix = self.base_datapath
             relpath = relpath[len(prefix):]
             relpath = relpath.split('?', 1)[0]
         dirpath, rid = config.locate_resource(relpath)
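Two of the staticcontrollers.py hunks above come straight from Python 3's strict bytes/text split: `hashlib.md5()` only accepts bytes, hence the `.encode('ascii')` on the joined path list, and a file opened in binary mode must be fed `b'\n'` rather than `'\n'`. A standalone illustration (the file names are made up):

    import hashlib

    paths = ['cubicweb.css', 'cubes.blog.css']
    key = ';'.join(paths).encode('ascii')     # md5() rejects text on python 3
    fname = 'cache_concat_' + hashlib.md5(key).hexdigest() + '.css'

    with open(fname, 'wb') as f:              # binary mode, so write bytes
        f.write(b'/* concatenated stylesheets */\n')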
diff -r a4fcee1e9789 -r 19fcce6dc6d1 web/views/tableview.py
--- a/web/views/tableview.py	Thu Mar 24 09:43:25 2016 +0100
+++ b/web/views/tableview.py	Thu Jun 16 14:19:20 2016 +0200
@@ -42,7 +42,7 @@
 .. autoclass:: cubicweb.web.views.tableview.TableLayout
    :members:
 
-There is by default only on table layout, using the 'table_layout' identifier,
+There is by default only one table layout, using the 'table_layout' identifier,
 that is referenced by table views
 :attr:`cubicweb.web.views.tableview.TableMixIn.layout_id`.  If you want to
 customize the look and feel of your table, you can either replace the default
@@ -52,21 +52,24 @@
 Notice you can gives options to the layout using a `layout_args` dictionary on
 your class.
 
-If you can still find a view that suit your needs, you should take a look at the
+If you still can't find a view that suits your needs, you should take a look at the
 class below that is the common abstract base class for the three views defined
-above and implements you own class.
+above and implement your own class.
 
 .. autoclass:: cubicweb.web.views.tableview.TableMixIn
    :members:
 """
 
 __docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
 
 from warnings import warn
 from copy import copy
 from types import MethodType
 
+from six import string_types, add_metaclass, create_bound_method
+from six.moves import range
+
 from logilab.mtconverter import xml_escape
 from logilab.common.decorators import cachedproperty
 from logilab.common.deprecation import class_deprecated
@@ -162,7 +165,7 @@
 
     def __init__(self, req, view, **kwargs):
         super(TableLayout, self).__init__(req, **kwargs)
-        for key, val in self.cw_extra_kwargs.items():
+        for key, val in list(self.cw_extra_kwargs.items()):
             if hasattr(self.__class__, key) and not key[0] == '_':
                 setattr(self, key, val)
                 self.cw_extra_kwargs.pop(key)
@@ -225,7 +228,7 @@
     def render_table_body(self, w, colrenderers):
         w(u'<table class="%s">' % self.table_css)
         self.table_header(sample)
         self.w(u'<tbody>')
-        for row in xrange(self.cw_rset.rowcount):
+        for row in range(self.cw_rset.rowcount):
             self.cell_call(row=row, col=0)
         self.w(u'</tbody>')
         self.w(u'</table>')
@@ -1333,4 +1336,3 @@
             colname = self._cw._(column)
             self.w(u'<th>%s</th>' % colname)
-
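As the tableview.py docstring explains, the table views delegate their HTML generation to a layout object looked up through `layout_id`, 'table_layout' by default. Customizing a table's look therefore means registering a layout of your own and pointing the view at it; a hypothetical sketch (both `__regid__` identifiers are made up):

    from cubicweb.web.views import tableview

    class MyTableLayout(tableview.TableLayout):
        """custom layout: override rendering methods such as
        render_table_body to change the generated HTML"""
        __regid__ = 'mytable_layout'

    class MyRsetTableView(tableview.RsetTableView):
        """same data handling as RsetTableView, custom rendering"""
        __regid__ = 'mytable'
        layout_id = 'mytable_layout'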