--- a/__init__.py Thu Mar 06 15:55:33 2014 +0100
+++ b/__init__.py Thu Nov 12 10:52:28 2015 +0100
@@ -22,23 +22,21 @@
# ignore the pygments UserWarnings
import warnings
-import cPickle
import zlib
warnings.filterwarnings('ignore', category=UserWarning,
message='.*was already imported',
module='.*pygments')
-import __builtin__
-# '_' is available in builtins to mark internationalized string but should
-# not be used to do the actual translation
-if not hasattr(__builtin__, '_'):
- __builtin__._ = unicode
+from six import PY2, binary_type, text_type
+from six.moves import builtins
CW_SOFTWARE_ROOT = __path__[0]
import sys, os, logging
-from StringIO import StringIO
+from io import BytesIO
+
+from six.moves import cPickle as pickle
from logilab.common.deprecation import deprecated
from logilab.common.logging_ext import set_log_methods
@@ -56,6 +54,14 @@
from cubicweb._exceptions import *
from logilab.common.registry import ObjectNotFound, NoSelectableObject, RegistryNotFound
+
+# '_' is available to mark internationalized string but should not be used to
+# do the actual translation
+_ = text_type
+if not hasattr(builtins, '_'):
+ builtins._ = deprecated("[3.22] Use 'from cubicweb import _'")(_)
+
+
# convert eid to the right type, raise ValueError if it's not a valid eid
@deprecated('[3.17] typed_eid() was removed. replace it with int() when needed.')
def typed_eid(eid):
@@ -66,17 +72,19 @@
#import threading
#threading.settrace(log_thread)
-class Binary(StringIO):
- """customize StringIO to make sure we don't use unicode"""
- def __init__(self, buf=''):
- assert isinstance(buf, (str, buffer, bytearray)), \
- "Binary objects must use raw strings, not %s" % buf.__class__
- StringIO.__init__(self, buf)
+class Binary(BytesIO):
+ """class to hold binary data. Use BytesIO to prevent use of unicode data"""
+ _allowed_types = (binary_type, bytearray, buffer if PY2 else memoryview)
+
+ def __init__(self, buf=b''):
+ assert isinstance(buf, self._allowed_types), \
+ "Binary objects must use bytes/buffer objects, not %s" % buf.__class__
+ super(Binary, self).__init__(buf)
def write(self, data):
- assert isinstance(data, (str, buffer, bytearray)), \
- "Binary objects must use raw strings, not %s" % data.__class__
- StringIO.write(self, data)
+ assert isinstance(data, self._allowed_types), \
+ "Binary objects must use bytes/buffer objects, not %s" % data.__class__
+ super(Binary, self).write(data)
def to_file(self, fobj):
"""write a binary to disk
@@ -132,22 +140,22 @@
def zpickle(cls, obj):
""" return a Binary containing a gzipped pickle of obj """
retval = cls()
- retval.write(zlib.compress(cPickle.dumps(obj, protocol=2)))
+ retval.write(zlib.compress(pickle.dumps(obj, protocol=2)))
return retval
def unzpickle(self):
""" decompress and loads the stream before returning it """
- return cPickle.loads(zlib.decompress(self.getvalue()))
+ return pickle.loads(zlib.decompress(self.getvalue()))
def check_password(eschema, value):
- return isinstance(value, (str, Binary))
+ return isinstance(value, (binary_type, Binary))
BASE_CHECKERS['Password'] = check_password
def str_or_binary(value):
if isinstance(value, Binary):
return value
- return str(value)
+ return binary_type(value)
BASE_CONVERTERS['Password'] = str_or_binary
--- a/__pkginfo__.py Thu Mar 06 15:55:33 2014 +0100
+++ b/__pkginfo__.py Thu Nov 12 10:52:28 2015 +0100
@@ -22,8 +22,8 @@
modname = distname = "cubicweb"
-numversion = (3, 21, 2)
-version = '.'.join(str(num) for num in numversion)
+numversion = (3, 21, 99)
+version = '.'.join(str(num) for num in numversion) + '.dev0'
description = "a repository of entities / relations for knowledge management"
author = "Logilab"
@@ -39,6 +39,7 @@
]
__depends__ = {
+ 'six': '>= 1.4.0',
'logilab-common': '>= 0.63.1',
'logilab-mtconverter': '>= 0.8.0',
'rql': '>= 0.31.2',
--- a/_exceptions.py Thu Mar 06 15:55:33 2014 +0100
+++ b/_exceptions.py Thu Nov 12 10:52:28 2015 +0100
@@ -21,6 +21,8 @@
from warnings import warn
+from six import PY3, text_type
+
from logilab.common.decorators import cachedproperty
from yams import ValidationError
@@ -30,23 +32,24 @@
class CubicWebException(Exception):
"""base class for cubicweb server exception"""
msg = ""
- def __str__(self):
+ def __unicode__(self):
if self.msg:
if self.args:
return self.msg % tuple(self.args)
else:
return self.msg
else:
- return u' '.join(unicode(arg) for arg in self.args)
+ return u' '.join(text_type(arg) for arg in self.args)
+ __str__ = __unicode__ if PY3 else lambda self: self.__unicode__().encode('utf-8')
class ConfigurationError(CubicWebException):
"""a misconfiguration error"""
class InternalError(CubicWebException):
- """base class for exceptions which should not occurs"""
+ """base class for exceptions which should not occur"""
class SecurityError(CubicWebException):
- """base class for cubicweb server security exception"""
+ """base class for cubicweb server security exceptions"""
class RepositoryError(CubicWebException):
"""base class for repository exceptions"""
--- a/_gcdebug.py Thu Mar 06 15:55:33 2014 +0100
+++ b/_gcdebug.py Thu Nov 12 10:52:28 2015 +0100
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import print_function
import gc, types, weakref
@@ -68,7 +69,7 @@
except KeyError:
ocounters[key] = 1
if isinstance(obj, viewreferrersclasses):
- print ' ', obj, referrers(obj, showobjs, maxlevel)
+ print(' ', obj, referrers(obj, showobjs, maxlevel))
garbage = [repr(obj) for obj in gc.garbage]
return counters, ocounters, garbage
--- a/crypto.py Thu Mar 06 15:55:33 2014 +0100
+++ b/crypto.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,9 +18,10 @@
"""Simple cryptographic routines, based on python-crypto."""
__docformat__ = "restructuredtext en"
-from pickle import dumps, loads
from base64 import b64encode, b64decode
+from six.moves import cPickle as pickle
+
from Crypto.Cipher import Blowfish
@@ -34,7 +35,7 @@
def encrypt(data, seed):
- string = dumps(data)
+ string = pickle.dumps(data)
string = string + '*' * (8 - len(string) % 8)
string = b64encode(_cypherer(seed).encrypt(string))
return unicode(string)
@@ -43,4 +44,4 @@
def decrypt(string, seed):
# pickle ignores trailing characters so we do not need to strip them off
string = _cypherer(seed).decrypt(b64decode(string))
- return loads(string)
+ return pickle.loads(string)
--- a/cubicweb.spec Thu Mar 06 15:55:33 2014 +0100
+++ b/cubicweb.spec Thu Nov 12 10:52:28 2015 +0100
@@ -20,6 +20,7 @@
BuildArch: noarch
Requires: %{python}
+Requires: %{python}-six >= 1.4.0
Requires: %{python}-logilab-common >= 0.63.1
Requires: %{python}-logilab-mtconverter >= 0.8.0
Requires: %{python}-rql >= 0.31.2
--- a/cwconfig.py Thu Mar 06 15:55:33 2014 +0100
+++ b/cwconfig.py Thu Nov 12 10:52:28 2015 +0100
@@ -164,9 +164,9 @@
Directory where pid files will be written
"""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
-_ = unicode
import sys
import os
@@ -179,6 +179,8 @@
basename, isdir, dirname, splitext)
from warnings import warn, filterwarnings
+from six import text_type
+
from logilab.common.decorators import cached, classproperty
from logilab.common.deprecation import deprecated
from logilab.common.logging_ext import set_log_methods, init_log
@@ -186,7 +188,7 @@
ConfigurationMixIn, merge_options)
from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP,
- ConfigurationError, Binary)
+ ConfigurationError, Binary, _)
from cubicweb.toolsutils import create_dir
CONFIGURATIONS = []
@@ -350,7 +352,7 @@
}),
('umask',
{'type' : 'int',
- 'default': 077,
+ 'default': 0o077,
'help': 'permission umask for files created by the server',
'group': 'main', 'level': 2,
}),
@@ -503,7 +505,7 @@
deps = {}
else:
deps = dict( (x[len('cubicweb-'):], v)
- for x, v in gendeps.iteritems()
+ for x, v in gendeps.items()
if x.startswith('cubicweb-'))
for depcube in deps:
try:
@@ -650,7 +652,7 @@
self.adjust_sys_path()
self.load_defaults()
# will be properly initialized later by _gettext_init
- self.translations = {'en': (unicode, lambda ctx, msgid: unicode(msgid) )}
+ self.translations = {'en': (text_type, lambda ctx, msgid: text_type(msgid) )}
self._site_loaded = set()
# don't register ReStructured Text directives by simple import, avoid pb
# with eg sphinx.
@@ -960,7 +962,7 @@
i = 1
while exists(path) and i < 100: # arbitrary limit to avoid infinite loop
try:
- file(path, 'a')
+ open(path, 'a')
break
except IOError:
path = '%s-%s.log' % (basepath, i)
@@ -994,6 +996,13 @@
rtdir = abspath(os.environ.get('CW_RUNTIME_DIR', default))
return join(rtdir, '%s-%s.pid' % (self.appid, self.name))
+ # config -> repository
+
+ def repository(self, vreg=None):
+ from cubicweb.server.repository import Repository
+ from cubicweb.server.utils import TasksManager
+ return Repository(self, TasksManager(), vreg=vreg)
+
# instance methods used to get instance specific resources #############
def __init__(self, appid, debugmode=False, creating=False):
@@ -1001,7 +1010,7 @@
# set to true while creating an instance
self.creating = creating
super(CubicWebConfiguration, self).__init__(debugmode)
- fake_gettext = (unicode, lambda ctx, msgid: unicode(msgid))
+ fake_gettext = (text_type, lambda ctx, msgid: text_type(msgid))
for lang in self.available_languages():
self.translations[lang] = fake_gettext
self._cubes = None
@@ -1101,7 +1110,7 @@
version = self.cube_version(pkg)
infos.append('%s-%s' % (pkg, version))
infos.append('cubicweb-%s' % str(self.cubicweb_version()))
- return md5(';'.join(infos)).hexdigest()
+ return md5((';'.join(infos)).encode('ascii')).hexdigest()
def load_configuration(self, **kw):
"""load instance's configuration files"""
@@ -1156,7 +1165,7 @@
def _gettext_init(self):
"""set language for gettext"""
- from cubicweb.gettext import translation
+ from cubicweb.cwgettext import translation
path = join(self.apphome, 'i18n')
for language in self.available_languages():
self.info("loading language %s", language)
@@ -1181,13 +1190,8 @@
def set_sources_mode(self, sources):
if not 'all' in sources:
- print 'warning: ignoring specified sources, requires a repository '\
- 'configuration'
-
- def migration_handler(self):
- """return a migration handler instance"""
- from cubicweb.migration import MigrationHelper
- return MigrationHelper(self, verbosity=self.verbosity)
+ print('warning: ignoring specified sources, requires a repository '
+ 'configuration')
def i18ncompile(self, langs=None):
from cubicweb import i18n
--- a/cwctl.py Thu Mar 06 15:55:33 2014 +0100
+++ b/cwctl.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,6 +18,7 @@
"""the cubicweb-ctl tool, based on logilab.common.clcommands to
provide a pluggable commands system.
"""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
@@ -28,7 +29,6 @@
from warnings import warn, filterwarnings
from os import remove, listdir, system, pathsep
from os.path import exists, join, isfile, isdir, dirname, abspath
-from urlparse import urlparse
try:
from os import kill, getpgid
@@ -38,6 +38,8 @@
def getpgid():
"""win32 getpgid implementation"""
+from six.moves.urllib.parse import urlparse
+
from logilab.common.clcommands import CommandLine
from logilab.common.shellutils import ASK
from logilab.common.configuration import merge_options
@@ -113,15 +115,15 @@
_allinstances = list_instances(regdir)
if isfile(join(regdir, 'startorder')):
allinstances = []
- for line in file(join(regdir, 'startorder')):
+ for line in open(join(regdir, 'startorder')):
line = line.strip()
if line and not line.startswith('#'):
try:
_allinstances.remove(line)
allinstances.append(line)
except ValueError:
- print ('ERROR: startorder file contains unexistant '
- 'instance %s' % line)
+                    print('ERROR: startorder file contains nonexistent '
+                          'instance %s' % line)
allinstances += _allinstances
else:
allinstances = _allinstances
@@ -146,7 +148,7 @@
status = 0
for appid in args:
if askconfirm:
- print '*'*72
+ print('*'*72)
if not ASK.confirm('%s instance %r ?' % (self.name, appid)):
continue
try:
@@ -184,13 +186,13 @@
forkcmd = None
for appid in args:
if askconfirm:
- print '*'*72
+ print('*'*72)
if not ASK.confirm('%s instance %r ?' % (self.name, appid)):
continue
if forkcmd:
status = system('%s %s' % (forkcmd, appid))
if status:
- print '%s exited with status %s' % (forkcmd, status)
+ print('%s exited with status %s' % (forkcmd, status))
else:
self.run_arg(appid)
@@ -224,19 +226,19 @@
from cubicweb.migration import ConfigurationProblem
if mode == 'all':
- print 'CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode)
- print
+ print('CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode))
+ print()
if mode in ('all', 'config', 'configurations'):
- print 'Available configurations:'
+ print('Available configurations:')
for config in CONFIGURATIONS:
- print '*', config.name
+ print('*', config.name)
for line in config.__doc__.splitlines():
line = line.strip()
if not line:
continue
- print ' ', line
- print
+ print(' ', line)
+ print()
if mode in ('all', 'cubes'):
cfgpb = ConfigurationProblem(cwcfg)
@@ -244,11 +246,11 @@
cubesdir = pathsep.join(cwcfg.cubes_search_path())
namesize = max(len(x) for x in cwcfg.available_cubes())
except ConfigurationError as ex:
- print 'No cubes available:', ex
+ print('No cubes available:', ex)
except ValueError:
- print 'No cubes available in %s' % cubesdir
+ print('No cubes available in %s' % cubesdir)
else:
- print 'Available cubes (%s):' % cubesdir
+ print('Available cubes (%s):' % cubesdir)
for cube in cwcfg.available_cubes():
try:
tinfo = cwcfg.cube_pkginfo(cube)
@@ -257,59 +259,59 @@
except (ConfigurationError, AttributeError) as ex:
tinfo = None
tversion = '[missing cube information: %s]' % ex
- print '* %s %s' % (cube.ljust(namesize), tversion)
+ print('* %s %s' % (cube.ljust(namesize), tversion))
if self.config.verbose:
if tinfo:
descr = getattr(tinfo, 'description', '')
if not descr:
descr = tinfo.__doc__
if descr:
- print ' '+ ' \n'.join(descr.splitlines())
+ print(' '+ ' \n'.join(descr.splitlines()))
modes = detect_available_modes(cwcfg.cube_dir(cube))
- print ' available modes: %s' % ', '.join(modes)
- print
+ print(' available modes: %s' % ', '.join(modes))
+ print()
if mode in ('all', 'instances'):
try:
regdir = cwcfg.instances_dir()
except ConfigurationError as ex:
- print 'No instance available:', ex
- print
+ print('No instance available:', ex)
+ print()
return
instances = list_instances(regdir)
if instances:
- print 'Available instances (%s):' % regdir
+ print('Available instances (%s):' % regdir)
for appid in instances:
modes = cwcfg.possible_configurations(appid)
if not modes:
- print '* %s (BROKEN instance, no configuration found)' % appid
+ print('* %s (BROKEN instance, no configuration found)' % appid)
continue
- print '* %s (%s)' % (appid, ', '.join(modes))
+ print('* %s (%s)' % (appid, ', '.join(modes)))
try:
config = cwcfg.config_for(appid, modes[0])
except Exception as exc:
- print ' (BROKEN instance, %s)' % exc
+ print(' (BROKEN instance, %s)' % exc)
continue
else:
- print 'No instance available in %s' % regdir
- print
+ print('No instance available in %s' % regdir)
+ print()
if mode == 'all':
# configuration management problem solving
cfgpb.solve()
if cfgpb.warnings:
- print 'Warnings:\n', '\n'.join('* '+txt for txt in cfgpb.warnings)
+ print('Warnings:\n', '\n'.join('* '+txt for txt in cfgpb.warnings))
if cfgpb.errors:
- print 'Errors:'
+ print('Errors:')
for op, cube, version, src in cfgpb.errors:
if op == 'add':
- print '* cube', cube,
+ print('* cube', cube, end=' ')
if version:
- print ' version', version,
- print 'is not installed, but required by %s' % src
+ print(' version', version, end=' ')
+ print('is not installed, but required by %s' % src)
else:
- print '* cube %s version %s is installed, but version %s is required by %s' % (
- cube, cfgpb.cubes[cube], version, src)
+ print('* cube %s version %s is installed, but version %s is required by %s' % (
+ cube, cfgpb.cubes[cube], version, src))
def check_options_consistency(config):
if config.automatic and config.config_level > 0:
@@ -380,20 +382,20 @@
templdirs = [cwcfg.cube_dir(cube)
for cube in cubes]
except ConfigurationError as ex:
- print ex
- print '\navailable cubes:',
- print ', '.join(cwcfg.available_cubes())
+ print(ex)
+ print('\navailable cubes:', end=' ')
+ print(', '.join(cwcfg.available_cubes()))
return
# create the registry directory for this instance
- print '\n'+underline_title('Creating the instance %s' % appid)
+ print('\n'+underline_title('Creating the instance %s' % appid))
create_dir(config.apphome)
# cubicweb-ctl configuration
if not self.config.automatic:
- print '\n'+underline_title('Configuring the instance (%s.conf)'
- % configname)
+ print('\n'+underline_title('Configuring the instance (%s.conf)'
+ % configname))
config.input_config('main', self.config.config_level)
# configuration'specific stuff
- print
+ print()
helper.bootstrap(cubes, self.config.automatic, self.config.config_level)
# input for cubes specific options
if not self.config.automatic:
@@ -402,23 +404,23 @@
and odict.get('level') <= self.config.config_level)
for section in sections:
if section not in ('main', 'email', 'web'):
- print '\n' + underline_title('%s options' % section)
+ print('\n' + underline_title('%s options' % section))
config.input_config(section, self.config.config_level)
# write down configuration
config.save()
self._handle_win32(config, appid)
- print '-> generated config %s' % config.main_config_file()
+ print('-> generated config %s' % config.main_config_file())
# handle i18n files structure
# in the first cube given
from cubicweb import i18n
langs = [lang for lang, _ in i18n.available_catalogs(join(templdirs[0], 'i18n'))]
errors = config.i18ncompile(langs)
if errors:
- print '\n'.join(errors)
+ print('\n'.join(errors))
if self.config.automatic \
or not ASK.confirm('error while compiling message catalogs, '
'continue anyway ?'):
- print 'creation not completed'
+ print('creation not completed')
return
# create the additional data directory for this instance
if config.appdatahome != config.apphome: # true in dev mode
@@ -427,9 +429,9 @@
if config['uid']:
from logilab.common.shellutils import chown
# this directory should be owned by the uid of the server process
- print 'set %s as owner of the data directory' % config['uid']
+ print('set %s as owner of the data directory' % config['uid'])
chown(config.appdatahome, config['uid'])
- print '\n-> creation done for %s\n' % repr(config.apphome)[1:-1]
+ print('\n-> creation done for %s\n' % repr(config.apphome)[1:-1])
if not self.config.no_db_create:
helper.postcreate(self.config.automatic, self.config.config_level)
@@ -487,7 +489,7 @@
if ex.errno != errno.ENOENT:
raise
confignames = ', '.join([config.name for config in configs])
- print '-> instance %s (%s) deleted.' % (appid, confignames)
+ print('-> instance %s (%s) deleted.' % (appid, confignames))
# instance commands ########################################################
@@ -551,7 +553,7 @@
the --force option."
raise ExecutionError(msg % (appid, pidf))
if helper.start_server(config) == 1:
- print 'instance %s started' % appid
+ print('instance %s started' % appid)
def init_cmdline_log_threshold(config, loglevel):
@@ -606,7 +608,7 @@
except OSError:
# already removed by twistd
pass
- print 'instance %s stopped' % appid
+ print('instance %s stopped' % appid)
class RestartInstanceCommand(StartInstanceCommand):
@@ -630,7 +632,7 @@
# get instances in startorder
for appid in args:
if askconfirm:
- print '*'*72
+ print('*'*72)
if not ASK.confirm('%s instance %r ?' % (self.name, appid)):
continue
StopInstanceCommand(self.logger).stop_instance(appid)
@@ -677,14 +679,14 @@
status = 0
for mode in cwcfg.possible_configurations(appid):
config = cwcfg.config_for(appid, mode)
- print '[%s-%s]' % (appid, mode),
+ print('[%s-%s]' % (appid, mode), end=' ')
try:
pidf = config['pid-file']
except KeyError:
- print 'buggy instance, pid file not specified'
+ print('buggy instance, pid file not specified')
continue
if not exists(pidf):
- print "doesn't seem to be running"
+ print("doesn't seem to be running")
status = 1
continue
pid = int(open(pidf).read().strip())
@@ -692,10 +694,10 @@
try:
getpgid(pid)
except OSError:
- print "should be running with pid %s but the process can not be found" % pid
+ print("should be running with pid %s but the process can not be found" % pid)
status = 1
continue
- print "running with pid %s" % (pid)
+ print("running with pid %s" % (pid))
return status
class UpgradeInstanceCommand(InstanceCommandFork):
@@ -756,7 +758,7 @@
)
def upgrade_instance(self, appid):
- print '\n' + underline_title('Upgrading the instance %s' % appid)
+ print('\n' + underline_title('Upgrading the instance %s' % appid))
from logilab.common.changelog import Version
config = cwcfg.config_for(appid)
instance_running = exists(config['pid-file'])
@@ -767,11 +769,11 @@
set_sources_mode(self.config.ext_sources or ('migration',))
# get instance and installed versions for the server and the componants
mih = config.migration_handler()
- repo = mih.repo_connect()
+ repo = mih.repo
vcconf = repo.get_versions()
helper = self.config_helper(config, required=False)
if self.config.force_cube_version:
- for cube, version in self.config.force_cube_version.iteritems():
+ for cube, version in self.config.force_cube_version.items():
vcconf[cube] = Version(version)
toupgrade = []
for cube in config.cubes():
@@ -797,30 +799,30 @@
# run cubicweb/componants migration scripts
if self.config.fs_only or toupgrade:
for cube, fromversion, toversion in toupgrade:
- print '-> migration needed from %s to %s for %s' % (fromversion, toversion, cube)
+ print('-> migration needed from %s to %s for %s' % (fromversion, toversion, cube))
with mih.cnx:
with mih.cnx.security_enabled(False, False):
mih.migrate(vcconf, reversed(toupgrade), self.config)
else:
- print '-> no data migration needed for instance %s.' % appid
+ print('-> no data migration needed for instance %s.' % appid)
# rewrite main configuration file
mih.rewrite_configuration()
mih.shutdown()
# handle i18n upgrade
if not self.i18nupgrade(config):
return
- print
+ print()
if helper:
helper.postupgrade(repo)
- print '-> instance migrated.'
+ print('-> instance migrated.')
if instance_running and not (CWDEV or self.config.nostartstop):
# restart instance through fork to get a proper environment, avoid
# uicfg pb (and probably gettext catalogs, to check...)
forkcmd = '%s start %s' % (sys.argv[0], appid)
status = system(forkcmd)
if status:
- print '%s exited with status %s' % (forkcmd, status)
- print
+ print('%s exited with status %s' % (forkcmd, status))
+ print()
def i18nupgrade(self, config):
# handle i18n upgrade:
@@ -832,10 +834,10 @@
langs = [lang for lang, _ in i18n.available_catalogs(join(templdir, 'i18n'))]
errors = config.i18ncompile(langs)
if errors:
- print '\n'.join(errors)
+ print('\n'.join(errors))
if not ASK.confirm('Error while compiling message catalogs, '
'continue anyway?'):
- print '-> migration not completed.'
+ print('-> migration not completed.')
return False
return True
@@ -856,10 +858,9 @@
config.quick_start = True
if hasattr(config, 'set_sources_mode'):
config.set_sources_mode(('migration',))
- repo = config.migration_handler().repo_connect()
- vcconf = repo.get_versions()
+ vcconf = config.repository().get_versions()
for key in sorted(vcconf):
- print key+': %s.%s.%s' % vcconf[key]
+ print(key+': %s.%s.%s' % vcconf[key])
class ShellCommand(Command):
"""Run an interactive migration shell on an instance. This is a python shell
@@ -940,9 +941,9 @@
repo = get_repository(appuri)
cnx = connect(repo, login=login, password=pwd, mulcnx=False)
except AuthenticationError as ex:
- print ex
+ print(ex)
except (KeyboardInterrupt, EOFError):
- print
+ print()
sys.exit(0)
else:
break
@@ -1003,7 +1004,7 @@
config.init_cubes(repo.get_cubes())
errors = config.i18ncompile()
if errors:
- print '\n'.join(errors)
+ print('\n'.join(errors))
class ListInstancesCommand(Command):
@@ -1015,7 +1016,7 @@
"""run the command with its specific arguments"""
regdir = cwcfg.instances_dir()
for appid in sorted(listdir(regdir)):
- print appid
+ print(appid)
class ListCubesCommand(Command):
@@ -1026,7 +1027,7 @@
def run(self, args):
"""run the command with its specific arguments"""
for cube in cwcfg.available_cubes():
- print cube
+ print(cube)
class ConfigureInstanceCommand(InstanceCommand):
"""Configure instance.
@@ -1048,7 +1049,7 @@
def configure_instance(self, appid):
if self.config.param is not None:
appcfg = cwcfg.config_for(appid)
- for key, value in self.config.param.iteritems():
+ for key, value in self.config.param.items():
try:
appcfg.global_set_option(key, value)
except KeyError:
@@ -1138,17 +1139,15 @@
def run(args):
"""command line tool"""
import os
- sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
- sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 0)
filterwarnings('default', category=DeprecationWarning)
cwcfg.load_cwctl_plugins()
try:
CWCTL.run(args)
except ConfigurationError as err:
- print 'ERROR: ', err
+ print('ERROR: ', err)
sys.exit(1)
except ExecutionError as err:
- print err
+ print(err)
sys.exit(2)
if __name__ == '__main__':
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cwgettext.py Thu Nov 12 10:52:28 2015 +0100
@@ -0,0 +1,118 @@
+# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+
+import gettext
+import locale
+
+class cwGNUTranslations(gettext.GNUTranslations):
+ # The encoding of a msgctxt and a msgid in a .mo file is
+ # msgctxt + "\x04" + msgid (gettext version >= 0.15)
+ CONTEXT_ENCODING = "%s\x04%s"
+
+ def pgettext(self, context, message):
+ ctxt_msg_id = self.CONTEXT_ENCODING % (context, message)
+ missing = object()
+ tmsg = self._catalog.get(ctxt_msg_id, missing)
+ if tmsg is missing:
+ if self._fallback:
+ return self._fallback.pgettext(context, message)
+ return message
+ # Encode the Unicode tmsg back to an 8-bit string, if possible
+ if self._output_charset:
+ return tmsg.encode(self._output_charset)
+ elif self._charset:
+ return tmsg.encode(self._charset)
+ return tmsg
+
+ def lpgettext(self, context, message):
+ ctxt_msg_id = self.CONTEXT_ENCODING % (context, message)
+ missing = object()
+ tmsg = self._catalog.get(ctxt_msg_id, missing)
+ if tmsg is missing:
+ if self._fallback:
+ return self._fallback.lpgettext(context, message)
+ return message
+ if self._output_charset:
+ return tmsg.encode(self._output_charset)
+ return tmsg.encode(locale.getpreferredencoding())
+
+ def npgettext(self, context, msgid1, msgid2, n):
+ ctxt_msg_id = self.CONTEXT_ENCODING % (context, msgid1)
+ try:
+ tmsg = self._catalog[(ctxt_msg_id, self.plural(n))]
+ if self._output_charset:
+ return tmsg.encode(self._output_charset)
+ elif self._charset:
+ return tmsg.encode(self._charset)
+ return tmsg
+ except KeyError:
+ if self._fallback:
+ return self._fallback.npgettext(context, msgid1, msgid2, n)
+ if n == 1:
+ return msgid1
+ else:
+ return msgid2
+
+ def lnpgettext(self, context, msgid1, msgid2, n):
+ ctxt_msg_id = self.CONTEXT_ENCODING % (context, msgid1)
+ try:
+ tmsg = self._catalog[(ctxt_msg_id, self.plural(n))]
+ if self._output_charset:
+ return tmsg.encode(self._output_charset)
+ return tmsg.encode(locale.getpreferredencoding())
+ except KeyError:
+ if self._fallback:
+ return self._fallback.lnpgettext(context, msgid1, msgid2, n)
+ if n == 1:
+ return msgid1
+ else:
+ return msgid2
+
+    def upgettext(self, context, message):
+        """Return the unicode translation of *message* in *context*.
+
+        Compat with catalogs generated by cw < 3.5 (logilab patch): when
+        the contextualized msgid is absent, fall back to plain ugettext."""
+        ctxt_message_id = self.CONTEXT_ENCODING % (context, message)
+        missing = object()
+        tmsg = self._catalog.get(ctxt_message_id, missing)
+        if tmsg is missing:
+            return self.ugettext(message)
+        return tmsg
+
+ def unpgettext(self, context, msgid1, msgid2, n):
+ ctxt_message_id = self.CONTEXT_ENCODING % (context, msgid1)
+ try:
+ tmsg = self._catalog[(ctxt_message_id, self.plural(n))]
+ except KeyError:
+ if self._fallback:
+ return self._fallback.unpgettext(context, msgid1, msgid2, n)
+ if n == 1:
+ tmsg = unicode(msgid1)
+ else:
+ tmsg = unicode(msgid2)
+ return tmsg
+
+
+def translation(domain, localedir=None, languages=None,
+ class_=None, fallback=False, codeset=None):
+ if class_ is None:
+ class_ = cwGNUTranslations
+ return gettext.translation(domain, localedir=localedir,
+ languages=languages, class_=class_,
+ fallback=fallback, codeset=codeset)
--- a/cwvreg.py Thu Mar 06 15:55:33 2014 +0100
+++ b/cwvreg.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,7 +20,7 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
import sys
from os.path import join, dirname, realpath
@@ -28,6 +28,8 @@
from datetime import datetime, date, time, timedelta
from functools import reduce
+from six import text_type, binary_type
+
from logilab.common.decorators import cached, clear_cache
from logilab.common.deprecation import deprecated, class_deprecated
from logilab.common.modutils import cleanup_sys_modules
@@ -221,9 +223,9 @@
"""
obj = self.select(oid, req, rset=rset, **kwargs)
res = obj.render(**kwargs)
- if isinstance(res, unicode):
+ if isinstance(res, text_type):
return res.encode(req.encoding)
- assert isinstance(res, str)
+ assert isinstance(res, binary_type)
return res
def possible_views(self, req, rset=None, **kwargs):
@@ -382,7 +384,7 @@
return [item for item in super(CWRegistryStore, self).items()
if not item[0] in ('propertydefs', 'propertyvalues')]
def iteritems(self):
- return (item for item in super(CWRegistryStore, self).iteritems()
+ return (item for item in super(CWRegistryStore, self).items()
if not item[0] in ('propertydefs', 'propertyvalues'))
def values(self):
@@ -492,7 +494,7 @@
"""
self.schema = schema
for registry, regcontent in self.items():
- for objects in regcontent.itervalues():
+ for objects in regcontent.values():
for obj in objects:
obj.schema = schema
@@ -543,7 +545,7 @@
self.unregister(obj)
super(CWRegistryStore, self).initialization_completed()
if 'uicfg' in self: # 'uicfg' is not loaded in a pure repository mode
- for rtags in self['uicfg'].itervalues():
+ for rtags in self['uicfg'].values():
for rtag in rtags:
# don't check rtags if we don't want to cleanup_unused_appobjects
rtag.init(self.schema, check=self.config.cleanup_unused_appobjects)
@@ -576,7 +578,7 @@
if withsitewide:
return sorted(k for k in self['propertydefs']
if not k.startswith('sources.'))
- return sorted(k for k, kd in self['propertydefs'].iteritems()
+ return sorted(k for k, kd in self['propertydefs'].items()
if not kd['sitewide'] and not k.startswith('sources.'))
def register_property(self, key, type, help, default=None, vocabulary=None,
--- a/dataimport/csv.py Thu Mar 06 15:55:33 2014 +0100
+++ b/dataimport/csv.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,18 +16,20 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""Functions to help importing CSV data"""
+from __future__ import absolute_import, print_function
-from __future__ import absolute_import
-
+import codecs
import csv as csvmod
import warnings
import os.path as osp
+from six import PY2, PY3, string_types
+
from logilab.common import shellutils
def count_lines(stream_or_filename):
- if isinstance(stream_or_filename, basestring):
+ if isinstance(stream_or_filename, string_types):
f = open(stream_or_filename)
else:
f = stream_or_filename
@@ -48,10 +50,8 @@
if quote is not None:
quotechar = quote
warnings.warn("[3.20] 'quote' kwarg is deprecated, use 'quotechar' instead")
- if isinstance(stream_or_path, basestring):
- if not osp.exists(stream_or_path):
- raise Exception("file doesn't exists: %s" % stream_or_path)
- stream = open(stream_or_path)
+ if isinstance(stream_or_path, string_types):
+ stream = open(stream_or_path, 'rb')
else:
stream = stream_or_path
rowcount = count_lines(stream)
@@ -64,7 +64,7 @@
yield urow
if withpb:
pb.update()
- print ' %s rows imported' % rowcount
+ print(' %s rows imported' % rowcount)
def ucsvreader(stream, encoding='utf-8', delimiter=',', quotechar='"',
@@ -77,6 +77,8 @@
separators) will be skipped. This is useful for Excel exports which may be
full of such lines.
"""
+ if PY3:
+ stream = codecs.getreader(encoding)(stream)
if separator is not None:
delimiter = separator
warnings.warn("[3.20] 'separator' kwarg is deprecated, use 'delimiter' instead")
@@ -86,28 +88,34 @@
it = iter(csvmod.reader(stream, delimiter=delimiter, quotechar=quotechar))
if not ignore_errors:
if skipfirst:
- it.next()
+ next(it)
for row in it:
- decoded = [item.decode(encoding) for item in row]
+ if PY2:
+ decoded = [item.decode(encoding) for item in row]
+ else:
+ decoded = row
if not skip_empty or any(decoded):
yield decoded
else:
if skipfirst:
try:
- row = it.next()
+ row = next(it)
except csvmod.Error:
pass
# Safe version, that can cope with error in CSV file
while True:
try:
- row = it.next()
+ row = next(it)
# End of CSV, break
except StopIteration:
break
# Error in CSV, ignore line and continue
except csvmod.Error:
continue
- decoded = [item.decode(encoding) for item in row]
+ if PY2:
+ decoded = [item.decode(encoding) for item in row]
+ else:
+ decoded = row
if not skip_empty or any(decoded):
yield decoded
--- a/dataimport/deprecated.py Thu Mar 06 15:55:33 2014 +0100
+++ b/dataimport/deprecated.py Thu Nov 12 10:52:28 2015 +0100
@@ -58,10 +58,13 @@
.. BUG file with one column are not parsable
.. TODO rollback() invocation is not possible yet
"""
+from __future__ import print_function
import sys
import traceback
-from StringIO import StringIO
+from io import StringIO
+
+from six import add_metaclass
from logilab.common import attrdict, shellutils
from logilab.common.date import strptime
@@ -78,7 +81,7 @@
>>> data = lazytable(ucsvreader(open(filename)))
"""
- header = reader.next()
+ header = next(reader)
for row in reader:
yield dict(zip(header, row))
@@ -103,7 +106,7 @@
@deprecated('[3.21] deprecated')
def tell(msg):
- print msg
+ print(msg)
@deprecated('[3.21] deprecated')
@@ -115,9 +118,9 @@
return answer == 'Y'
+@add_metaclass(class_deprecated)
class catch_error(object):
"""Helper for @contextmanager decorator."""
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.21] deprecated'
def __init__(self, ctl, key='unexpected error', msg=None):
@@ -166,7 +169,9 @@
if res[dest] is None:
break
except ValueError as err:
- raise ValueError('error with %r field: %s' % (src, err)), None, sys.exc_info()[-1]
+ exc = ValueError('error with %r field: %s' % (src, err))
+ exc.__traceback__ = sys.exc_info()[-1]
+ raise exc
return res
@@ -254,6 +259,7 @@
if k is not None and len(v) > 1]
+@add_metaclass(class_deprecated)
class ObjectStore(object):
"""Store objects in memory for *faster* validation (development mode)
@@ -264,7 +270,6 @@
>>> group = store.prepare_insert_entity('CWUser', name=u'unknown')
>>> store.prepare_insert_relation(user, 'in_group', group)
"""
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.21] use the new importer API'
def __init__(self):
@@ -335,6 +340,7 @@
self.prepare_insert_relation(eid_from, rtype, eid_to, **kwargs)
+@add_metaclass(class_deprecated)
class CWImportController(object):
"""Controller of the data import process.
@@ -343,7 +349,6 @@
>>> ctl.data = dict_of_data_tables
>>> ctl.run()
"""
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.21] use the new importer API'
def __init__(self, store, askerror=0, catcherrors=None, tell=tell,
@@ -421,7 +426,7 @@
self.tell(pformat(sorted(error[1])))
def _print_stats(self):
- nberrors = sum(len(err) for err in self.errors.itervalues())
+ nberrors = sum(len(err) for err in self.errors.values())
self.tell('\nImport statistics: %i entities, %i types, %i relations and %i errors'
% (self.store.nb_inserted_entities,
self.store.nb_inserted_types,
--- a/dataimport/importer.py Thu Mar 06 15:55:33 2014 +0100
+++ b/dataimport/importer.py Thu Nov 12 10:52:28 2015 +0100
@@ -69,7 +69,7 @@
def use_extid_as_cwuri_filter(extentities):
for extentity in extentities:
if extentity.extid not in extid2eid:
- extentity.values.setdefault('cwuri', set([unicode(extentity.extid)]))
+ extentity.values.setdefault('cwuri', set([extentity.extid.decode('utf-8')]))
yield extentity
return use_extid_as_cwuri_filter
--- a/dataimport/pgstore.py Thu Mar 06 15:55:33 2014 +0100
+++ b/dataimport/pgstore.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,17 +16,20 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""Postgres specific store"""
+from __future__ import print_function
import threading
import warnings
-import cPickle
import os.path as osp
-from StringIO import StringIO
+from io import StringIO
from time import asctime
from datetime import date, datetime, time
from collections import defaultdict
from base64 import b64encode
+from six import string_types, integer_types, text_type
+from six.moves import cPickle as pickle, range
+
from cubicweb.utils import make_uid
from cubicweb.server.sqlutils import SQL_PREFIX
from cubicweb.dataimport.stores import NoHookRQLObjectStore
@@ -40,7 +43,7 @@
try:
chunksize = (len(statements) / nb_threads) + 1
threads = []
- for i in xrange(nb_threads):
+ for i in range(nb_threads):
chunks = statements[i*chunksize:(i+1)*chunksize]
thread = threading.Thread(target=_execmany_thread,
args=(sql_connect, chunks,
@@ -52,7 +55,7 @@
for t in threads:
t.join()
except Exception:
- print 'Error in import statements'
+ print('Error in import statements')
def _execmany_thread_not_copy_from(cu, statement, data, table=None,
columns=None, encoding='utf-8'):
@@ -69,9 +72,9 @@
_execmany_thread_not_copy_from(cu, statement, data)
else:
if columns is None:
- cu.copy_from(buf, table, null='NULL')
+ cu.copy_from(buf, table, null=u'NULL')
else:
- cu.copy_from(buf, table, null='NULL', columns=columns)
+ cu.copy_from(buf, table, null=u'NULL', columns=columns)
def _execmany_thread(sql_connect, statements, dump_output_dir=None,
support_copy_from=True, encoding='utf-8'):
@@ -100,7 +103,7 @@
columns = list(data[0])
execmany_func(cu, statement, data, table, columns, encoding)
except Exception:
- print 'unable to copy data into table %s' % table
+ print('unable to copy data into table %s' % table)
# Error in import statement, save data in dump_output_dir
if dump_output_dir is not None:
pdata = {'data': data, 'statement': statement,
@@ -108,11 +111,10 @@
filename = make_uid()
try:
with open(osp.join(dump_output_dir,
- '%s.pickle' % filename), 'w') as fobj:
- fobj.write(cPickle.dumps(pdata))
+ '%s.pickle' % filename), 'wb') as fobj:
+ pickle.dump(pdata, fobj)
except IOError:
- print 'ERROR while pickling in', dump_output_dir, filename+'.pickle'
- pass
+ print('ERROR while pickling in', dump_output_dir, filename+'.pickle')
cnx.rollback()
raise
finally:
@@ -122,50 +124,44 @@
def _copyfrom_buffer_convert_None(value, **opts):
'''Convert None value to "NULL"'''
- return 'NULL'
+ return u'NULL'
def _copyfrom_buffer_convert_number(value, **opts):
'''Convert a number into its string representation'''
- return str(value)
+ return text_type(value)
def _copyfrom_buffer_convert_string(value, **opts):
'''Convert string value.
-
- Recognized keywords:
- :encoding: resulting string encoding (default: utf-8)
'''
- encoding = opts.get('encoding','utf-8')
- escape_chars = ((u'\\', ur'\\'), (u'\t', u'\\t'), (u'\r', u'\\r'),
+ escape_chars = ((u'\\', u'\\\\'), (u'\t', u'\\t'), (u'\r', u'\\r'),
(u'\n', u'\\n'))
for char, replace in escape_chars:
value = value.replace(char, replace)
- if isinstance(value, unicode):
- value = value.encode(encoding)
return value
def _copyfrom_buffer_convert_date(value, **opts):
'''Convert date into "YYYY-MM-DD"'''
# Do not use strftime, as it yields issue with date < 1900
# (http://bugs.python.org/issue1777412)
- return '%04d-%02d-%02d' % (value.year, value.month, value.day)
+ return u'%04d-%02d-%02d' % (value.year, value.month, value.day)
def _copyfrom_buffer_convert_datetime(value, **opts):
'''Convert date into "YYYY-MM-DD HH:MM:SS.UUUUUU"'''
# Do not use strftime, as it yields issue with date < 1900
# (http://bugs.python.org/issue1777412)
- return '%s %s' % (_copyfrom_buffer_convert_date(value, **opts),
- _copyfrom_buffer_convert_time(value, **opts))
+ return u'%s %s' % (_copyfrom_buffer_convert_date(value, **opts),
+ _copyfrom_buffer_convert_time(value, **opts))
def _copyfrom_buffer_convert_time(value, **opts):
'''Convert time into "HH:MM:SS.UUUUUU"'''
- return '%02d:%02d:%02d.%06d' % (value.hour, value.minute,
- value.second, value.microsecond)
+ return u'%02d:%02d:%02d.%06d' % (value.hour, value.minute,
+ value.second, value.microsecond)
# (types, converter) list.
_COPYFROM_BUFFER_CONVERTERS = [
(type(None), _copyfrom_buffer_convert_None),
- ((long, int, float), _copyfrom_buffer_convert_number),
- (basestring, _copyfrom_buffer_convert_string),
+ (integer_types + (float,), _copyfrom_buffer_convert_number),
+ (string_types, _copyfrom_buffer_convert_string),
(datetime, _copyfrom_buffer_convert_datetime),
(date, _copyfrom_buffer_convert_date),
(time, _copyfrom_buffer_convert_time),
@@ -185,7 +181,7 @@
rows = []
if columns is None:
if isinstance(data[0], (tuple, list)):
- columns = range(len(data[0]))
+ columns = list(range(len(data[0])))
elif isinstance(data[0], dict):
columns = data[0].keys()
else:
@@ -209,6 +205,7 @@
for types, converter in _COPYFROM_BUFFER_CONVERTERS:
if isinstance(value, types):
value = converter(value, **convert_opts)
+ assert isinstance(value, text_type)
break
else:
raise ValueError("Unsupported value type %s" % type(value))
@@ -335,7 +332,7 @@
self._sql.eid_insertdicts = {}
def flush(self):
- print 'starting flush'
+ print('starting flush')
_entities_sql = self._sql.entities
_relations_sql = self._sql.relations
_inlined_relations_sql = self._sql.inlined_relations
@@ -346,7 +343,7 @@
# In that case, simply update the insert dict and remove
# the need to make the
# UPDATE statement
- for statement, datalist in _inlined_relations_sql.iteritems():
+ for statement, datalist in _inlined_relations_sql.items():
new_datalist = []
# for a given inlined relation,
# browse each couple to be inserted
--- a/dataimport/stores.py Thu Mar 06 15:55:33 2014 +0100
+++ b/dataimport/stores.py Thu Nov 12 10:52:28 2015 +0100
@@ -21,7 +21,7 @@
>>> user_eid = store.prepare_insert_entity('CWUser', login=u'johndoe')
>>> group_eid = store.prepare_insert_entity('CWUser', name=u'unknown')
- >>> store.relate(user_eid, 'in_group', group_eid)
+ >>> store.prepare_insert_relation(user_eid, 'in_group', group_eid)
>>> store.flush()
>>> store.commit()
>>> store.finish()
@@ -61,6 +61,8 @@
from datetime import datetime
from copy import copy
+from six import text_type
+
from logilab.common.deprecation import deprecated
from logilab.common.decorators import cached
@@ -168,7 +170,7 @@
"""Given an entity type, attributes and inlined relations, returns the inserted entity's
eid.
"""
- for k, v in kwargs.iteritems():
+ for k, v in kwargs.items():
kwargs[k] = getattr(v, 'eid', v)
entity, rels = self.metagen.base_etype_dicts(etype)
# make a copy to keep cached entity pristine
@@ -183,7 +185,7 @@
kwargs = dict()
if inspect.getargspec(self.add_relation).keywords:
kwargs['subjtype'] = entity.cw_etype
- for rtype, targeteids in rels.iteritems():
+ for rtype, targeteids in rels.items():
# targeteids may be a single eid or a list of eids
inlined = self.rschema(rtype).inlined
try:
@@ -298,7 +300,7 @@
genfunc = self.generate(attr)
if genfunc:
entity.cw_edited.edited_attribute(attr, genfunc(entity))
- if isinstance(extid, unicode):
+ if isinstance(extid, text_type):
extid = extid.encode('utf-8')
return self.source, extid
--- a/dataimport/test/test_csv.py Thu Mar 06 15:55:33 2014 +0100
+++ b/dataimport/test/test_csv.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,7 +17,7 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""unittest for cubicweb.dataimport.csv"""
-from StringIO import StringIO
+from io import BytesIO
from logilab.common.testlib import TestCase, unittest_main
@@ -27,7 +27,7 @@
class UcsvreaderTC(TestCase):
def test_empty_lines_skipped(self):
- stream = StringIO('''a,b,c,d,
+ stream = BytesIO(b'''a,b,c,d,
1,2,3,4,
,,,,
,,,,
@@ -45,7 +45,7 @@
list(csv.ucsvreader(stream, skip_empty=False)))
def test_skip_first(self):
- stream = StringIO('a,b,c,d,\n1,2,3,4,\n')
+ stream = BytesIO(b'a,b,c,d,\n1,2,3,4,\n')
reader = csv.ucsvreader(stream, skipfirst=True, ignore_errors=True)
self.assertEqual(list(reader),
[[u'1', u'2', u'3', u'4', u'']])
--- a/dataimport/test/test_pgstore.py Thu Mar 06 15:55:33 2014 +0100
+++ b/dataimport/test/test_pgstore.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,6 +20,7 @@
import datetime as DT
+from six import PY2
from logilab.common.testlib import TestCase, unittest_main
from cubicweb.dataimport import pgstore
@@ -31,24 +32,24 @@
def test_convert_none(self):
cnvt = pgstore._copyfrom_buffer_convert_None
- self.assertEqual('NULL', cnvt(None))
+ self.assertEqual(u'NULL', cnvt(None))
def test_convert_number(self):
cnvt = pgstore._copyfrom_buffer_convert_number
- self.assertEqual('42', cnvt(42))
- self.assertEqual('42', cnvt(42L))
- self.assertEqual('42.42', cnvt(42.42))
+ self.assertEqual(u'42', cnvt(42))
+ if PY2:
+ self.assertEqual(u'42', cnvt(long(42)))
+ self.assertEqual(u'42.42', cnvt(42.42))
def test_convert_string(self):
cnvt = pgstore._copyfrom_buffer_convert_string
# simple
- self.assertEqual('babar', cnvt('babar'))
+ self.assertEqual(u'babar', cnvt('babar'))
# unicode
- self.assertEqual('\xc3\xa9l\xc3\xa9phant', cnvt(u'éléphant'))
- self.assertEqual('\xe9l\xe9phant', cnvt(u'éléphant', encoding='latin1'))
+ self.assertEqual(u'éléphant', cnvt(u'éléphant'))
# escaping
- self.assertEqual('babar\\tceleste\\n', cnvt('babar\tceleste\n'))
- self.assertEqual(r'C:\\new\tC:\\test', cnvt('C:\\new\tC:\\test'))
+ self.assertEqual(u'babar\\tceleste\\n', cnvt(u'babar\tceleste\n'))
+ self.assertEqual(u'C:\\\\new\\tC:\\\\test', cnvt(u'C:\\new\tC:\\test'))
def test_convert_date(self):
cnvt = pgstore._copyfrom_buffer_convert_date
@@ -64,18 +65,19 @@
# test buffer
def test_create_copyfrom_buffer_tuple(self):
- data = ((42, 42L, 42.42, u'éléphant', DT.date(666, 1, 13), DT.time(6, 6, 6),
+ l = long if PY2 else int
+ data = ((42, l(42), 42.42, u'éléphant', DT.date(666, 1, 13), DT.time(6, 6, 6),
DT.datetime(666, 6, 13, 6, 6, 6)),
- (6, 6L, 6.6, u'babar', DT.date(2014, 1, 14), DT.time(4, 2, 1),
+ (6, l(6), 6.6, u'babar', DT.date(2014, 1, 14), DT.time(4, 2, 1),
DT.datetime(2014, 1, 1, 0, 0, 0)))
results = pgstore._create_copyfrom_buffer(data)
# all columns
- expected = '''42\t42\t42.42\téléphant\t0666-01-13\t06:06:06.000000\t0666-06-13 06:06:06.000000
+ expected = u'''42\t42\t42.42\téléphant\t0666-01-13\t06:06:06.000000\t0666-06-13 06:06:06.000000
6\t6\t6.6\tbabar\t2014-01-14\t04:02:01.000000\t2014-01-01 00:00:00.000000'''
self.assertMultiLineEqual(expected, results.getvalue())
# selected columns
results = pgstore._create_copyfrom_buffer(data, columns=(1, 3, 6))
- expected = '''42\téléphant\t0666-06-13 06:06:06.000000
+ expected = u'''42\téléphant\t0666-06-13 06:06:06.000000
6\tbabar\t2014-01-01 00:00:00.000000'''
self.assertMultiLineEqual(expected, results.getvalue())
@@ -85,8 +87,8 @@
dict(integer=6, double=6.6, text=u'babar',
date=DT.datetime(2014, 1, 1, 0, 0, 0)))
results = pgstore._create_copyfrom_buffer(data, ('integer', 'text'))
- expected = '''42\téléphant\n6\tbabar'''
- self.assertMultiLineEqual(expected, results.getvalue())
+ expected = u'''42\téléphant\n6\tbabar'''
+ self.assertEqual(expected, results.getvalue())
if __name__ == '__main__':
unittest_main()
--- a/dataimport/test/unittest_importer.py Thu Mar 06 15:55:33 2014 +0100
+++ b/dataimport/test/unittest_importer.py Thu Nov 12 10:52:28 2015 +0100
@@ -152,14 +152,14 @@
class UseExtidAsCwuriTC(TestCase):
def test(self):
- personne = ExtEntity('Personne', 1, {'nom': set([u'de la lune']),
- 'prenom': set([u'Jean'])})
+ personne = ExtEntity('Personne', b'1', {'nom': set([u'de la lune']),
+ 'prenom': set([u'Jean'])})
mapping = {}
set_cwuri = use_extid_as_cwuri(mapping)
list(set_cwuri((personne,)))
self.assertIn('cwuri', personne.values)
- self.assertEqual(personne.values['cwuri'], set(['1']))
- mapping[1] = 'whatever'
+ self.assertEqual(personne.values['cwuri'], set([u'1']))
+ mapping[b'1'] = 'whatever'
personne.values.pop('cwuri')
list(set_cwuri((personne,)))
self.assertNotIn('cwuri', personne.values)
@@ -167,7 +167,7 @@
def extentities_from_csv(fpath):
"""Yield ExtEntity read from `fpath` CSV file."""
- with open(fpath) as f:
+ with open(fpath, 'rb') as f:
for uri, name, knows in ucsvreader(f, skipfirst=True, skip_empty=False):
yield ExtEntity('Personne', uri,
{'nom': set([name]), 'connait': set([knows])})
--- a/debian/control Thu Mar 06 15:55:33 2014 +0100
+++ b/debian/control Thu Nov 12 10:52:28 2015 +0100
@@ -10,6 +10,7 @@
Build-Depends:
debhelper (>= 7),
python (>= 2.6),
+ python-six (>= 1.4.0),
python-sphinx,
python-logilab-common,
python-unittest2 | python (>= 2.7),
@@ -155,6 +156,7 @@
${python:Depends},
graphviz,
gettext,
+ python-six (>= 1.4.0),
python-logilab-mtconverter (>= 0.8.0),
python-logilab-common (>= 0.63.1),
python-markdown,
--- a/devtools/__init__.py Thu Mar 06 15:55:33 2014 +0100
+++ b/devtools/__init__.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,6 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""Test tools for cubicweb"""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
@@ -24,7 +25,6 @@
import errno
import logging
import shutil
-import pickle
import glob
import subprocess
import warnings
@@ -35,6 +35,9 @@
from os.path import (abspath, realpath, join, exists, split, isabs, isdir)
from functools import partial
+from six import text_type
+from six.moves import cPickle as pickle
+
from logilab.common.date import strptime
from logilab.common.decorators import cached, clear_cache
@@ -92,7 +95,7 @@
DEFAULT_PSQL_SOURCES = DEFAULT_SOURCES.copy()
DEFAULT_PSQL_SOURCES['system'] = DEFAULT_SOURCES['system'].copy()
DEFAULT_PSQL_SOURCES['system']['db-driver'] = 'postgres'
-DEFAULT_PSQL_SOURCES['system']['db-user'] = unicode(getpass.getuser())
+DEFAULT_PSQL_SOURCES['system']['db-user'] = text_type(getpass.getuser())
DEFAULT_PSQL_SOURCES['system']['db-password'] = None
def turn_repo_off(repo):
@@ -109,7 +112,7 @@
try:
repo.close(sessionid)
except BadConnectionId: #this is strange ? thread issue ?
- print 'XXX unknown session', sessionid
+ print('XXX unknown session', sessionid)
for cnxset in repo.cnxsets:
cnxset.close(True)
repo.system_source.shutdown()
@@ -193,7 +196,7 @@
def sources_file(self):
"""define in subclasses self.sourcefile if necessary"""
if self.sourcefile:
- print 'Reading sources from', self.sourcefile
+ print('Reading sources from', self.sourcefile)
sourcefile = self.sourcefile
if not isabs(sourcefile):
sourcefile = join(self.apphome, sourcefile)
@@ -367,7 +370,8 @@
# XXX set a clearer error message ???
backup_coordinates, config_path = self.db_cache[self.db_cache_key(db_id)]
# reload the config used to create the database.
- config = pickle.loads(open(config_path, 'rb').read())
+ with open(config_path, 'rb') as f:
+ config = pickle.load(f)
# shutdown repo before changing database content
if self._repo is not None:
self._repo.turn_repo_off()
@@ -399,9 +403,8 @@
def _new_repo(self, config):
"""Factory method to create a new Repository Instance"""
- from cubicweb.repoapi import _get_inmemory_repo
config._cubes = None
- repo = _get_inmemory_repo(config)
+ repo = config.repository()
# extending Repository class
repo._has_started = False
repo._needs_refresh = False
@@ -414,7 +417,7 @@
from cubicweb.repoapi import connect
repo = self.get_repo()
sources = self.config.read_sources_file()
- login = unicode(sources['admin']['login'])
+ login = text_type(sources['admin']['login'])
password = sources['admin']['password'] or 'xxx'
cnx = connect(repo, login, password=password)
return cnx
@@ -463,7 +466,7 @@
dbname, data = data.split('-', 1)
db_id, filetype = data.split('.', 1)
entries.setdefault((dbname, db_id), {})[filetype] = filepath
- for (dbname, db_id), entry in entries.iteritems():
+ for (dbname, db_id), entry in entries.items():
# apply necessary transformation from the driver
value = self.process_cache_entry(directory, dbname, db_id, entry)
assert 'config' in entry
@@ -493,7 +496,7 @@
if test_db_id is DEFAULT_EMPTY_DB_ID:
self.init_test_database()
else:
- print 'Building %s for database %s' % (test_db_id, self.dbname)
+ print('Building %s for database %s' % (test_db_id, self.dbname))
self.build_db_cache(DEFAULT_EMPTY_DB_ID)
self.restore_database(DEFAULT_EMPTY_DB_ID)
repo = self.get_repo(startup=True)
@@ -542,7 +545,7 @@
try:
subprocess.check_call(['initdb', '-D', datadir, '-E', 'utf-8', '--locale=C'])
- except OSError, err:
+ except OSError as err:
if err.errno == errno.ENOENT:
raise OSError('"initdb" could not be found. '
'You should add the postgresql bin folder to your PATH '
@@ -561,7 +564,11 @@
subprocess.check_call(['pg_ctl', 'start', '-w', '-D', datadir,
'-o', options],
env=env)
- except OSError, err:
+ except OSError as err:
+ try:
+ os.rmdir(sockdir)
+ except OSError:
+ pass
if err.errno == errno.ENOENT:
raise OSError('"pg_ctl" could not be found. '
'You should add the postgresql bin folder to your PATH '
@@ -574,6 +581,10 @@
datadir = join(os.path.dirname(pyfile), 'data',
'pgdb-%s' % os.path.splitext(os.path.basename(pyfile))[0])
subprocess.call(['pg_ctl', 'stop', '-D', datadir, '-m', 'fast'])
+ try:
+ os.rmdir(DEFAULT_PSQL_SOURCES['system']['db-host'])
+ except OSError:
+ pass
class PostgresTestDataBaseHandler(TestDataBaseHandler):
@@ -677,7 +688,7 @@
@property
def _config_id(self):
- return sha1(self.config.apphome).hexdigest()[:10]
+ return sha1(self.config.apphome.encode('utf-8')).hexdigest()[:10]
def _backup_name(self, db_id): # merge me with parent
backup_name = '_'.join(('cache', self._config_id, self.dbname, db_id))
@@ -833,21 +844,21 @@
found_date = False
for row, rowdesc in zip(rset, rset.description):
for cellindex, (value, vtype) in enumerate(zip(row, rowdesc)):
- if vtype in ('Date', 'Datetime') and type(value) is unicode:
+ if vtype in ('Date', 'Datetime') and isinstance(value, text_type):
found_date = True
value = value.rsplit('.', 1)[0]
try:
row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S')
except Exception:
row[cellindex] = strptime(value, '%Y-%m-%d')
- if vtype == 'Time' and type(value) is unicode:
+ if vtype == 'Time' and isinstance(value, text_type):
found_date = True
try:
row[cellindex] = strptime(value, '%H:%M:%S')
except Exception:
# DateTime used as Time?
row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S')
- if vtype == 'Interval' and type(value) is int:
+ if vtype == 'Interval' and isinstance(value, int):
found_date = True
row[cellindex] = timedelta(0, value, 0) # XXX value is in number of seconds?
if not found_date:
--- a/devtools/devctl.py Thu Mar 06 15:55:33 2014 +0100
+++ b/devtools/devctl.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,6 +18,7 @@
"""additional cubicweb-ctl commands and command handlers for cubicweb and
cubicweb's cubes development
"""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
@@ -83,7 +84,7 @@
def cleanup_sys_modules(config):
# cleanup sys.modules, required when we're updating multiple cubes
- for name, mod in sys.modules.items():
+ for name, mod in list(sys.modules.items()):
if mod is None:
# duh ? logilab.common.os for instance
del sys.modules[name]
@@ -250,7 +251,7 @@
# bw compat, necessary until all translation of relation are
# done properly...
add_msg(w, '%s_object' % rtype)
- for rdef in rschema.rdefs.itervalues():
+ for rdef in rschema.rdefs.values():
if not rdef.description or rdef.description in done:
continue
if (librschema is None or
@@ -267,7 +268,7 @@
for reg, objdict in vreg.items():
if reg in ('boxes', 'contentnavigation'):
continue
- for objects in objdict.itervalues():
+ for objects in objdict.values():
for obj in objects:
objid = '%s_%s' % (reg, obj.__regid__)
if objid in done:
@@ -314,21 +315,21 @@
from cubicweb.i18n import extract_from_tal, execute2
tempdir = tempfile.mkdtemp(prefix='cw-')
cwi18ndir = WebConfiguration.i18n_lib_dir()
- print '-> extract messages:',
- print 'schema',
+ print('-> extract messages:', end=' ')
+ print('schema', end=' ')
schemapot = osp.join(tempdir, 'schema.pot')
potfiles = [schemapot]
potfiles.append(schemapot)
# explicit close necessary else the file may not be yet flushed when
# we'll using it below
- schemapotstream = file(schemapot, 'w')
+ schemapotstream = open(schemapot, 'w')
generate_schema_pot(schemapotstream.write, cubedir=None)
schemapotstream.close()
- print 'TAL',
+ print('TAL', end=' ')
tali18nfile = osp.join(tempdir, 'tali18n.py')
extract_from_tal(find(osp.join(BASEDIR, 'web'), ('.py', '.pt')),
tali18nfile)
- print '-> generate .pot files.'
+ print('-> generate .pot files.')
pyfiles = get_module_files(BASEDIR)
pyfiles += globfind(osp.join(BASEDIR, 'misc', 'migration'), '*.py')
schemafiles = globfind(osp.join(BASEDIR, 'schemas'), '*.py')
@@ -349,12 +350,12 @@
if osp.exists(potfile):
potfiles.append(potfile)
else:
- print '-> WARNING: %s file was not generated' % potfile
- print '-> merging %i .pot files' % len(potfiles)
+ print('-> WARNING: %s file was not generated' % potfile)
+ print('-> merging %i .pot files' % len(potfiles))
cubicwebpot = osp.join(tempdir, 'cubicweb.pot')
cmd = ['msgcat', '-o', cubicwebpot] + potfiles
execute2(cmd)
- print '-> merging main pot file with existing translations.'
+ print('-> merging main pot file with existing translations.')
chdir(cwi18ndir)
toedit = []
for lang in CubicWebNoAppConfiguration.cw_languages():
@@ -368,10 +369,10 @@
# cleanup
rm(tempdir)
# instructions pour la suite
- print '-> regenerated CubicWeb\'s .po catalogs.'
- print '\nYou can now edit the following files:'
- print '* ' + '\n* '.join(toedit)
- print 'when you are done, run "cubicweb-ctl i18ncube yourcube".'
+ print('-> regenerated CubicWeb\'s .po catalogs.')
+ print('\nYou can now edit the following files:')
+ print('* ' + '\n* '.join(toedit))
+ print('when you are done, run "cubicweb-ctl i18ncube yourcube".')
class UpdateCubeCatalogCommand(Command):
@@ -398,25 +399,25 @@
from subprocess import CalledProcessError
for cubedir in cubes:
if not osp.isdir(cubedir):
- print '-> ignoring %s that is not a directory.' % cubedir
+ print('-> ignoring %s that is not a directory.' % cubedir)
continue
try:
toedit = update_cube_catalogs(cubedir)
except CalledProcessError as exc:
- print '\n*** error while updating catalogs for cube', cubedir
- print 'cmd:\n%s' % exc.cmd
- print 'stdout:\n%s\nstderr:\n%s' % exc.data
+ print('\n*** error while updating catalogs for cube', cubedir)
+ print('cmd:\n%s' % exc.cmd)
+ print('stdout:\n%s\nstderr:\n%s' % exc.data)
except Exception:
import traceback
traceback.print_exc()
- print '*** error while updating catalogs for cube', cubedir
+ print('*** error while updating catalogs for cube', cubedir)
return False
else:
# instructions pour la suite
if toedit:
- print '-> regenerated .po catalogs for cube %s.' % cubedir
- print '\nYou can now edit the following files:'
- print '* ' + '\n* '.join(toedit)
+ print('-> regenerated .po catalogs for cube %s.' % cubedir)
+ print('\nYou can now edit the following files:')
+ print('* ' + '\n* '.join(toedit))
print ('When you are done, run "cubicweb-ctl i18ninstance '
'<yourinstance>" to see changes in your instances.')
return True
@@ -429,7 +430,7 @@
from cubicweb.i18n import extract_from_tal, execute2
cube = osp.basename(osp.normpath(cubedir))
tempdir = tempfile.mkdtemp()
- print underline_title('Updating i18n catalogs for cube %s' % cube)
+ print(underline_title('Updating i18n catalogs for cube %s' % cube))
chdir(cubedir)
if osp.exists(osp.join('i18n', 'entities.pot')):
warn('entities.pot is deprecated, rename file to static-messages.pot (%s)'
@@ -439,20 +440,20 @@
potfiles = [osp.join('i18n', 'static-messages.pot')]
else:
potfiles = []
- print '-> extracting messages:',
- print 'schema',
+ print('-> extracting messages:', end=' ')
+ print('schema', end=' ')
schemapot = osp.join(tempdir, 'schema.pot')
potfiles.append(schemapot)
# explicit close necessary else the file may not be yet flushed when
# we'll using it below
- schemapotstream = file(schemapot, 'w')
+ schemapotstream = open(schemapot, 'w')
generate_schema_pot(schemapotstream.write, cubedir)
schemapotstream.close()
- print 'TAL',
+ print('TAL', end=' ')
tali18nfile = osp.join(tempdir, 'tali18n.py')
ptfiles = find('.', ('.py', '.pt'), blacklist=STD_BLACKLIST+('test',))
extract_from_tal(ptfiles, tali18nfile)
- print 'Javascript'
+ print('Javascript')
jsfiles = [jsfile for jsfile in find('.', '.js')
if osp.basename(jsfile).startswith('cub')]
if jsfiles:
@@ -463,7 +464,7 @@
# no pot file created if there are no string to translate
if osp.exists(tmppotfile):
potfiles.append(tmppotfile)
- print '-> creating cube-specific catalog'
+ print('-> creating cube-specific catalog')
tmppotfile = osp.join(tempdir, 'generated.pot')
cubefiles = find('.', '.py', blacklist=STD_BLACKLIST+('test',))
cubefiles.append(tali18nfile)
@@ -473,20 +474,20 @@
if osp.exists(tmppotfile): # doesn't exists of no translation string found
potfiles.append(tmppotfile)
potfile = osp.join(tempdir, 'cube.pot')
- print '-> merging %i .pot files' % len(potfiles)
+ print('-> merging %i .pot files' % len(potfiles))
cmd = ['msgcat', '-o', potfile]
cmd.extend(potfiles)
execute2(cmd)
if not osp.exists(potfile):
- print 'no message catalog for cube', cube, 'nothing to translate'
+ print('no message catalog for cube', cube, 'nothing to translate')
# cleanup
rm(tempdir)
return ()
- print '-> merging main pot file with existing translations:',
+ print('-> merging main pot file with existing translations:', end=' ')
chdir('i18n')
toedit = []
for lang in CubicWebNoAppConfiguration.cw_languages():
- print lang,
+ print(lang, end=' ')
cubepo = '%s.po' % lang
if not osp.exists(cubepo):
shutil.copy(potfile, cubepo)
@@ -496,7 +497,7 @@
ensure_fs_mode(cubepo)
shutil.move('%snew' % cubepo, cubepo)
toedit.append(osp.abspath(cubepo))
- print
+ print()
# cleanup
rm(tempdir)
return toedit
@@ -620,7 +621,7 @@
" Please specify it using the --directory option")
cubesdir = cubespath[0]
if not osp.isdir(cubesdir):
- print "-> creating cubes directory", cubesdir
+ print("-> creating cubes directory", cubesdir)
try:
mkdir(cubesdir)
except OSError as err:
@@ -649,7 +650,8 @@
if verbose:
longdesc = raw_input(
'Enter a long description (leave empty to reuse the short one): ')
- dependencies = {'cubicweb': '>= %s' % cubicwebversion}
+ dependencies = {'cubicweb': '>= %s' % cubicwebversion,
+ 'six': '>= 1.4.0',}
if verbose:
dependencies.update(self._ask_for_dependencies())
context = {'cubename' : cubename,
@@ -710,7 +712,7 @@
requests = {}
for filepath in args:
try:
- stream = file(filepath)
+ stream = open(filepath)
except OSError as ex:
raise BadCommandUsage("can't open rql log file %s: %s"
% (filepath, ex))
@@ -731,17 +733,17 @@
except Exception as exc:
sys.stderr.write('Line %s: %s (%s)\n' % (lineno, exc, line))
stat = []
- for rql, times in requests.iteritems():
+ for rql, times in requests.items():
stat.append( (sum(time[0] for time in times),
sum(time[1] for time in times),
len(times), rql) )
stat.sort()
stat.reverse()
total_time = sum(clocktime for clocktime, cputime, occ, rql in stat) * 0.01
- print 'Percentage;Cumulative Time (clock);Cumulative Time (CPU);Occurences;Query'
+ print('Percentage;Cumulative Time (clock);Cumulative Time (CPU);Occurences;Query')
for clocktime, cputime, occ, rql in stat:
- print '%.2f;%.2f;%.2f;%s;%s' % (clocktime/total_time, clocktime,
- cputime, occ, rql)
+ print('%.2f;%.2f;%.2f;%s;%s' % (clocktime/total_time, clocktime,
+ cputime, occ, rql))
class GenerateSchema(Command):
--- a/devtools/fake.py Thu Mar 06 15:55:33 2014 +0100
+++ b/devtools/fake.py Thu Nov 12 10:52:28 2015 +0100
@@ -22,6 +22,8 @@
from contextlib import contextmanager
+from six import string_types
+
from logilab.database import get_db_helper
from cubicweb.req import RequestSessionBase
@@ -91,7 +93,7 @@
def set_request_header(self, header, value, raw=False):
"""set an incoming HTTP header (for test purpose only)"""
- if isinstance(value, basestring):
+ if isinstance(value, string_types):
value = [value]
if raw:
# adding encoded header is important, else page content
@@ -110,7 +112,7 @@
def build_url_params(self, **kwargs):
# overriden to get predictable resultts
args = []
- for param, values in sorted(kwargs.iteritems()):
+ for param, values in sorted(kwargs.items()):
if not isinstance(values, (list, tuple)):
values = (values,)
for value in values:
--- a/devtools/fill.py Thu Mar 06 15:55:33 2014 +0100
+++ b/devtools/fill.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,6 +17,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""This modules defines func / methods for creating test repositories"""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
@@ -25,6 +26,10 @@
from copy import deepcopy
from datetime import datetime, date, time, timedelta
from decimal import Decimal
+import inspect
+
+from six import text_type, add_metaclass
+from six.moves import range
from logilab.common import attrdict
from logilab.mtconverter import xml_escape
@@ -173,7 +178,7 @@
generate_tztime = generate_time # XXX implementation should add a timezone
def generate_bytes(self, entity, attrname, index, format=None):
- fakefile = Binary("%s%s" % (attrname, index))
+ fakefile = Binary(("%s%s" % (attrname, index)).encode('ascii'))
fakefile.filename = u"file_%s" % attrname
return fakefile
@@ -224,7 +229,7 @@
"""
for cst in self.eschema.rdef(attrname).constraints:
if isinstance(cst, StaticVocabularyConstraint):
- return unicode(choice(cst.vocabulary()))
+ return text_type(choice(cst.vocabulary()))
return None
# XXX nothing to do here
@@ -254,13 +259,15 @@
for attrname, attrvalue in classdict.items():
if callable(attrvalue):
if attrname.startswith('generate_') and \
- attrvalue.func_code.co_argcount < 2:
+ len(inspect.getargspec(attrvalue).args) < 2:
raise TypeError('generate_xxx must accept at least 1 argument')
setattr(_ValueGenerator, attrname, attrvalue)
return type.__new__(mcs, name, bases, classdict)
+
+@add_metaclass(autoextend)
class ValueGenerator(_ValueGenerator):
- __metaclass__ = autoextend
+ pass
def _default_choice_func(etype, attrname):
@@ -286,7 +293,7 @@
returns acceptable values for this attribute
"""
queries = []
- for index in xrange(entity_num):
+ for index in range(entity_num):
restrictions = []
args = {}
for attrname, value in make_entity(etype, schema, vreg, index, choice_func).items():
@@ -347,7 +354,7 @@
fmt = vreg.property_value('ui.float-format')
value = fmt % value
else:
- value = unicode(value)
+ value = text_type(value)
return entity
@@ -363,7 +370,7 @@
rql += ', %s is %s' % (selectvar, objtype)
rset = cnx.execute(rql)
except Exception:
- print "could restrict eid_list with given constraints (%r)" % constraints
+ print("could restrict eid_list with given constraints (%r)" % constraints)
return []
return set(eid for eid, in rset.rows)
@@ -508,8 +515,8 @@
break
else:
# FIXME: 20 should be read from config
- subjeidsiter = [choice(tuple(subjeids)) for i in xrange(min(len(subjeids), 20))]
- objeidsiter = [choice(tuple(objeids)) for i in xrange(min(len(objeids), 20))]
+ subjeidsiter = [choice(tuple(subjeids)) for i in range(min(len(subjeids), 20))]
+ objeidsiter = [choice(tuple(objeids)) for i in range(min(len(objeids), 20))]
for subjeid, objeid in zip(subjeidsiter, objeidsiter):
if subjeid != objeid and not (subjeid, objeid) in used:
used.add( (subjeid, objeid) )
--- a/devtools/htmlparser.py Thu Mar 06 15:55:33 2014 +0100
+++ b/devtools/htmlparser.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,7 +20,7 @@
import re
import sys
from xml import sax
-from cStringIO import StringIO
+from io import BytesIO
from lxml import etree
@@ -33,7 +33,7 @@
ERR_COUNT = 0
-_REM_SCRIPT_RGX = re.compile(r"<script[^>]*>.*?</script>", re.U|re.M|re.I|re.S)
+_REM_SCRIPT_RGX = re.compile(br"<script[^>]*>.*?</script>", re.M|re.I|re.S)
def _remove_script_tags(data):
"""Remove the script (usually javascript) tags to help the lxml
XMLParser / HTMLParser do their job. Without that, they choke on
@@ -70,7 +70,7 @@
#
# using that, we'll miss most actual validation error we want to
# catch. For now, use dumb regexp
- return _REM_SCRIPT_RGX.sub('', data)
+ return _REM_SCRIPT_RGX.sub(b'', data)
class Validator(object):
@@ -164,10 +164,10 @@
def _parse(self, data):
inpsrc = sax.InputSource()
- inpsrc.setByteStream(StringIO(data))
+ inpsrc.setByteStream(BytesIO(data))
try:
self._parser.parse(inpsrc)
- except sax.SAXParseException, exc:
+ except sax.SAXParseException as exc:
new_exc = AssertionError(u'invalid document: %s' % exc)
new_exc.position = (exc._linenum, exc._colnum)
raise new_exc
@@ -209,7 +209,7 @@
def matching_nodes(self, tag, **attrs):
for elt in self.etree.iterfind(self._iterstr(tag)):
eltattrs = elt.attrib
- for attr, value in attrs.iteritems():
+ for attr, value in attrs.items():
try:
if eltattrs[attr] != value:
break
--- a/devtools/httptest.py Thu Mar 06 15:55:33 2014 +0100
+++ b/devtools/httptest.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,17 +18,18 @@
"""this module contains base classes and utilities for integration with running
http server
"""
+from __future__ import print_function
+
__docformat__ = "restructuredtext en"
import random
import threading
import socket
-import httplib
-from urlparse import urlparse
-from twisted.internet import reactor, error
+from six.moves import range, http_client
+from six.moves.urllib.parse import urlparse
-from cubicweb.etwist.server import run
+
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.devtools import ApptestConfiguration
@@ -89,6 +90,8 @@
configcls = CubicWebServerConfig
def start_server(self):
+ from twisted.internet import reactor
+ from cubicweb.etwist.server import run
# use a semaphore to avoid starting test while the http server isn't
# fully initilialized
semaphore = threading.Semaphore(0)
@@ -110,12 +113,13 @@
#pre init utils connection
parseurl = urlparse(self.config['base-url'])
assert parseurl.port == self.config['port'], (self.config['base-url'], self.config['port'])
- self._web_test_cnx = httplib.HTTPConnection(parseurl.hostname,
- parseurl.port)
+ self._web_test_cnx = http_client.HTTPConnection(parseurl.hostname,
+ parseurl.port)
self._ident_cookie = None
def stop_server(self, timeout=15):
"""Stop the webserver, waiting for the thread to return"""
+ from twisted.internet import reactor
if self._web_test_cnx is None:
self.web_logout()
self._web_test_cnx.close()
@@ -139,7 +143,7 @@
passwd = user
response = self.web_get("login?__login=%s&__password=%s" %
(user, passwd))
- assert response.status == httplib.SEE_OTHER, response.status
+ assert response.status == http_client.SEE_OTHER, response.status
self._ident_cookie = response.getheader('Set-Cookie')
assert self._ident_cookie
return True
@@ -151,7 +155,7 @@
self._ident_cookie = None
def web_request(self, path='', method='GET', body=None, headers=None):
- """Return an httplib.HTTPResponse object for the specified path
+ """Return an http_client.HTTPResponse object for the specified path
Use available credential if available.
"""
@@ -174,9 +178,10 @@
self.start_server()
def tearDown(self):
+ from twisted.internet import error
try:
self.stop_server()
except error.ReactorNotRunning as err:
# Server could be launched manually
- print err
+ print(err)
super(CubicWebServerTC, self).tearDown()
--- a/devtools/instrument.py Thu Mar 06 15:55:33 2014 +0100
+++ b/devtools/instrument.py Thu Nov 12 10:52:28 2015 +0100
@@ -14,6 +14,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""Instrumentation utilities"""
+from __future__ import print_function
import os
@@ -45,10 +46,10 @@
return _COLORS[key]
def warn(msg, *args):
- print 'WARNING: %s' % (msg % args)
+ print('WARNING: %s' % (msg % args))
def info(msg):
- print 'INFO: ' + msg
+ print('INFO: ' + msg)
class PropagationAnalyzer(object):
@@ -185,7 +186,7 @@
def add_colors_legend(self, graph):
"""Add a legend of used colors to the graph."""
- for package, color in sorted(_COLORS.iteritems()):
+ for package, color in sorted(_COLORS.items()):
graph.add_node(package, color=color, fontcolor=color, shape='record')
--- a/devtools/qunit.py Thu Mar 06 15:55:33 2014 +0100
+++ b/devtools/qunit.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,11 +19,12 @@
import os, os.path as osp
from tempfile import mkdtemp, NamedTemporaryFile, TemporaryFile
import tempfile
-from Queue import Queue, Empty
from subprocess import Popen, check_call, CalledProcessError
from shutil import rmtree, copy as copyfile
from uuid import uuid4
+from six.moves.queue import Queue, Empty
+
# imported by default to simplify further import statements
from logilab.common.testlib import unittest_main, with_tempdir, InnerTest, Tags
from logilab.common.shellutils import getlogin
@@ -122,25 +123,20 @@
def test_javascripts(self):
for args in self.all_js_tests:
+ self.assertIn(len(args), (1, 2))
test_file = self.abspath(args[0])
if len(args) > 1:
depends = [self.abspath(dep) for dep in args[1]]
else:
depends = ()
- if len(args) > 2:
- data = [self.abspath(data) for data in args[2]]
- else:
- data = ()
- for js_test in self._test_qunit(test_file, depends, data):
+ for js_test in self._test_qunit(test_file, depends):
yield js_test
@with_tempdir
- def _test_qunit(self, test_file, depends=(), data_files=(), timeout=10):
+ def _test_qunit(self, test_file, depends=(), timeout=10):
assert osp.exists(test_file), test_file
for dep in depends:
assert osp.exists(dep), dep
- for data in data_files:
- assert osp.exists(data), data
QUnitView.test_file = test_file
QUnitView.depends = depends
--- a/devtools/repotest.py Thu Mar 06 15:55:33 2014 +0100
+++ b/devtools/repotest.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,6 +19,7 @@
This module contains functions to initialize a new repository.
"""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
@@ -29,10 +30,9 @@
def tuplify(mylist):
return [tuple(item) for item in mylist]
-def snippet_cmp(a, b):
- a = (a[0], [e.expression for e in a[1]])
- b = (b[0], [e.expression for e in b[1]])
- return cmp(a, b)
+def snippet_key(a):
+    # a[0] may be a dict or a sequence of key/value pairs
+ return (sorted(dict(a[0]).items()), [e.expression for e in a[1]])
def test_plan(self, rql, expected, kwargs=None):
with self.session.new_cnx() as cnx:
@@ -57,7 +57,7 @@
'expected %s queries, got %s' % (len(equeries), len(queries)))
for i, (rql, sol) in enumerate(queries):
self.assertEqual(rql, equeries[i][0])
- self.assertEqual(sorted(sol), sorted(equeries[i][1]))
+ self.assertEqual(sorted(sorted(x.items()) for x in sol), sorted(sorted(x.items()) for x in equeries[i][1]))
idx = 2
else:
idx = 1
@@ -66,7 +66,7 @@
self.assertEqual(len(step[-1]), len(expected[-1]),
'got %s child steps, expected %s' % (len(step[-1]), len(expected[-1])))
except AssertionError:
- print 'error on step ',
+ print('error on step ', end=' ')
pprint(step[:-1])
raise
children = step[-1]
@@ -115,7 +115,7 @@
schema_eids[x] = x.eid
for x in schema.relations():
schema_eids[x] = x.eid
- for rdef in x.rdefs.itervalues():
+ for rdef in x.rdefs.values():
schema_eids[(rdef.subject, rdef.rtype, rdef.object)] = rdef.eid
return schema_eids
@@ -127,7 +127,7 @@
for x in schema.relations():
x.eid = schema_eids[x]
schema._eid_index[x.eid] = x
- for rdef in x.rdefs.itervalues():
+ for rdef in x.rdefs.values():
rdef.eid = schema_eids[(rdef.subject, rdef.rtype, rdef.object)]
schema._eid_index[rdef.eid] = rdef
@@ -187,7 +187,7 @@
plan = self.qhelper.plan_factory(union, {}, FakeSession(self.repo))
plan.preprocess(union)
for select in union.children:
- select.solutions.sort()
+ select.solutions.sort(key=lambda x: list(x.items()))
#print '********* ppsolutions', solutions
return union
@@ -197,7 +197,7 @@
def setUp(self):
self.o = self.repo.querier
- self.session = self.repo._sessions.values()[0]
+ self.session = next(iter(self.repo._sessions.values()))
self.ueid = self.session.user.eid
assert self.ueid != -1
self.repo._type_source_cache = {} # clear cache
@@ -238,7 +238,7 @@
if simplify:
rqlhelper.simplify(rqlst)
for select in rqlst.children:
- select.solutions.sort()
+ select.solutions.sort(key=lambda x: list(x.items()))
return self.o.plan_factory(rqlst, kwargs, cnx)
def _prepare(self, cnx, rql, kwargs=None):
@@ -286,13 +286,13 @@
if rqlst.TYPE == 'select':
self.repo.vreg.rqlhelper.annotate(rqlst)
for select in rqlst.children:
- select.solutions.sort()
+ select.solutions.sort(key=lambda x: list(x.items()))
else:
- rqlst.solutions.sort()
+ rqlst.solutions.sort(key=lambda x: list(x.items()))
return self.o.plan_factory(rqlst, kwargs, cnx)
-# monkey patch some methods to get predicatable results #######################
+# monkey patch some methods to get predictable results #######################
from cubicweb import rqlrewrite
_orig_iter_relations = rqlrewrite.iter_relations
@@ -300,16 +300,15 @@
_orig_build_variantes = rqlrewrite.RQLRewriter.build_variantes
def _insert_snippets(self, snippets, varexistsmap=None):
- _orig_insert_snippets(self, sorted(snippets, snippet_cmp), varexistsmap)
+ _orig_insert_snippets(self, sorted(snippets, key=snippet_key), varexistsmap)
def _build_variantes(self, newsolutions):
variantes = _orig_build_variantes(self, newsolutions)
sortedvariantes = []
for variante in variantes:
- orderedkeys = sorted((k[1], k[2], v) for k, v in variante.iteritems())
- variante = DumbOrderedDict(sorted(variante.iteritems(),
- lambda a, b: cmp((a[0][1],a[0][2],a[1]),
- (b[0][1],b[0][2],b[1]))))
+ orderedkeys = sorted((k[1], k[2], v) for k, v in variante.items())
+ variante = DumbOrderedDict(sorted(variante.items(),
+ key=lambda a: (a[0][1], a[0][2], a[1])))
sortedvariantes.append( (orderedkeys, variante) )
return [v for ok, v in sorted(sortedvariantes)]
@@ -318,7 +317,7 @@
def _check_permissions(*args, **kwargs):
res, restricted = _orig_check_permissions(*args, **kwargs)
- res = DumbOrderedDict(sorted(res.iteritems(), lambda a, b: cmp(a[1], b[1])))
+ res = DumbOrderedDict(sorted(res.items(), key=lambda x: [y.items() for y in x[1]]))
return res, restricted
def _dummy_check_permissions(self, rqlst):
--- a/devtools/stresstester.py Thu Mar 06 15:55:33 2014 +0100
+++ b/devtools/stresstester.py Thu Nov 12 10:52:28 2015 +0100
@@ -41,6 +41,7 @@
Copyright (c) 2003-2011 LOGILAB S.A. (Paris, FRANCE), license is LGPL v2.
http://www.logilab.fr/ -- mailto:contact@logilab.fr
"""
+from __future__ import print_function
import os
import sys
@@ -84,7 +85,7 @@
def usage(status=0):
"""print usage string and exit"""
- print __doc__ % basename(sys.argv[0])
+ print(__doc__ % basename(sys.argv[0]))
sys.exit(status)
@@ -133,7 +134,7 @@
'nb-times=', 'nb-threads=',
'profile', 'report-output=',])
except Exception as ex:
- print ex
+ print(ex)
usage(1)
repeat = 100
threads = 1
@@ -155,7 +156,7 @@
elif opt in ('-P', '--profile'):
prof_file = val
elif opt in ('-o', '--report-output'):
- report_output = file(val, 'w')
+ report_output = open(val, 'w')
if len(args) != 2:
usage(1)
queries = [query for query in lines(args[1]) if not query.startswith('#')]
@@ -166,7 +167,7 @@
from cubicweb.cwconfig import instance_configuration
config = instance_configuration(args[0])
# get local access to the repository
- print "Creating repo", prof_file
+ print("Creating repo", prof_file)
repo = Repository(config, prof_file)
cnxid = repo.connect(user, password=password)
# connection to the CubicWeb repository
--- a/devtools/test/unittest_dbfill.py Thu Mar 06 15:55:33 2014 +0100
+++ b/devtools/test/unittest_dbfill.py Thu Nov 12 10:52:28 2015 +0100
@@ -21,6 +21,9 @@
import os.path as osp
import re
import datetime
+import io
+
+from six.moves import range
from logilab.common.testlib import TestCase, unittest_main
@@ -50,7 +53,7 @@
return None
def _available_Person_firstname(self, etype, attrname):
- return [f.strip() for f in file(osp.join(DATADIR, 'firstnames.txt'))]
+ return [f.strip() for f in io.open(osp.join(DATADIR, 'firstnames.txt'), encoding='latin1')]
def setUp(self):
config = ApptestConfiguration('data', apphome=DATADIR)
@@ -86,7 +89,7 @@
# Test for random index
for index in range(5):
cost_value = self.bug_valgen.generate_attribute_value({}, 'cost', index)
- self.assertIn(cost_value, range(index+1))
+ self.assertIn(cost_value, list(range(index+1)))
def test_date(self):
"""test date generation"""
--- a/devtools/test/unittest_httptest.py Thu Mar 06 15:55:33 2014 +0100
+++ b/devtools/test/unittest_httptest.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,7 +17,7 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""unittest for cubicweb.devtools.httptest module"""
-import httplib
+from six.moves import http_client
from logilab.common.testlib import Tags
from cubicweb.devtools.httptest import CubicWebServerTC
@@ -28,12 +28,12 @@
def test_response(self):
try:
response = self.web_get()
- except httplib.NotConnected as ex:
+ except http_client.NotConnected as ex:
self.fail("Can't connection to test server: %s" % ex)
def test_response_anon(self):
response = self.web_get()
- self.assertEqual(response.status, httplib.OK)
+ self.assertEqual(response.status, http_client.OK)
def test_base_url(self):
if self.config['base-url'] not in self.web_get().read():
@@ -47,20 +47,20 @@
def test_response_denied(self):
response = self.web_get()
- self.assertEqual(response.status, httplib.FORBIDDEN)
+ self.assertEqual(response.status, http_client.FORBIDDEN)
def test_login(self):
response = self.web_get()
- if response.status != httplib.FORBIDDEN:
+ if response.status != http_client.FORBIDDEN:
self.skipTest('Already authenticated, "test_response_denied" must have failed')
# login
self.web_login(self.admlogin, self.admpassword)
response = self.web_get()
- self.assertEqual(response.status, httplib.OK, response.body)
+ self.assertEqual(response.status, http_client.OK, response.body)
# logout
self.web_logout()
response = self.web_get()
- self.assertEqual(response.status, httplib.FORBIDDEN, response.body)
+ self.assertEqual(response.status, http_client.FORBIDDEN, response.body)
--- a/devtools/test/unittest_testlib.py Thu Mar 06 15:55:33 2014 +0100
+++ b/devtools/test/unittest_testlib.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,9 +17,10 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""unittests for cw.devtools.testlib module"""
-from cStringIO import StringIO
+from io import BytesIO, StringIO
+from unittest import TextTestRunner
-from unittest import TextTestRunner
+from six import PY2
from logilab.common.testlib import TestSuite, TestCase, unittest_main
from logilab.common.registry import yes
@@ -33,7 +34,7 @@
class entity:
cw_etype = 'Entity'
eid = 0
- sio = StringIO('hop\n')
+ sio = BytesIO(b'hop\n')
form = CubicWebTC.fake_form('import',
{'file': ('filename.txt', sio),
'encoding': u'utf-8',
@@ -51,7 +52,7 @@
class WebTestTC(TestCase):
def setUp(self):
- output = StringIO()
+ output = BytesIO() if PY2 else StringIO()
self.runner = TextTestRunner(stream=output)
def test_error_raised(self):
--- a/devtools/test/unittest_webtest.py Thu Mar 06 15:55:33 2014 +0100
+++ b/devtools/test/unittest_webtest.py Thu Nov 12 10:52:28 2015 +0100
@@ -1,4 +1,4 @@
-import httplib
+from six.moves import http_client
from logilab.common.testlib import Tags
from cubicweb.devtools.webtest import CubicWebTestTC
@@ -21,19 +21,19 @@
def test_reponse_denied(self):
res = self.webapp.get('/', expect_errors=True)
- self.assertEqual(httplib.FORBIDDEN, res.status_int)
+ self.assertEqual(http_client.FORBIDDEN, res.status_int)
def test_login(self):
res = self.webapp.get('/', expect_errors=True)
- self.assertEqual(httplib.FORBIDDEN, res.status_int)
+ self.assertEqual(http_client.FORBIDDEN, res.status_int)
self.login(self.admlogin, self.admpassword)
res = self.webapp.get('/')
- self.assertEqual(httplib.OK, res.status_int)
+ self.assertEqual(http_client.OK, res.status_int)
self.logout()
res = self.webapp.get('/', expect_errors=True)
- self.assertEqual(httplib.FORBIDDEN, res.status_int)
+ self.assertEqual(http_client.FORBIDDEN, res.status_int)
if __name__ == '__main__':
--- a/devtools/testlib.py Thu Mar 06 15:55:33 2014 +0100
+++ b/devtools/testlib.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,19 +16,22 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""this module contains base classes and utilities for cubicweb tests"""
+from __future__ import print_function
+
__docformat__ = "restructuredtext en"
import sys
import re
-import urlparse
from os.path import dirname, join, abspath
-from urllib import unquote
from math import log
from contextlib import contextmanager
from warnings import warn
-from types import NoneType
from itertools import chain
+from six import text_type, string_types
+from six.moves import range
+from six.moves.urllib.parse import urlparse, parse_qs, unquote as urlunquote
+
import yams.schema
from logilab.common.testlib import TestCase, InnerTest, Tags
@@ -60,7 +63,7 @@
def do_view(self, arg):
import webbrowser
data = self._getval(arg)
- with file('/tmp/toto.html', 'w') as toto:
+ with open('/tmp/toto.html', 'w') as toto:
toto.write(data)
webbrowser.open('file:///tmp/toto.html')
@@ -85,7 +88,7 @@
class JsonValidator(object):
def parse_string(self, data):
- return json.loads(data)
+ return json.loads(data.decode('ascii'))
@contextmanager
def real_error_handling(app):
@@ -283,7 +286,7 @@
"""provide a new RepoAccess object for a given user
The access is automatically closed at the end of the test."""
- login = unicode(login)
+ login = text_type(login)
access = RepoAccess(self.repo, login, self.requestcls)
self._open_access.add(access)
return access
@@ -310,10 +313,9 @@
db_handler.restore_database(self.test_db_id)
self.repo = db_handler.get_repo(startup=True)
# get an admin session (without actual login)
- login = unicode(db_handler.config.default_admin_config['login'])
+ login = text_type(db_handler.config.default_admin_config['login'])
self.admin_access = self.new_access(login)
self._admin_session = self.admin_access._session
- self.config.repository = lambda x=None: self.repo
# config management ########################################################
@@ -324,8 +326,11 @@
Configuration is cached on the test class.
"""
+ if cls is CubicWebTC:
+ # Prevent direct use of CubicWebTC to avoid database
+ # caching issues
+ return None
try:
- assert not cls is CubicWebTC, "Don't use CubicWebTC directly to prevent database caching issue"
return cls.__dict__['_config']
except KeyError:
home = abspath(join(dirname(sys.modules[cls.__module__].__file__), cls.appid))
@@ -346,7 +351,7 @@
been properly bootstrapped.
"""
admincfg = config.default_admin_config
- cls.admlogin = unicode(admincfg['login'])
+ cls.admlogin = text_type(admincfg['login'])
cls.admpassword = admincfg['password']
# uncomment the line below if you want rql queries to be logged
#config.global_set_option('query-log-file',
@@ -454,14 +459,14 @@
if password is None:
password = login
if login is not None:
- login = unicode(login)
+ login = text_type(login)
user = req.create_entity('CWUser', login=login,
upassword=password, **kwargs)
req.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)'
% ','.join(repr(str(g)) for g in groups),
{'x': user.eid})
if email is not None:
- req.create_entity('EmailAddress', address=unicode(email),
+ req.create_entity('EmailAddress', address=text_type(email),
reverse_primary_email=user)
user.cw_clear_relation_cache('in_group', 'subject')
if commit:
@@ -519,10 +524,10 @@
similar to `orig_permissions.update(partial_perms)`.
"""
torestore = []
- for erschema, etypeperms in chain(perm_overrides, perm_kwoverrides.iteritems()):
- if isinstance(erschema, basestring):
+ for erschema, etypeperms in chain(perm_overrides, perm_kwoverrides.items()):
+ if isinstance(erschema, string_types):
erschema = self.schema[erschema]
- for action, actionperms in etypeperms.iteritems():
+ for action, actionperms in etypeperms.items():
origperms = erschema.permissions[action]
erschema.set_action_permissions(action, actionperms)
torestore.append([erschema, action, origperms])
@@ -738,8 +743,8 @@
req = self.request(url=url)
if isinstance(url, unicode):
url = url.encode(req.encoding) # req.setup_params() expects encoded strings
- querystring = urlparse.urlparse(url)[-2]
- params = urlparse.parse_qs(querystring)
+ querystring = urlparse(url)[-2]
+ params = parse_qs(querystring)
req.setup_params(params)
return req
@@ -752,8 +757,8 @@
with self.admin_access.web_request(url=url) as req:
if isinstance(url, unicode):
url = url.encode(req.encoding) # req.setup_params() expects encoded strings
- querystring = urlparse.urlparse(url)[-2]
- params = urlparse.parse_qs(querystring)
+ querystring = urlparse(url)[-2]
+ params = parse_qs(querystring)
req.setup_params(params)
yield req
@@ -792,7 +797,7 @@
path = location
params = {}
else:
- cleanup = lambda p: (p[0], unquote(p[1]))
+ cleanup = lambda p: (p[0], urlunquote(p[1]))
params = dict(cleanup(p.split('=', 1)) for p in params.split('&') if p)
if path.startswith(req.base_url()): # may be relative
path = path[len(req.base_url()):]
@@ -885,7 +890,7 @@
}
# maps vid : validator name (override content_type_validators)
vid_validators = dict((vid, htmlparser.VALMAP[valkey])
- for vid, valkey in VIEW_VALIDATORS.iteritems())
+ for vid, valkey in VIEW_VALIDATORS.items())
def view(self, vid, rset=None, req=None, template='main-template',
@@ -908,8 +913,11 @@
view = viewsreg.select(vid, req, rset=rset, **kwargs)
# set explicit test description
if rset is not None:
+ # coerce to "bytes" on py2 because the description will be sent to
+ # sys.stdout/stderr which takes "bytes" on py2 and "unicode" on py3
+ rql = str(rset.printable_rql())
self.set_description("testing vid=%s defined in %s with (%s)" % (
- vid, view.__module__, rset.printable_rql()))
+ vid, view.__module__, rql))
else:
self.set_description("testing vid=%s defined in %s without rset" % (
vid, view.__module__))
@@ -941,7 +949,9 @@
msg = '[%s in %s] %s' % (klass, view.__regid__, exc)
except Exception:
msg = '[%s in %s] undisplayable exception' % (klass, view.__regid__)
- raise AssertionError, msg, tcbk
+ exc = AssertionError(msg)
+ exc.__traceback__ = tcbk
+ raise exc
return self._check_html(output, view, template)
def get_validator(self, view=None, content_type=None, output=None):
@@ -954,11 +964,11 @@
if content_type is None:
content_type = 'text/html'
if content_type in ('text/html', 'application/xhtml+xml') and output:
- if output.startswith('<!DOCTYPE html>'):
+ if output.startswith(b'<!DOCTYPE html>'):
# only check XML well-formness since HTMLValidator isn't html5
# compatible and won't like various other extensions
default_validator = htmlparser.XMLSyntaxValidator
- elif output.startswith('<?xml'):
+ elif output.startswith(b'<?xml'):
default_validator = htmlparser.DTDValidator
else:
default_validator = htmlparser.HTMLValidator
@@ -974,6 +984,9 @@
def _check_html(self, output, view, template='main-template'):
"""raises an exception if the HTML is invalid"""
output = output.strip()
+ if isinstance(output, text_type):
+ # XXX
+ output = output.encode('utf-8')
validator = self.get_validator(view, output=output)
if validator is None:
return output # return raw output if no validator is defined
@@ -999,7 +1012,7 @@
str_exc = str(exc)
except Exception:
str_exc = 'undisplayable exception'
- msg += str_exc
+ msg += str_exc.encode(sys.getdefaultencoding(), 'replace')
if content is not None:
position = getattr(exc, "position", (0,))[0]
if position:
@@ -1016,7 +1029,9 @@
for idx, line in enumerate(content)
if line_context_filter(idx+1, position))
msg += u'\nfor content:\n%s' % content
- raise AssertionError, msg, tcbk
+ exc = AssertionError(msg)
+ exc.__traceback__ = tcbk
+ raise exc
def assertDocTestFile(self, testfile):
# doctest returns tuple (failure_count, test_count)
@@ -1097,7 +1112,7 @@
# new num for etype = max(current num, sum(num for possible target etypes))
#
# XXX we should first check there is no cycle then propagate changes
- for (rschema, etype), targets in relmap.iteritems():
+ for (rschema, etype), targets in relmap.items():
relfactor = sum(howmanydict[e] for e in targets)
howmanydict[str(etype)] = max(relfactor, howmanydict[etype])
return howmanydict
@@ -1167,7 +1182,7 @@
cnx.execute(rql, args)
except ValidationError as ex:
# failed to satisfy some constraint
- print 'error in automatic db population', ex
+ print('error in automatic db population', ex)
cnx.commit_state = None # reset uncommitable flag
self.post_populate(cnx)
@@ -1180,7 +1195,7 @@
else:
rql = 'Any X WHERE X is %s' % etype
rset = req.execute(rql)
- for row in xrange(len(rset)):
+ for row in range(len(rset)):
if limit and row > limit:
break
# XXX iirk
@@ -1244,7 +1259,10 @@
tags = AutoPopulateTest.tags | Tags('web', 'generated')
def setUp(self):
- assert not self.__class__ is AutomaticWebTest, 'Please subclass AutomaticWebTest to prevent database caching issue'
+ if self.__class__ is AutomaticWebTest:
+ # Prevent direct use of AutomaticWebTest to avoid database caching
+ # issues.
+ return
super(AutomaticWebTest, self).setUp()
# access to self.app for proper initialization of the authentication
@@ -1285,7 +1303,7 @@
# # XXX broken
# from cubicweb.devtools.apptest import TestEnvironment
# env = testclass._env = TestEnvironment('data', configcls=testclass.configcls)
-# for reg in env.vreg.itervalues():
+# for reg in env.vreg.values():
# reg._selected = {}
# try:
# orig_select_best = reg.__class__.__orig_select_best
@@ -1305,10 +1323,10 @@
# def print_untested_objects(testclass, skipregs=('hooks', 'etypes')):
-# for regname, reg in testclass._env.vreg.iteritems():
+# for regname, reg in testclass._env.vreg.items():
# if regname in skipregs:
# continue
-# for appobjects in reg.itervalues():
+# for appobjects in reg.values():
# for appobject in appobjects:
# if not reg._selected.get(appobject):
# print 'not tested', regname, appobject
--- a/doc/book/devrepo/testing.rst Thu Mar 06 15:55:33 2014 +0100
+++ b/doc/book/devrepo/testing.rst Thu Nov 12 10:52:28 2015 +0100
@@ -324,9 +324,9 @@
def test_blog_rss(self):
with self.admin_access.web_request() as req:
- rset = req.execute('Any B ORDERBY D DESC WHERE B is BlogEntry, '
- 'B created_by U, U login "logilab", B creation_date D')
- self.view('rss', rset, req=req)
+ rset = req.execute('Any B ORDERBY D DESC WHERE B is BlogEntry, '
+ 'B created_by U, U login "logilab", B creation_date D')
+ self.view('rss', rset, req=req)
Testing with other cubes
--- a/doc/book/devweb/views/table.rst Thu Mar 06 15:55:33 2014 +0100
+++ b/doc/book/devweb/views/table.rst Thu Nov 12 10:52:28 2015 +0100
@@ -96,8 +96,8 @@
'resource': MainEntityColRenderer(),
'workpackage': EntityTableColRenderer(
header='Workpackage',
- renderfunc=worpackage_cell,
- sortfunc=worpackage_sortvalue,),
+ renderfunc=workpackage_cell,
+ sortfunc=workpackage_sortvalue,),
'in_state': EntityTableColRenderer(
renderfunc=lambda w,x: w(x.cw_adapt_to('IWorkflowable').printable_state),
sortfunc=lambda x: x.cw_adapt_to('IWorkflowable').printable_state),
--- a/doc/tools/mode_plan.py Thu Mar 06 15:55:33 2014 +0100
+++ b/doc/tools/mode_plan.py Thu Nov 12 10:52:28 2015 +0100
@@ -23,17 +23,19 @@
rename A010-joe.en.txt to A030-joe.en.txt
accept [y/N]?
"""
+from __future__ import print_function
+
def ren(a,b):
names = glob.glob('%s*'%a)
for name in names :
- print 'rename %s to %s' % (name, name.replace(a,b))
+ print('rename %s to %s' % (name, name.replace(a,b)))
if raw_input('accept [y/N]?').lower() =='y':
for name in names:
os.system('hg mv %s %s' % (name, name.replace(a,b)))
-def ls(): print '\n'.join(sorted(os.listdir('.')))
+def ls(): print('\n'.join(sorted(os.listdir('.'))))
def move():
filenames = []
@@ -47,4 +49,4 @@
for num, name in filenames:
if num >= start:
- print 'hg mv %s %2i%s' %(name,num+1,name[2:])
+ print('hg mv %s %2i%s' %(name,num+1,name[2:]))
--- a/doc/tutorials/dataimport/diseasome_import.py Thu Mar 06 15:55:33 2014 +0100
+++ b/doc/tutorials/dataimport/diseasome_import.py Thu Nov 12 10:52:28 2015 +0100
@@ -95,7 +95,7 @@
# Perform a first commit, of the entities
store.flush()
kwargs = {}
- for uri, relations in all_relations.iteritems():
+ for uri, relations in all_relations.items():
from_eid = uri_to_eid.get(uri)
# ``subjtype`` should be initialized if ``SQLGenObjectStore`` is used
# and there are inlined relations in the schema.
@@ -108,7 +108,7 @@
kwargs['subjtype'] = uri_to_etype.get(uri)
if not from_eid:
continue
- for rtype, rels in relations.iteritems():
+ for rtype, rels in relations.items():
if rtype in ('classes', 'possible_drugs', 'omim', 'omim_page',
'chromosomal_location', 'same_as', 'gene_id',
'hgnc_id', 'hgnc_page'):
--- a/doc/tutorials/dataimport/diseasome_parser.py Thu Mar 06 15:55:33 2014 +0100
+++ b/doc/tutorials/dataimport/diseasome_parser.py Thu Nov 12 10:52:28 2015 +0100
@@ -97,4 +97,4 @@
entities[subj]['relations'].setdefault(MAPPING_RELS[rel], set())
entities[subj]['relations'][MAPPING_RELS[rel]].add(unicode(obj))
return ((ent.get('attributes'), ent.get('relations'))
- for ent in entities.itervalues())
+ for ent in entities.values())
--- a/entities/__init__.py Thu Mar 06 15:55:33 2014 +0100
+++ b/entities/__init__.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,6 +19,7 @@
__docformat__ = "restructuredtext en"
+from six import text_type, string_types
from logilab.common.decorators import classproperty
@@ -41,7 +42,7 @@
@classproperty
def cw_etype(cls):
"""entity type as a unicode string"""
- return unicode(cls.__regid__)
+ return text_type(cls.__regid__)
@classmethod
def cw_create_url(cls, req, **kwargs):
@@ -144,7 +145,7 @@
return self.dc_title().lower()
value = self.cw_attr_value(rtype)
# do not restrict to `unicode` because Bytes will return a `str` value
- if isinstance(value, basestring):
+ if isinstance(value, string_types):
return self.printable_value(rtype, format='text/plain').lower()
return value
--- a/entities/adapters.py Thu Mar 06 15:55:33 2014 +0100
+++ b/entities/adapters.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,7 +20,7 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from itertools import chain
from warnings import warn
@@ -162,7 +162,7 @@
return words
def merge_weight_dict(maindict, newdict):
- for weight, words in newdict.iteritems():
+ for weight, words in newdict.items():
maindict.setdefault(weight, []).extend(words)
class IDownloadableAdapter(view.EntityAdapter):
@@ -171,23 +171,25 @@
__abstract__ = True
def download_url(self, **kwargs): # XXX not really part of this interface
- """return a URL to download entity's content"""
+ """return a URL to download entity's content
+
+ It should be a unicode object containing url-encoded ASCII."""
raise NotImplementedError
def download_content_type(self):
- """return MIME type of the downloadable content"""
+ """return MIME type (unicode) of the downloadable content"""
raise NotImplementedError
def download_encoding(self):
- """return encoding of the downloadable content"""
+ """return encoding (unicode) of the downloadable content"""
raise NotImplementedError
def download_file_name(self):
- """return file name of the downloadable content"""
+ """return file name (unicode) of the downloadable content"""
raise NotImplementedError
def download_data(self):
- """return actual data of the downloadable content"""
+ """return actual data (bytes) of the downloadable content"""
raise NotImplementedError
# XXX should propose to use two different relations for children/parent
@@ -386,7 +388,7 @@
for rschema, attrschema in eschema.attribute_definitions():
rdef = rschema.rdef(eschema, attrschema)
for constraint in rdef.constraints:
- if cstrname == 'cstr' + md5(eschema.type + rschema.type + constraint.type() + (constraint.serialize() or '')).hexdigest():
+ if cstrname == 'cstr' + md5((eschema.type + rschema.type + constraint.type() + (constraint.serialize() or '')).encode('ascii')).hexdigest():
break
else:
continue
--- a/entities/authobjs.py Thu Mar 06 15:55:33 2014 +0100
+++ b/entities/authobjs.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,6 +19,8 @@
__docformat__ = "restructuredtext en"
+from six import string_types
+
from logilab.common.decorators import cached
from cubicweb import Unauthorized
@@ -126,7 +128,7 @@
:type groups: str or iterable(str)
:param groups: a group name or an iterable on group names
"""
- if isinstance(groups, basestring):
+ if isinstance(groups, string_types):
groups = frozenset((groups,))
elif isinstance(groups, (tuple, list)):
groups = frozenset(groups)
--- a/entities/lib.py Thu Mar 06 15:55:33 2014 +0100
+++ b/entities/lib.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,9 +19,10 @@
__docformat__ = "restructuredtext en"
from warnings import warn
+from datetime import datetime
-from urlparse import urlsplit, urlunsplit
-from datetime import datetime
+from six.moves import range
+from six.moves.urllib.parse import urlsplit, urlunsplit
from logilab.mtconverter import xml_escape
@@ -67,7 +68,7 @@
{'y': self.eid})
if skipeids is None:
skipeids = set()
- for i in xrange(len(rset)):
+ for i in range(len(rset)):
eid = rset[i][0]
if eid in skipeids:
continue
--- a/entities/sources.py Thu Mar 06 15:55:33 2014 +0100
+++ b/entities/sources.py Thu Nov 12 10:52:28 2015 +0100
@@ -42,7 +42,7 @@
cfg.update(config)
options = SOURCE_TYPES[self.type].options
sconfig = SourceConfiguration(self._cw.vreg.config, options=options)
- for opt, val in cfg.iteritems():
+ for opt, val in cfg.items():
try:
sconfig.set_option(opt, val)
except OptionError:
--- a/entities/test/unittest_base.py Thu Mar 06 15:55:33 2014 +0100
+++ b/entities/test/unittest_base.py Thu Nov 12 10:52:28 2015 +0100
@@ -60,7 +60,7 @@
# XXX move to yams
self.assertEqual(self.schema['CWUser'].meta_attributes(), {})
self.assertEqual(dict((str(k), v)
- for k, v in self.schema['State'].meta_attributes().iteritems()),
+ for k, v in self.schema['State'].meta_attributes().items()),
{'description_format': ('format', 'description')})
def test_fti_rql_method(self):
--- a/entities/test/unittest_wfobjs.py Thu Mar 06 15:55:33 2014 +0100
+++ b/entities/test/unittest_wfobjs.py Thu Nov 12 10:52:28 2015 +0100
@@ -107,7 +107,7 @@
def setup_database(self):
rschema = self.schema['in_state']
- for rdef in rschema.rdefs.itervalues():
+ for rdef in rschema.rdefs.values():
self.assertEqual(rdef.cardinality, '1*')
with self.admin_access.client_cnx() as cnx:
self.member_eid = self.create_user(cnx, 'member').eid
--- a/entities/wfobjs.py Thu Mar 06 15:55:33 2014 +0100
+++ b/entities/wfobjs.py Thu Nov 12 10:52:28 2015 +0100
@@ -21,9 +21,11 @@
* workflow history (TrInfo)
* adapter for workflowable entities (IWorkflowableAdapter)
"""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
+from six import text_type, string_types
from logilab.common.decorators import cached, clear_cache
from logilab.common.deprecation import deprecated
@@ -97,7 +99,7 @@
def transition_by_name(self, trname):
rset = self._cw.execute('Any T, TN WHERE T name TN, T name %(n)s, '
'T transition_of WF, WF eid %(wf)s',
- {'n': unicode(trname), 'wf': self.eid})
+ {'n': text_type(trname), 'wf': self.eid})
if rset:
return rset.get_entity(0, 0)
return None
@@ -114,7 +116,7 @@
def add_state(self, name, initial=False, **kwargs):
"""add a state to this workflow"""
- state = self._cw.create_entity('State', name=unicode(name), **kwargs)
+ state = self._cw.create_entity('State', name=text_type(name), **kwargs)
self._cw.execute('SET S state_of WF WHERE S eid %(s)s, WF eid %(wf)s',
{'s': state.eid, 'wf': self.eid})
if initial:
@@ -126,7 +128,7 @@
def _add_transition(self, trtype, name, fromstates,
requiredgroups=(), conditions=(), **kwargs):
- tr = self._cw.create_entity(trtype, name=unicode(name), **kwargs)
+ tr = self._cw.create_entity(trtype, name=text_type(name), **kwargs)
self._cw.execute('SET T transition_of WF '
'WHERE T eid %(t)s, WF eid %(wf)s',
{'t': tr.eid, 'wf': self.eid})
@@ -224,19 +226,19 @@
matches = user.matching_groups(groups)
if matches:
if DBG:
- print 'may_be_fired: %r may fire: user matches %s' % (self.name, groups)
+ print('may_be_fired: %r may fire: user matches %s' % (self.name, groups))
return matches
if 'owners' in groups and user.owns(eid):
if DBG:
- print 'may_be_fired: %r may fire: user is owner' % self.name
+ print('may_be_fired: %r may fire: user is owner' % self.name)
return True
# check one of the rql expression conditions matches if any
if self.condition:
if DBG:
- print ('my_be_fired: %r: %s' %
- (self.name, [(rqlexpr.expression,
+ print('my_be_fired: %r: %s' %
+ (self.name, [(rqlexpr.expression,
rqlexpr.check_expression(self._cw, eid))
- for rqlexpr in self.condition]))
+ for rqlexpr in self.condition]))
for rqlexpr in self.condition:
if rqlexpr.check_expression(self._cw, eid):
return True
@@ -256,13 +258,13 @@
for gname in requiredgroups:
rset = self._cw.execute('SET T require_group G '
'WHERE T eid %(x)s, G name %(gn)s',
- {'x': self.eid, 'gn': unicode(gname)})
+ {'x': self.eid, 'gn': text_type(gname)})
assert rset, '%s is not a known group' % gname
- if isinstance(conditions, basestring):
+ if isinstance(conditions, string_types):
conditions = (conditions,)
for expr in conditions:
- if isinstance(expr, basestring):
- kwargs = {'expr': unicode(expr)}
+ if isinstance(expr, string_types):
+ kwargs = {'expr': text_type(expr)}
else:
assert isinstance(expr, dict)
kwargs = expr
@@ -414,7 +416,7 @@
"""return the default workflow for entities of this type"""
# XXX CWEType method
wfrset = self._cw.execute('Any WF WHERE ET default_workflow WF, '
- 'ET name %(et)s', {'et': unicode(self.entity.cw_etype)})
+ 'ET name %(et)s', {'et': text_type(self.entity.cw_etype)})
if wfrset:
return wfrset.get_entity(0, 0)
self.warning("can't find any workflow for %s", self.entity.cw_etype)
@@ -479,7 +481,7 @@
'Any T,TT, TN WHERE S allowed_transition T, S eid %(x)s, '
'T type TT, T type %(type)s, '
'T name TN, T transition_of WF, WF eid %(wfeid)s',
- {'x': self.current_state.eid, 'type': unicode(type),
+ {'x': self.current_state.eid, 'type': text_type(type),
'wfeid': self.current_workflow.eid})
for tr in rset.entities():
if tr.may_be_fired(self.entity.eid):
@@ -528,7 +530,7 @@
def _get_transition(self, tr):
assert self.current_workflow
- if isinstance(tr, basestring):
+ if isinstance(tr, string_types):
_tr = self.current_workflow.transition_by_name(tr)
assert _tr is not None, 'not a %s transition: %s' % (
self.__regid__, tr)
@@ -549,7 +551,7 @@
tr = self._get_transition(tr)
if any(tr_ for tr_ in self.possible_transitions()
if tr_.eid == tr.eid):
- self.fire_transition(tr)
+ self.fire_transition(tr, comment, commentformat)
def change_state(self, statename, comment=None, commentformat=None, tr=None):
"""change the entity's state to the given state (name or entity) in
--- a/entity.py Thu Mar 06 15:55:33 2014 +0100
+++ b/entity.py Thu Nov 12 10:52:28 2015 +0100
@@ -22,6 +22,9 @@
from warnings import warn
from functools import partial
+from six import text_type, string_types, integer_types
+from six.moves import range
+
from logilab.common.decorators import cached
from logilab.common.deprecation import deprecated
from logilab.common.registry import yes
@@ -57,7 +60,7 @@
"""return True if value can be used at the end of a Rest URL path"""
if value is None:
return False
- value = unicode(value)
+ value = text_type(value)
# the check for ?, /, & are to prevent problems when running
# behind Apache mod_proxy
if value == u'' or u'?' in value or u'/' in value or u'&' in value:
@@ -105,7 +108,7 @@
"""
st = cstr.snippet_rqlst.copy()
# replace relations in ST by eid infos from linkto where possible
- for (info_rtype, info_role), eids in lt_infos.iteritems():
+ for (info_rtype, info_role), eids in lt_infos.items():
eid = eids[0] # NOTE: we currently assume a pruned lt_info with only 1 eid
for rel in st.iget_nodes(RqlRelation):
targetvar = rel_matches(rel, info_rtype, info_role, evar.name)
@@ -132,7 +135,7 @@
def pruned_lt_info(eschema, lt_infos):
pruned = {}
- for (lt_rtype, lt_role), eids in lt_infos.iteritems():
+ for (lt_rtype, lt_role), eids in lt_infos.items():
# we can only use lt_infos describing relation with a cardinality
# of value 1 towards the linked entity
if not len(eids) == 1:
@@ -144,6 +147,7 @@
pruned[(lt_rtype, lt_role)] = eids
return pruned
+
class Entity(AppObject):
"""an entity instance has e_schema automagically set on
the class and instances has access to their issuing cursor.
@@ -279,7 +283,7 @@
select = Select()
mainvar = select.get_variable(mainvar)
select.add_selected(mainvar)
- elif isinstance(mainvar, basestring):
+ elif isinstance(mainvar, string_types):
assert mainvar in select.defined_vars
mainvar = select.get_variable(mainvar)
# eases string -> syntax tree test transition: please remove once stable
@@ -455,7 +459,7 @@
if len(value) == 0:
continue # avoid crash with empty IN clause
elif len(value) == 1:
- value = iter(value).next()
+ value = next(iter(value))
else:
# prepare IN clause
pendingrels.append( (attr, role, value) )
@@ -530,6 +534,7 @@
def __init__(self, req, rset=None, row=None, col=0):
AppObject.__init__(self, req, rset=rset, row=row, col=col)
self._cw_related_cache = {}
+ self._cw_adapters_cache = {}
if rset is not None:
self.eid = rset[row][col]
else:
@@ -545,12 +550,12 @@
raise NotImplementedError('comparison not implemented for %s' % self.__class__)
def __eq__(self, other):
- if isinstance(self.eid, (int, long)):
+ if isinstance(self.eid, integer_types):
return self.eid == other.eid
return self is other
def __hash__(self):
- if isinstance(self.eid, (int, long)):
+ if isinstance(self.eid, integer_types):
return self.eid
return super(Entity, self).__hash__()
@@ -567,10 +572,7 @@
return None if it can not be adapted.
"""
- try:
- cache = self._cw_adapters_cache
- except AttributeError:
- self._cw_adapters_cache = cache = {}
+ cache = self._cw_adapters_cache
try:
return cache[interface]
except KeyError:
@@ -677,8 +679,8 @@
if path is None:
# fallback url: <base-url>/<eid> url is used as cw entities uri,
# prefer it to <base-url>/<etype>/eid/<eid>
- return unicode(value)
- return '%s/%s' % (path, self._cw.url_quote(value))
+ return text_type(value)
+ return u'%s/%s' % (path, self._cw.url_quote(value))
def cw_attr_metadata(self, attr, metadata):
"""return a metadata for an attribute (None if unspecified)"""
@@ -695,7 +697,7 @@
attr = str(attr)
if value is _marker:
value = getattr(self, attr)
- if isinstance(value, basestring):
+ if isinstance(value, string_types):
value = value.strip()
if value is None or value == '': # don't use "not", 0 is an acceptable value
return u''
@@ -849,7 +851,7 @@
if attributes is None:
self._cw_completed = True
varmaker = rqlvar_maker()
- V = varmaker.next()
+ V = next(varmaker)
rql = ['WHERE %s eid %%(x)s' % V]
selected = []
for attr in (attributes or self._cw_to_complete_attributes(skip_bytes, skip_pwd)):
@@ -857,7 +859,7 @@
if attr in self.cw_attr_cache:
continue
# case where attribute must be completed, but is not yet in entity
- var = varmaker.next()
+ var = next(varmaker)
rql.append('%s %s %s' % (V, attr, var))
selected.append((attr, var))
# +1 since this doesn't include the main variable
@@ -876,7 +878,7 @@
# * user has read perm on the relation and on the target entity
assert rschema.inlined
assert role == 'subject'
- var = varmaker.next()
+ var = next(varmaker)
# keep outer join anyway, we don't want .complete to crash on
# missing mandatory relation (see #1058267)
rql.append('%s %s %s?' % (V, rtype, var))
@@ -892,10 +894,10 @@
raise Exception('unable to fetch attributes for entity with eid %s'
% self.eid)
# handle attributes
- for i in xrange(1, lastattr):
+ for i in range(1, lastattr):
self.cw_attr_cache[str(selected[i-1][0])] = rset[i]
# handle relations
- for i in xrange(lastattr, len(rset)):
+ for i in range(lastattr, len(rset)):
rtype, role = selected[i-1][0]
value = rset[i]
if value is None:
@@ -1145,9 +1147,7 @@
self._cw.vreg.solutions(self._cw, select, args)
# insert RQL expressions for schema constraints into the rql syntax tree
if vocabconstraints:
- # RQLConstraint is a subclass for RQLVocabularyConstraint, so they
- # will be included as well
- cstrcls = RQLVocabularyConstraint
+ cstrcls = (RQLVocabularyConstraint, RQLConstraint)
else:
cstrcls = RQLConstraint
lt_infos = pruned_lt_info(self.e_schema, lt_infos or {})
@@ -1236,8 +1236,8 @@
no relation is given
"""
if rtype is None:
- self._cw_related_cache = {}
- self._cw_adapters_cache = {}
+ self._cw_related_cache.clear()
+ self._cw_adapters_cache.clear()
else:
assert role
self._cw_related_cache.pop('%s_%s' % (rtype, role), None)
--- a/etwist/request.py Thu Mar 06 15:55:33 2014 +0100
+++ b/etwist/request.py Thu Nov 12 10:52:28 2015 +0100
@@ -31,7 +31,7 @@
self._twreq = req
super(CubicWebTwistedRequestAdapter, self).__init__(
vreg, https, req.args, headers=req.received_headers)
- for key, name_stream_list in req.files.iteritems():
+ for key, name_stream_list in req.files.items():
for name, stream in name_stream_list:
if name is not None:
name = unicode(name, self.encoding)
--- a/etwist/server.py Thu Mar 06 15:55:33 2014 +0100
+++ b/etwist/server.py Thu Nov 12 10:52:28 2015 +0100
@@ -22,8 +22,10 @@
import select
import traceback
import threading
-from urlparse import urlsplit, urlunsplit
from cgi import FieldStorage, parse_header
+
+from six.moves.urllib.parse import urlsplit, urlunsplit
+
from cubicweb.statsd_logger import statsd_timeit
from twisted.internet import reactor, task, threads
--- a/etwist/service.py Thu Mar 06 15:55:33 2014 +0100
+++ b/etwist/service.py Thu Nov 12 10:52:28 2015 +0100
@@ -15,6 +15,8 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import print_function
+
import os
import sys
@@ -22,7 +24,7 @@
import win32serviceutil
import win32service
except ImportError:
- print 'Win32 extensions for Python are likely not installed.'
+ print('Win32 extensions for Python are likely not installed.')
sys.exit(3)
from os.path import join
--- a/ext/rest.py Thu Mar 06 15:55:33 2014 +0100
+++ b/ext/rest.py Thu Nov 12 10:52:28 2015 +0100
@@ -37,7 +37,9 @@
from itertools import chain
from logging import getLogger
from os.path import join
-from urlparse import urlsplit
+
+from six import text_type
+from six.moves.urllib.parse import urlsplit
from docutils import statemachine, nodes, utils, io
from docutils.core import Publisher
@@ -168,7 +170,7 @@
rql = params['rql']
if vid is None:
vid = params.get('vid')
- except (ValueError, KeyError), exc:
+ except (ValueError, KeyError) as exc:
msg = inliner.reporter.error('Could not parse bookmark path %s [%s].'
% (bookmark.path, exc), line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
@@ -182,7 +184,7 @@
vid = 'noresult'
view = _cw.vreg['views'].select(vid, _cw, rset=rset)
content = view.render()
- except Exception, exc:
+ except Exception as exc:
content = 'An error occurred while interpreting directive bookmark: %r' % exc
set_classes(options)
return [nodes.raw('', content, format='html')], []
@@ -395,7 +397,7 @@
the data formatted as HTML or the original data if an error occurred
"""
req = context._cw
- if isinstance(data, unicode):
+ if isinstance(data, text_type):
encoding = 'unicode'
# remove unprintable characters unauthorized in xml
data = data.translate(ESC_UCAR_TABLE)
@@ -438,8 +440,8 @@
return res
except BaseException:
LOGGER.exception('error while publishing ReST text')
- if not isinstance(data, unicode):
- data = unicode(data, encoding, 'replace')
+ if not isinstance(data, text_type):
+ data = text_type(data, encoding, 'replace')
return xml_escape(req._('error while publishing ReST text')
+ '\n\n' + data)
--- a/ext/tal.py Thu Mar 06 15:55:33 2014 +0100
+++ b/ext/tal.py Thu Nov 12 10:52:28 2015 +0100
@@ -184,7 +184,10 @@
interpreter.execute(self)
except UnicodeError as unierror:
LOGGER.exception(str(unierror))
- raise simpleTALES.ContextContentException("found non-unicode %r string in Context!" % unierror.args[1]), None, sys.exc_info()[-1]
+ exc = simpleTALES.ContextContentException(
+ "found non-unicode %r string in Context!" % unierror.args[1])
+ exc.__traceback__ = sys.exc_info()[-1]
+ raise exc
def compile_template(template):
@@ -203,7 +206,7 @@
:type filepath: str
:param template: path of the file to compile
"""
- fp = file(filepath)
+ fp = open(filepath)
file_content = unicode(fp.read()) # template file should be pure ASCII
fp.close()
return compile_template(file_content)
@@ -232,7 +235,8 @@
result = eval(expr, globals, locals)
except Exception as ex:
ex = ex.__class__('in %r: %s' % (expr, ex))
- raise ex, None, sys.exc_info()[-1]
+ ex.__traceback__ = sys.exc_info()[-1]
+ raise ex
if (isinstance (result, simpleTALES.ContextVariable)):
return result.value()
return result
--- a/ext/test/unittest_rest.py Thu Mar 06 15:55:33 2014 +0100
+++ b/ext/test/unittest_rest.py Thu Nov 12 10:52:28 2015 +0100
@@ -15,6 +15,8 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+from six import PY3
+
from logilab.common.testlib import unittest_main
from cubicweb.devtools.testlib import CubicWebTC
@@ -79,7 +81,9 @@
context = self.context(req)
out = rest_publish(context, ':rql:`Any X WHERE X is CWUser:toto`')
self.assertTrue(out.startswith("<p>an error occurred while interpreting this "
- "rql directive: ObjectNotFound(u'toto',)</p>"))
+ "rql directive: ObjectNotFound(%s'toto',)</p>" %
+ ('' if PY3 else 'u')),
+ out)
def test_rql_role_without_vid(self):
with self.admin_access.web_request() as req:
@@ -221,7 +225,7 @@
%(rql)s
""" % {'rql': rql,
'colvids': ', '.join(["%d=%s" % (k, v)
- for k, v in colvids.iteritems()])
+ for k, v in colvids.items()])
})
view = self.vreg['views'].select('table', req, rset=req.execute(rql))
view.cellvids = colvids
--- a/gettext.py Thu Mar 06 15:55:33 2014 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,795 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""Internationalization and localization support.
-
-This module provides internationalization (I18N) and localization (L10N)
-support for your Python programs by providing an interface to the GNU gettext
-message catalog library.
-
-I18N refers to the operation by which a program is made aware of multiple
-languages. L10N refers to the adaptation of your program, once
-internationalized, to the local language and cultural habits.
-
-"""
-
-# This module represents the integration of work, contributions, feedback, and
-# suggestions from the following people:
-#
-# Martin von Loewis, who wrote the initial implementation of the underlying
-# C-based libintlmodule (later renamed _gettext), along with a skeletal
-# gettext.py implementation.
-#
-# Peter Funk, who wrote fintl.py, a fairly complete wrapper around intlmodule,
-# which also included a pure-Python implementation to read .mo files if
-# intlmodule wasn't available.
-#
-# James Henstridge, who also wrote a gettext.py module, which has some
-# interesting, but currently unsupported experimental features: the notion of
-# a Catalog class and instances, and the ability to add to a catalog file via
-# a Python API.
-#
-# Barry Warsaw integrated these modules, wrote the .install() API and code,
-# and conformed all C and Python code to Python's coding standards.
-#
-# Francois Pinard and Marc-Andre Lemburg also contributed valuably to this
-# module.
-#
-# J. David Ibanez implemented plural forms. Bruno Haible fixed some bugs.
-#
-# TODO:
-# - Lazy loading of .mo files. Currently the entire catalog is loaded into
-# memory, but that's probably bad for large translated programs. Instead,
-# the lexical sort of original strings in GNU .mo files should be exploited
-# to do binary searches and lazy initializations. Or you might want to use
-# the undocumented double-hash algorithm for .mo files with hash tables, but
-# you'll need to study the GNU gettext code to do this.
-#
-# - Support Solaris .mo file formats. Unfortunately, we've been unable to
-# find this format documented anywhere.
-
-
-import locale, copy, os, re, struct, sys
-from errno import ENOENT
-
-
-__all__ = ['NullTranslations', 'GNUTranslations', 'Catalog',
- 'find', 'translation', 'install', 'textdomain', 'bindtextdomain',
- 'dgettext', 'dngettext', 'gettext', 'ngettext',
- ]
-
-_default_localedir = os.path.join(sys.prefix, 'share', 'locale')
-
-
-def test(condition, true, false):
- """
- Implements the C expression:
-
- condition ? true : false
-
- Required to correctly interpret plural forms.
- """
- if condition:
- return true
- else:
- return false
-
-
-def c2py(plural):
- """Gets a C expression as used in PO files for plural forms and returns a
- Python lambda function that implements an equivalent expression.
- """
- # Security check, allow only the "n" identifier
- try:
- from cStringIO import StringIO
- except ImportError:
- from StringIO import StringIO
- import token, tokenize
- tokens = tokenize.generate_tokens(StringIO(plural).readline)
- try:
- danger = [x for x in tokens if x[0] == token.NAME and x[1] != 'n']
- except tokenize.TokenError:
- raise ValueError, \
- 'plural forms expression error, maybe unbalanced parenthesis'
- else:
- if danger:
- raise ValueError, 'plural forms expression could be dangerous'
-
- # Replace some C operators by their Python equivalents
- plural = plural.replace('&&', ' and ')
- plural = plural.replace('||', ' or ')
-
- expr = re.compile(r'\!([^=])')
- plural = expr.sub(' not \\1', plural)
-
- # Regular expression and replacement function used to transform
- # "a?b:c" to "test(a,b,c)".
- expr = re.compile(r'(.*?)\?(.*?):(.*)')
- def repl(x):
- return "test(%s, %s, %s)" % (x.group(1), x.group(2),
- expr.sub(repl, x.group(3)))
-
- # Code to transform the plural expression, taking care of parentheses
- stack = ['']
- for c in plural:
- if c == '(':
- stack.append('')
- elif c == ')':
- if len(stack) == 1:
- # Actually, we never reach this code, because unbalanced
- # parentheses get caught in the security check at the
- # beginning.
- raise ValueError, 'unbalanced parenthesis in plural form'
- s = expr.sub(repl, stack.pop())
- stack[-1] += '(%s)' % s
- else:
- stack[-1] += c
- plural = expr.sub(repl, stack.pop())
-
- return eval('lambda n: int(%s)' % plural)
-
-
-
-def _expand_lang(locale):
- from locale import normalize
- locale = normalize(locale)
- COMPONENT_CODESET = 1 << 0
- COMPONENT_TERRITORY = 1 << 1
- COMPONENT_MODIFIER = 1 << 2
- # split up the locale into its base components
- mask = 0
- pos = locale.find('@')
- if pos >= 0:
- modifier = locale[pos:]
- locale = locale[:pos]
- mask |= COMPONENT_MODIFIER
- else:
- modifier = ''
- pos = locale.find('.')
- if pos >= 0:
- codeset = locale[pos:]
- locale = locale[:pos]
- mask |= COMPONENT_CODESET
- else:
- codeset = ''
- pos = locale.find('_')
- if pos >= 0:
- territory = locale[pos:]
- locale = locale[:pos]
- mask |= COMPONENT_TERRITORY
- else:
- territory = ''
- language = locale
- ret = []
- for i in range(mask+1):
- if not (i & ~mask): # if all components for this combo exist ...
- val = language
- if i & COMPONENT_TERRITORY: val += territory
- if i & COMPONENT_CODESET: val += codeset
- if i & COMPONENT_MODIFIER: val += modifier
- ret.append(val)
- ret.reverse()
- return ret
-
-
-
-class NullTranslations:
- def __init__(self, fp=None):
- self._info = {}
- self._charset = None
- self._output_charset = None
- self._fallback = None
- if fp is not None:
- self._parse(fp)
-
- def _parse(self, fp):
- pass
-
- def add_fallback(self, fallback):
- if self._fallback:
- self._fallback.add_fallback(fallback)
- else:
- self._fallback = fallback
-
- def gettext(self, message):
- if self._fallback:
- return self._fallback.gettext(message)
- return message
-
- def pgettext(self, context, message):
- if self._fallback:
- return self._fallback.pgettext(context, message)
- return message
-
- def lgettext(self, message):
- if self._fallback:
- return self._fallback.lgettext(message)
- return message
-
- def lpgettext(self, context, message):
- if self._fallback:
- return self._fallback.lpgettext(context, message)
- return message
-
- def ngettext(self, msgid1, msgid2, n):
- if self._fallback:
- return self._fallback.ngettext(msgid1, msgid2, n)
- if n == 1:
- return msgid1
- else:
- return msgid2
-
- def npgettext(self, context, msgid1, msgid2, n):
- if self._fallback:
- return self._fallback.npgettext(context, msgid1, msgid2, n)
- if n == 1:
- return msgid1
- else:
- return msgid2
-
- def lngettext(self, msgid1, msgid2, n):
- if self._fallback:
- return self._fallback.lngettext(msgid1, msgid2, n)
- if n == 1:
- return msgid1
- else:
- return msgid2
-
- def lnpgettext(self, context, msgid1, msgid2, n):
- if self._fallback:
- return self._fallback.lnpgettext(context, msgid1, msgid2, n)
- if n == 1:
- return msgid1
- else:
- return msgid2
-
- def ugettext(self, message):
- if self._fallback:
- return self._fallback.ugettext(message)
- return unicode(message)
-
- def upgettext(self, context, message):
- if self._fallback:
- return self._fallback.upgettext(context, message)
- return unicode(message)
-
- def ungettext(self, msgid1, msgid2, n):
- if self._fallback:
- return self._fallback.ungettext(msgid1, msgid2, n)
- if n == 1:
- return unicode(msgid1)
- else:
- return unicode(msgid2)
-
- def unpgettext(self, context, msgid1, msgid2, n):
- if self._fallback:
- return self._fallback.unpgettext(context, msgid1, msgid2, n)
- if n == 1:
- return unicode(msgid1)
- else:
- return unicode(msgid2)
-
- def info(self):
- return self._info
-
- def charset(self):
- return self._charset
-
- def output_charset(self):
- return self._output_charset
-
- def set_output_charset(self, charset):
- self._output_charset = charset
-
- def install(self, unicode=False, names=None):
- import __builtin__
- __builtin__.__dict__['_'] = unicode and self.ugettext or self.gettext
- if hasattr(names, "__contains__"):
- if "gettext" in names:
- __builtin__.__dict__['gettext'] = __builtin__.__dict__['_']
- if "pgettext" in names:
- __builtin__.__dict__['pgettext'] = (unicode and self.upgettext
- or self.pgettext)
- if "ngettext" in names:
- __builtin__.__dict__['ngettext'] = (unicode and self.ungettext
- or self.ngettext)
- if "npgettext" in names:
- __builtin__.__dict__['npgettext'] = \
- (unicode and self.unpgettext or self.npgettext)
- if "lgettext" in names:
- __builtin__.__dict__['lgettext'] = self.lgettext
- if "lpgettext" in names:
- __builtin__.__dict__['lpgettext'] = self.lpgettext
- if "lngettext" in names:
- __builtin__.__dict__['lngettext'] = self.lngettext
- if "lnpgettext" in names:
- __builtin__.__dict__['lnpgettext'] = self.lnpgettext
-
-
-class GNUTranslations(NullTranslations):
- # Magic number of .mo files
- LE_MAGIC = 0x950412deL
- BE_MAGIC = 0xde120495L
-
- # The encoding of a msgctxt and a msgid in a .mo file is
- # msgctxt + "\x04" + msgid (gettext version >= 0.15)
- CONTEXT_ENCODING = "%s\x04%s"
-
- def _parse(self, fp):
- """Override this method to support alternative .mo formats."""
- unpack = struct.unpack
- filename = getattr(fp, 'name', '')
- # Parse the .mo file header, which consists of 5 little endian 32
- # bit words.
- self._catalog = catalog = {}
- self.plural = lambda n: int(n != 1) # germanic plural by default
- buf = fp.read()
- buflen = len(buf)
- # Are we big endian or little endian?
- magic = unpack('<I', buf[:4])[0]
- if magic == self.LE_MAGIC:
- version, msgcount, masteridx, transidx = unpack('<4I', buf[4:20])
- ii = '<II'
- elif magic == self.BE_MAGIC:
- version, msgcount, masteridx, transidx = unpack('>4I', buf[4:20])
- ii = '>II'
- else:
- raise IOError(0, 'Bad magic number', filename)
- # Now put all messages from the .mo file buffer into the catalog
- # dictionary.
- for i in xrange(0, msgcount):
- mlen, moff = unpack(ii, buf[masteridx:masteridx+8])
- mend = moff + mlen
- tlen, toff = unpack(ii, buf[transidx:transidx+8])
- tend = toff + tlen
- if mend < buflen and tend < buflen:
- msg = buf[moff:mend]
- tmsg = buf[toff:tend]
- else:
- raise IOError(0, 'File is corrupt', filename)
- # See if we're looking at GNU .mo conventions for metadata
- if mlen == 0:
- # Catalog description
- lastk = k = None
- for item in tmsg.splitlines():
- item = item.strip()
- if not item:
- continue
- if ':' in item:
- k, v = item.split(':', 1)
- k = k.strip().lower()
- v = v.strip()
- self._info[k] = v
- lastk = k
- elif lastk:
- self._info[lastk] += '\n' + item
- if k == 'content-type':
- self._charset = v.split('charset=')[1]
- elif k == 'plural-forms':
- v = v.split(';')
- plural = v[1].split('plural=')[1]
- self.plural = c2py(plural)
- # Note: we unconditionally convert both msgids and msgstrs to
- # Unicode using the character encoding specified in the charset
- # parameter of the Content-Type header. The gettext documentation
- # strongly encourages msgids to be us-ascii, but some appliations
- # require alternative encodings (e.g. Zope's ZCML and ZPT). For
- # traditional gettext applications, the msgid conversion will
- # cause no problems since us-ascii should always be a subset of
- # the charset encoding. We may want to fall back to 8-bit msgids
- # if the Unicode conversion fails.
- if '\x00' in msg:
- # Plural forms
- msgid1, msgid2 = msg.split('\x00')
- tmsg = tmsg.split('\x00')
- if self._charset:
- msgid1 = unicode(msgid1, self._charset)
- tmsg = [unicode(x, self._charset) for x in tmsg]
- for i in range(len(tmsg)):
- catalog[(msgid1, i)] = tmsg[i]
- else:
- if self._charset:
- msg = unicode(msg, self._charset)
- tmsg = unicode(tmsg, self._charset)
- catalog[msg] = tmsg
- # advance to next entry in the seek tables
- masteridx += 8
- transidx += 8
-
- def gettext(self, message):
- missing = object()
- tmsg = self._catalog.get(message, missing)
- if tmsg is missing:
- if self._fallback:
- return self._fallback.gettext(message)
- return message
- # Encode the Unicode tmsg back to an 8-bit string, if possible
- if self._output_charset:
- return tmsg.encode(self._output_charset)
- elif self._charset:
- return tmsg.encode(self._charset)
- return tmsg
-
- def pgettext(self, context, message):
- ctxt_msg_id = self.CONTEXT_ENCODING % (context, message)
- missing = object()
- tmsg = self._catalog.get(ctxt_msg_id, missing)
- if tmsg is missing:
- if self._fallback:
- return self._fallback.pgettext(context, message)
- return message
- # Encode the Unicode tmsg back to an 8-bit string, if possible
- if self._output_charset:
- return tmsg.encode(self._output_charset)
- elif self._charset:
- return tmsg.encode(self._charset)
- return tmsg
-
- def lgettext(self, message):
- missing = object()
- tmsg = self._catalog.get(message, missing)
- if tmsg is missing:
- if self._fallback:
- return self._fallback.lgettext(message)
- return message
- if self._output_charset:
- return tmsg.encode(self._output_charset)
- return tmsg.encode(locale.getpreferredencoding())
-
- def lpgettext(self, context, message):
- ctxt_msg_id = self.CONTEXT_ENCODING % (context, message)
- missing = object()
- tmsg = self._catalog.get(ctxt_msg_id, missing)
- if tmsg is missing:
- if self._fallback:
- return self._fallback.lpgettext(context, message)
- return message
- if self._output_charset:
- return tmsg.encode(self._output_charset)
- return tmsg.encode(locale.getpreferredencoding())
-
- def ngettext(self, msgid1, msgid2, n):
- try:
- tmsg = self._catalog[(msgid1, self.plural(n))]
- if self._output_charset:
- return tmsg.encode(self._output_charset)
- elif self._charset:
- return tmsg.encode(self._charset)
- return tmsg
- except KeyError:
- if self._fallback:
- return self._fallback.ngettext(msgid1, msgid2, n)
- if n == 1:
- return msgid1
- else:
- return msgid2
-
- def npgettext(self, context, msgid1, msgid2, n):
- ctxt_msg_id = self.CONTEXT_ENCODING % (context, msgid1)
- try:
- tmsg = self._catalog[(ctxt_msg_id, self.plural(n))]
- if self._output_charset:
- return tmsg.encode(self._output_charset)
- elif self._charset:
- return tmsg.encode(self._charset)
- return tmsg
- except KeyError:
- if self._fallback:
- return self._fallback.npgettext(context, msgid1, msgid2, n)
- if n == 1:
- return msgid1
- else:
- return msgid2
-
- def lngettext(self, msgid1, msgid2, n):
- try:
- tmsg = self._catalog[(msgid1, self.plural(n))]
- if self._output_charset:
- return tmsg.encode(self._output_charset)
- return tmsg.encode(locale.getpreferredencoding())
- except KeyError:
- if self._fallback:
- return self._fallback.lngettext(msgid1, msgid2, n)
- if n == 1:
- return msgid1
- else:
- return msgid2
-
- def lnpgettext(self, context, msgid1, msgid2, n):
- ctxt_msg_id = self.CONTEXT_ENCODING % (context, msgid1)
- try:
- tmsg = self._catalog[(ctxt_msg_id, self.plural(n))]
- if self._output_charset:
- return tmsg.encode(self._output_charset)
- return tmsg.encode(locale.getpreferredencoding())
- except KeyError:
- if self._fallback:
- return self._fallback.lnpgettext(context, msgid1, msgid2, n)
- if n == 1:
- return msgid1
- else:
- return msgid2
-
- def ugettext(self, message):
- missing = object()
- tmsg = self._catalog.get(message, missing)
- if tmsg is missing:
- if self._fallback:
- return self._fallback.ugettext(message)
- return unicode(message)
- return tmsg
-
- def upgettext(self, context, message):
- ctxt_message_id = self.CONTEXT_ENCODING % (context, message)
- missing = object()
- tmsg = self._catalog.get(ctxt_message_id, missing)
- if tmsg is missing:
- # XXX logilab patch for compat w/ catalog generated by cw < 3.5
- return self.ugettext(message)
- if self._fallback:
- return self._fallback.upgettext(context, message)
- return unicode(message)
- return tmsg
-
- def ungettext(self, msgid1, msgid2, n):
- try:
- tmsg = self._catalog[(msgid1, self.plural(n))]
- except KeyError:
- if self._fallback:
- return self._fallback.ungettext(msgid1, msgid2, n)
- if n == 1:
- tmsg = unicode(msgid1)
- else:
- tmsg = unicode(msgid2)
- return tmsg
-
- def unpgettext(self, context, msgid1, msgid2, n):
- ctxt_message_id = self.CONTEXT_ENCODING % (context, msgid1)
- try:
- tmsg = self._catalog[(ctxt_message_id, self.plural(n))]
- except KeyError:
- if self._fallback:
- return self._fallback.unpgettext(context, msgid1, msgid2, n)
- if n == 1:
- tmsg = unicode(msgid1)
- else:
- tmsg = unicode(msgid2)
- return tmsg
-
-
-# Locate a .mo file using the gettext strategy
-def find(domain, localedir=None, languages=None, all=0):
- # Get some reasonable defaults for arguments that were not supplied
- if localedir is None:
- localedir = _default_localedir
- if languages is None:
- languages = []
- for envar in ('LANGUAGE', 'LC_ALL', 'LC_MESSAGES', 'LANG'):
- val = os.environ.get(envar)
- if val:
- languages = val.split(':')
- break
- if 'C' not in languages:
- languages.append('C')
- # now normalize and expand the languages
- nelangs = []
- for lang in languages:
- for nelang in _expand_lang(lang):
- if nelang not in nelangs:
- nelangs.append(nelang)
- # select a language
- if all:
- result = []
- else:
- result = None
- for lang in nelangs:
- if lang == 'C':
- break
- mofile = os.path.join(localedir, lang, 'LC_MESSAGES', '%s.mo' % domain)
- if os.path.exists(mofile):
- if all:
- result.append(mofile)
- else:
- return mofile
- return result
-
-
-
-# a mapping between absolute .mo file path and Translation object
-_translations = {}
-
-def translation(domain, localedir=None, languages=None,
- class_=None, fallback=False, codeset=None):
- if class_ is None:
- class_ = GNUTranslations
- mofiles = find(domain, localedir, languages, all=1)
- if not mofiles:
- if fallback:
- return NullTranslations()
- raise IOError(ENOENT, 'No translation file found for domain', domain)
- # TBD: do we need to worry about the file pointer getting collected?
- # Avoid opening, reading, and parsing the .mo file after it's been done
- # once.
- result = None
- for mofile in mofiles:
- key = os.path.abspath(mofile)
- t = _translations.get(key)
- if t is None:
- t = _translations.setdefault(key, class_(open(mofile, 'rb')))
- # Copy the translation object to allow setting fallbacks and
- # output charset. All other instance data is shared with the
- # cached object.
- t = copy.copy(t)
- if codeset:
- t.set_output_charset(codeset)
- if result is None:
- result = t
- else:
- result.add_fallback(t)
- return result
-
-
-def install(domain, localedir=None, unicode=False, codeset=None, names=None):
- t = translation(domain, localedir, fallback=True, codeset=codeset)
- t.install(unicode, names)
-
-
-
-# a mapping b/w domains and locale directories
-_localedirs = {}
-# a mapping b/w domains and codesets
-_localecodesets = {}
-# current global domain, `messages' used for compatibility w/ GNU gettext
-_current_domain = 'messages'
-
-
-def textdomain(domain=None):
- global _current_domain
- if domain is not None:
- _current_domain = domain
- return _current_domain
-
-
-def bindtextdomain(domain, localedir=None):
- global _localedirs
- if localedir is not None:
- _localedirs[domain] = localedir
- return _localedirs.get(domain, _default_localedir)
-
-
-def bind_textdomain_codeset(domain, codeset=None):
- global _localecodesets
- if codeset is not None:
- _localecodesets[domain] = codeset
- return _localecodesets.get(domain)
-
-
-def dgettext(domain, message):
- try:
- t = translation(domain, _localedirs.get(domain, None),
- codeset=_localecodesets.get(domain))
- except IOError:
- return message
- return t.gettext(message)
-
-def dpgettext(domain, context, message):
- try:
- t = translation(domain, _localedirs.get(domain, None),
- codeset=_localecodesets.get(domain))
- except IOError:
- return message
- return t.pgettext(context, message)
-
-def ldgettext(domain, message):
- try:
- t = translation(domain, _localedirs.get(domain, None),
- codeset=_localecodesets.get(domain))
- except IOError:
- return message
- return t.lgettext(message)
-
-def ldpgettext(domain, context, message):
- try:
- t = translation(domain, _localedirs.get(domain, None),
- codeset=_localecodesets.get(domain))
- except IOError:
- return message
- return t.lpgettext(context, message)
-
-def dngettext(domain, msgid1, msgid2, n):
- try:
- t = translation(domain, _localedirs.get(domain, None),
- codeset=_localecodesets.get(domain))
- except IOError:
- if n == 1:
- return msgid1
- else:
- return msgid2
- return t.ngettext(msgid1, msgid2, n)
-
-def dnpgettext(domain, context, msgid1, msgid2, n):
- try:
- t = translation(domain, _localedirs.get(domain, None),
- codeset=_localecodesets.get(domain))
- except IOError:
- if n == 1:
- return msgid1
- else:
- return msgid2
- return t.npgettext(context, msgid1, msgid2, n)
-
-def ldngettext(domain, msgid1, msgid2, n):
- try:
- t = translation(domain, _localedirs.get(domain, None),
- codeset=_localecodesets.get(domain))
- except IOError:
- if n == 1:
- return msgid1
- else:
- return msgid2
- return t.lngettext(msgid1, msgid2, n)
-
-def ldnpgettext(domain, context, msgid1, msgid2, n):
- try:
- t = translation(domain, _localedirs.get(domain, None),
- codeset=_localecodesets.get(domain))
- except IOError:
- if n == 1:
- return msgid1
- else:
- return msgid2
- return t.lnpgettext(context, msgid1, msgid2, n)
-
-def gettext(message):
- return dgettext(_current_domain, message)
-
-def pgettext(context, message):
- return dpgettext(_current_domain, context, message)
-
-def lgettext(message):
- return ldgettext(_current_domain, message)
-
-def lpgettext(context, message):
- return ldpgettext(_current_domain, context, message)
-
-def ngettext(msgid1, msgid2, n):
- return dngettext(_current_domain, msgid1, msgid2, n)
-
-def npgettext(context, msgid1, msgid2, n):
- return dnpgettext(_current_domain, context, msgid1, msgid2, n)
-
-def lngettext(msgid1, msgid2, n):
- return ldngettext(_current_domain, msgid1, msgid2, n)
-
-def lnpgettext(context, msgid1, msgid2, n):
- return ldnpgettext(_current_domain, context, msgid1, msgid2, n)
-
-# dcgettext() has been deemed unnecessary and is not implemented.
-
-# James Henstridge's Catalog constructor from GNOME gettext. Documented usage
-# was:
-#
-# import gettext
-# cat = gettext.Catalog(PACKAGE, localedir=LOCALEDIR)
-# _ = cat.gettext
-# print _('Hello World')
-
-# The resulting catalog object currently don't support access through a
-# dictionary API, which was supported (but apparently unused) in GNOME
-# gettext.
-
-Catalog = translation
--- a/hooks/__init__.py Thu Mar 06 15:55:33 2014 +0100
+++ b/hooks/__init__.py Thu Nov 12 10:52:28 2015 +0100
@@ -52,7 +52,7 @@
def update_feeds(repo):
# take a list to avoid iterating on a dictionary whose size may
# change
- for uri, source in list(repo.sources_by_uri.iteritems()):
+ for uri, source in list(repo.sources_by_uri.items()):
if (uri == 'system'
or not repo.config.source_enabled(source)
or not source.config['synchronize']):
@@ -72,7 +72,7 @@
def __call__(self):
def expire_dataimports(repo=self.repo):
- for uri, source in repo.sources_by_uri.iteritems():
+ for uri, source in repo.sources_by_uri.items():
if (uri == 'system'
or not repo.config.source_enabled(source)):
continue
--- a/hooks/integrity.py Thu Mar 06 15:55:33 2014 +0100
+++ b/hooks/integrity.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,10 +20,12 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from threading import Lock
+from six import text_type
+
from cubicweb import validation_error, neg_role
from cubicweb.schema import (META_RTYPES, WORKFLOW_RTYPES,
RQLConstraint, RQLUniqueConstraint)
@@ -247,7 +249,7 @@
def __call__(self):
entity = self.entity
eschema = entity.e_schema
- for attr, val in entity.cw_edited.iteritems():
+ for attr, val in entity.cw_edited.items():
if eschema.subjrels[attr].final and eschema.has_unique_values(attr):
if val is None:
continue
@@ -286,13 +288,13 @@
entity = self.entity
metaattrs = entity.e_schema.meta_attributes()
edited = entity.cw_edited
- for metaattr, (metadata, attr) in metaattrs.iteritems():
+ for metaattr, (metadata, attr) in metaattrs.items():
if metadata == 'format' and attr in edited:
try:
value = edited[attr]
except KeyError:
continue # no text to tidy
- if isinstance(value, unicode): # filter out None and Binary
+ if isinstance(value, text_type): # filter out None and Binary
if getattr(entity, str(metaattr)) == 'text/html':
edited[attr] = soup2xhtml(value, self._cw.encoding)
--- a/hooks/synccomputed.py Thu Mar 06 15:55:33 2014 +0100
+++ b/hooks/synccomputed.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,7 @@
"""Hooks for synchronizing computed attributes"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from collections import defaultdict
@@ -40,7 +40,7 @@
self._container[computed_attribute] = set((eid,))
def precommit_event(self):
- for computed_attribute_rdef, eids in self.get_data().iteritems():
+ for computed_attribute_rdef, eids in self.get_data().items():
attr = computed_attribute_rdef.rtype
formula = computed_attribute_rdef.formula
select = self.cnx.repo.vreg.rqlhelper.parse(formula).children[0]
@@ -110,7 +110,7 @@
def __call__(self):
edited_attributes = frozenset(self.entity.cw_edited)
- for rdef, used_attributes in self.attributes_computed_attributes.iteritems():
+ for rdef, used_attributes in self.attributes_computed_attributes.items():
if edited_attributes.intersection(used_attributes):
# XXX optimize if the modified attributes belong to the same
# entity as the computed attribute
@@ -178,7 +178,7 @@
self.computed_attribute_by_relation[depend_on_rdef].append(rdef)
def generate_entity_creation_hooks(self):
- for etype, computed_attributes in self.computed_attribute_by_etype.iteritems():
+ for etype, computed_attributes in self.computed_attribute_by_etype.items():
regid = 'computed_attribute.%s_created' % etype
selector = hook.is_instance(etype)
yield type('%sCreatedHook' % etype,
@@ -188,7 +188,7 @@
'computed_attributes': computed_attributes})
def generate_relation_change_hooks(self):
- for rdef, computed_attributes in self.computed_attribute_by_relation.iteritems():
+ for rdef, computed_attributes in self.computed_attribute_by_relation.items():
regid = 'computed_attribute.%s_modified' % rdef.rtype
selector = hook.match_rtype(rdef.rtype.type,
frometypes=(rdef.subject.type,),
@@ -206,7 +206,7 @@
'optimized_computed_attributes': optimized_computed_attributes})
def generate_entity_update_hooks(self):
- for etype, attributes_computed_attributes in self.computed_attribute_by_etype_attrs.iteritems():
+ for etype, attributes_computed_attributes in self.computed_attribute_by_etype_attrs.items():
regid = 'computed_attribute.%s_updated' % etype
selector = hook.is_instance(etype)
yield type('%sModifiedHook' % etype,
--- a/hooks/syncschema.py Thu Mar 06 15:55:33 2014 +0100
+++ b/hooks/syncschema.py Thu Nov 12 10:52:28 2015 +0100
@@ -24,7 +24,7 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from copy import copy
from hashlib import md5
@@ -37,7 +37,8 @@
from cubicweb import validation_error
from cubicweb.predicates import is_instance
from cubicweb.schema import (SCHEMA_TYPES, META_RTYPES, VIRTUAL_RTYPES,
- CONSTRAINTS, ETYPE_NAME_MAP, display_name)
+ CONSTRAINTS, UNIQUE_CONSTRAINTS, ETYPE_NAME_MAP,
+ display_name)
from cubicweb.server import hook, schemaserial as ss, schema2sql as y2sql
from cubicweb.server.sqlutils import SQL_PREFIX
from cubicweb.hooks.synccomputed import RecomputeAttributeOperation
@@ -208,7 +209,7 @@
repo.set_schema(repo.schema)
# CWUser class might have changed, update current session users
cwuser_cls = self.cnx.vreg['etypes'].etype_class('CWUser')
- for session in repo._sessions.itervalues():
+ for session in repo._sessions.values():
session.user.__class__ = cwuser_cls
except Exception:
self.critical('error while setting schema', exc_info=True)
@@ -717,8 +718,8 @@
syssource.update_rdef_unique(cnx, rdef)
self.unique_changed = True
if cstrtype in ('BoundaryConstraint', 'IntervalBoundConstraint', 'StaticVocabularyConstraint'):
- cstrname = 'cstr' + md5(rdef.subject.type + rdef.rtype.type + cstrtype +
- (self.oldcstr.serialize() or '')).hexdigest()
+ cstrname = 'cstr' + md5((rdef.subject.type + rdef.rtype.type + cstrtype +
+ (self.oldcstr.serialize() or '')).encode('utf-8')).hexdigest()
cnx.system_sql('ALTER TABLE %s%s DROP CONSTRAINT %s' % (SQL_PREFIX, rdef.subject.type, cstrname))
def revertprecommit_event(self):
@@ -749,7 +750,10 @@
return
rdef = self.rdef = cnx.vreg.schema.schema_by_eid(rdefentity.eid)
cstrtype = self.entity.type
- oldcstr = self.oldcstr = rdef.constraint_by_type(cstrtype)
+ if cstrtype in UNIQUE_CONSTRAINTS:
+ oldcstr = self.oldcstr = rdef.constraint_by_type(cstrtype)
+ else:
+ oldcstr = None
newcstr = self.newcstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value)
# in-place modification of in-memory schema first
_set_modifiable_constraints(rdef)
@@ -769,8 +773,8 @@
self.unique_changed = True
if cstrtype in ('BoundaryConstraint', 'IntervalBoundConstraint', 'StaticVocabularyConstraint'):
if oldcstr is not None:
- oldcstrname = 'cstr' + md5(rdef.subject.type + rdef.rtype.type + cstrtype +
- (self.oldcstr.serialize() or '')).hexdigest()
+ oldcstrname = 'cstr' + md5((rdef.subject.type + rdef.rtype.type + cstrtype +
+ (self.oldcstr.serialize() or '')).encode('ascii')).hexdigest()
cnx.system_sql('ALTER TABLE %s%s DROP CONSTRAINT %s' %
(SQL_PREFIX, rdef.subject.type, oldcstrname))
cstrname, check = y2sql.check_constraint(rdef.subject, rdef.object, rdef.rtype.type,
@@ -905,11 +909,6 @@
# duh, schema not found, log error and skip operation
self.warning('no schema for %s', self.eid)
return
- if isinstance(erschema, RelationSchema): # XXX 3.6 migration
- return
- if isinstance(erschema, RelationDefinitionSchema) and \
- self.action in ('delete', 'add'): # XXX 3.6.1 migration
- return
perms = list(erschema.action_permissions(self.action))
if self.group_eid is not None:
perm = self.cnx.entity_from_eid(self.group_eid).name
--- a/hooks/syncsession.py Thu Mar 06 15:55:33 2014 +0100
+++ b/hooks/syncsession.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,7 @@
"""Core hooks: synchronize living session on persistent data changes"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from cubicweb import UnknownProperty, BadConnectionId, validation_error
from cubicweb.predicates import is_instance
@@ -26,7 +26,7 @@
def get_user_sessions(repo, ueid):
- for session in repo._sessions.itervalues():
+ for session in repo._sessions.values():
if ueid == session.user.eid:
yield session
@@ -114,7 +114,7 @@
def __call__(self):
"""modify user permission, need to update users"""
for session in get_user_sessions(self._cw.repo, self.entity.eid):
- _DelUserOp(self._cw, session.id)
+ _DelUserOp(self._cw, session.sessionid)
# CWProperty hooks #############################################################
--- a/hooks/syncsources.py Thu Mar 06 15:55:33 2014 +0100
+++ b/hooks/syncsources.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,7 +17,7 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""hooks for repository sources synchronization"""
-_ = unicode
+from cubicweb import _
from socket import gethostname
--- a/hooks/test/data/schema.py Thu Mar 06 15:55:33 2014 +0100
+++ b/hooks/test/data/schema.py Thu Nov 12 10:52:28 2015 +0100
@@ -22,7 +22,7 @@
from cubicweb.schema import ERQLExpression
-_ = unicode
+from cubicweb import _
class friend(RelationDefinition):
subject = ('CWUser', 'CWGroup')
--- a/hooks/test/unittest_hooks.py Thu Mar 06 15:55:33 2014 +0100
+++ b/hooks/test/unittest_hooks.py Thu Nov 12 10:52:28 2015 +0100
@@ -24,9 +24,12 @@
from datetime import datetime
+from six import text_type
+
from cubicweb import ValidationError, AuthenticationError, BadConnectionId
from cubicweb.devtools.testlib import CubicWebTC
+
class CoreHooksTC(CubicWebTC):
def test_inlined(self):
@@ -207,7 +210,7 @@
with self.assertRaises(ValidationError) as cm:
cnx.execute('INSERT CWUser X: X login "admin"')
ex = cm.exception
- ex.translate(unicode)
+ ex.translate(text_type)
self.assertIsInstance(ex.entity, int)
self.assertEqual(ex.errors, {'login-subject': 'the value "admin" is already used, use another one'})
--- a/hooks/test/unittest_synccomputed.py Thu Mar 06 15:55:33 2014 +0100
+++ b/hooks/test/unittest_synccomputed.py Thu Nov 12 10:52:28 2015 +0100
@@ -62,7 +62,7 @@
def test_computed_attribute_by_relation(self):
comp_by_rdef = self.dependencies.computed_attribute_by_relation
self.assertEqual(len(comp_by_rdef), 1)
- key, values = iter(comp_by_rdef.iteritems()).next()
+ key, values = next(iter(comp_by_rdef.items()))
self.assertEqual(key.rtype, 'works_for')
self.assertEqual(len(values), 1)
self.assertEqual(values[0].rtype, 'total_salary')
@@ -73,7 +73,7 @@
values = comp_by_attr['Person']
self.assertEqual(len(values), 2)
values = set((rdef.formula, tuple(v))
- for rdef, v in values.iteritems())
+ for rdef, v in values.items())
self.assertEquals(values,
set((('Any 2014 - D WHERE X birth_year D', tuple(('birth_year',))),
('Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA', tuple(('salary',)))))
--- a/hooks/test/unittest_syncschema.py Thu Mar 06 15:55:33 2014 +0100
+++ b/hooks/test/unittest_syncschema.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,6 +19,7 @@
from logilab.common.testlib import unittest_main
+from yams.constraints import BoundaryConstraint
from cubicweb import ValidationError, Binary
from cubicweb.schema import META_RTYPES
from cubicweb.devtools import startpgcluster, stoppgcluster, PostgresApptestConfiguration
@@ -382,5 +383,23 @@
self.assertEqual(cstr.values, (u'normal', u'auto', u'new'))
cnx.execute('INSERT Transition T: T name "hop", T type "new"')
+ def test_add_constraint(self):
+ with self.admin_access.repo_cnx() as cnx:
+ rdef = self.schema['EmailPart'].rdef('ordernum')
+ cstr = BoundaryConstraint('>=', 0)
+ cnx.execute('INSERT CWConstraint X: X value %(v)s, X cstrtype CT, '
+ 'EDEF constrained_by X WHERE CT name %(ct)s, EDEF eid %(x)s',
+ {'ct': cstr.__class__.__name__, 'v': cstr.serialize(), 'x': rdef.eid})
+ cnx.commit()
+ cstr2 = rdef.constraint_by_type('BoundaryConstraint')
+ self.assertEqual(cstr, cstr2)
+ cstr3 = BoundaryConstraint('<=', 1000)
+ cnx.execute('INSERT CWConstraint X: X value %(v)s, X cstrtype CT, '
+ 'EDEF constrained_by X WHERE CT name %(ct)s, EDEF eid %(x)s',
+ {'ct': cstr3.__class__.__name__, 'v': cstr3.serialize(), 'x': rdef.eid})
+ cnx.commit()
+ self.assertCountEqual(rdef.constraints, [cstr, cstr3])
+
+
if __name__ == '__main__':
unittest_main()
--- a/hooks/test/unittest_syncsession.py Thu Mar 06 15:55:33 2014 +0100
+++ b/hooks/test/unittest_syncsession.py Thu Nov 12 10:52:28 2015 +0100
@@ -22,6 +22,8 @@
syncschema.py hooks are mostly tested in server/test/unittest_migrations.py
"""
+from six import text_type
+
from cubicweb import ValidationError
from cubicweb.devtools.testlib import CubicWebTC
@@ -32,13 +34,13 @@
with self.assertRaises(ValidationError) as cm:
req.execute('INSERT CWProperty X: X pkey "bla.bla", '
'X value "hop", X for_user U')
- cm.exception.translate(unicode)
+ cm.exception.translate(text_type)
self.assertEqual(cm.exception.errors,
{'pkey-subject': 'unknown property key bla.bla'})
with self.assertRaises(ValidationError) as cm:
req.execute('INSERT CWProperty X: X pkey "bla.bla", X value "hop"')
- cm.exception.translate(unicode)
+ cm.exception.translate(text_type)
self.assertEqual(cm.exception.errors,
{'pkey-subject': 'unknown property key bla.bla'})
--- a/hooks/workflow.py Thu Mar 06 15:55:33 2014 +0100
+++ b/hooks/workflow.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,7 @@
"""Core hooks: workflow related hooks"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from datetime import datetime
--- a/i18n.py Thu Mar 06 15:55:33 2014 +0100
+++ b/i18n.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,6 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""Some i18n/gettext utilities."""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
@@ -24,6 +25,8 @@
from os.path import join, basename, splitext, exists
from glob import glob
+from six import PY2
+
from cubicweb.toolsutils import create_dir
def extract_from_tal(files, output_file):
@@ -39,10 +42,10 @@
def add_msg(w, msgid, msgctx=None):
"""write an empty pot msgid definition"""
- if isinstance(msgid, unicode):
+ if PY2 and isinstance(msgid, unicode):
msgid = msgid.encode('utf-8')
if msgctx:
- if isinstance(msgctx, unicode):
+ if PY2 and isinstance(msgctx, unicode):
msgctx = msgctx.encode('utf-8')
w('msgctxt "%s"\n' % msgctx)
msgid = msgid.replace('"', r'\"').splitlines()
@@ -80,7 +83,7 @@
"""
from subprocess import CalledProcessError
from logilab.common.fileutils import ensure_fs_mode
- print '-> compiling message catalogs to %s' % destdir
+ print('-> compiling message catalogs to %s' % destdir)
errors = []
for lang in langs:
langdir = join(destdir, lang, 'LC_MESSAGES')
--- a/mail.py Thu Mar 06 15:55:33 2014 +0100
+++ b/mail.py Thu Nov 12 10:52:28 2015 +0100
@@ -28,16 +28,27 @@
from email.utils import formatdate
from socket import gethostname
+from six import PY2, PY3, text_type
+
+
def header(ustring):
+ if PY3:
+ return Header(ustring, 'utf-8')
return Header(ustring.encode('UTF-8'), 'UTF-8')
def addrheader(uaddr, uname=None):
# even if an email address should be ascii, encode it using utf8 since
# automatic tests may generate non ascii email address
- addr = uaddr.encode('UTF-8')
+ if PY2:
+ addr = uaddr.encode('UTF-8')
+ else:
+ addr = uaddr
if uname:
- return '%s <%s>' % (header(uname).encode(), addr)
- return addr
+ val = '%s <%s>' % (header(uname).encode(), addr)
+ else:
+ val = addr
+ assert isinstance(val, str) # bytes in py2, ascii-encoded unicode in py3
+ return val
def construct_message_id(appid, eid, withtimestamp=True):
@@ -46,7 +57,7 @@
else:
addrpart = 'eid=%s' % eid
# we don't want any equal sign nor trailing newlines
- leftpart = b64encode(addrpart, '.-').rstrip().rstrip('=')
+ leftpart = b64encode(addrpart.encode('ascii'), b'.-').decode('ascii').rstrip().rstrip('=')
return '<%s@%s.%s>' % (leftpart, appid, gethostname())
@@ -75,7 +86,7 @@
to_addrs and cc_addrs are expected to be a list of email address without
name
"""
- assert type(content) is unicode, repr(content)
+ assert isinstance(content, text_type), repr(content)
msg = MIMEText(content.encode('UTF-8'), 'plain', 'UTF-8')
# safety: keep only the first newline
try:
@@ -86,13 +97,13 @@
if uinfo.get('email'):
email = uinfo['email']
elif config and config['sender-addr']:
- email = unicode(config['sender-addr'])
+ email = text_type(config['sender-addr'])
else:
email = u''
if uinfo.get('name'):
name = uinfo['name']
elif config and config['sender-name']:
- name = unicode(config['sender-name'])
+ name = text_type(config['sender-name'])
else:
name = u''
msg['From'] = addrheader(email, name)
--- a/md5crypt.py Thu Mar 06 15:55:33 2014 +0100
+++ b/md5crypt.py Thu Nov 12 10:52:28 2015 +0100
@@ -38,31 +38,37 @@
this stuff is worth it, you can buy me a beer in return. Poul-Henning Kamp
"""
-MAGIC = '$1$' # Magic string
-ITOA64 = "./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
+MAGIC = b'$1$' # Magic string
+ITOA64 = b"./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
from hashlib import md5 # pylint: disable=E0611
+from six import text_type, indexbytes
+from six.moves import range
+
+
def to64 (v, n):
- ret = ''
+ ret = bytearray()
while (n - 1 >= 0):
n = n - 1
- ret = ret + ITOA64[v & 0x3f]
+ ret.append(ITOA64[v & 0x3f])
v = v >> 6
return ret
def crypt(pw, salt):
- if isinstance(pw, unicode):
+ if isinstance(pw, text_type):
pw = pw.encode('utf-8')
+ if isinstance(salt, text_type):
+ salt = salt.encode('ascii')
# Take care of the magic string if present
if salt.startswith(MAGIC):
salt = salt[len(MAGIC):]
# salt can have up to 8 characters:
- salt = salt.split('$', 1)[0]
+ salt = salt.split(b'$', 1)[0]
salt = salt[:8]
ctx = pw + MAGIC + salt
final = md5(pw + salt + pw).digest()
- for pl in xrange(len(pw), 0, -16):
+ for pl in range(len(pw), 0, -16):
if pl > 16:
ctx = ctx + final[:16]
else:
@@ -71,7 +77,7 @@
i = len(pw)
while i:
if i & 1:
- ctx = ctx + chr(0) #if ($i & 1) { $ctx->add(pack("C", 0)); }
+ ctx = ctx + b'\0' #if ($i & 1) { $ctx->add(pack("C", 0)); }
else:
- ctx = ctx + pw[0]
+ ctx = ctx + pw[0:1]
i = i >> 1
@@ -79,8 +85,8 @@
# The following is supposed to make
# things run slower.
# my question: WTF???
- for i in xrange(1000):
- ctx1 = ''
+ for i in range(1000):
+ ctx1 = b''
if i & 1:
ctx1 = ctx1 + pw
else:
@@ -95,21 +101,21 @@
ctx1 = ctx1 + pw
final = md5(ctx1).digest()
# Final xform
- passwd = ''
- passwd = passwd + to64((int(ord(final[0])) << 16)
- |(int(ord(final[6])) << 8)
- |(int(ord(final[12]))),4)
- passwd = passwd + to64((int(ord(final[1])) << 16)
- |(int(ord(final[7])) << 8)
- |(int(ord(final[13]))), 4)
- passwd = passwd + to64((int(ord(final[2])) << 16)
- |(int(ord(final[8])) << 8)
- |(int(ord(final[14]))), 4)
- passwd = passwd + to64((int(ord(final[3])) << 16)
- |(int(ord(final[9])) << 8)
- |(int(ord(final[15]))), 4)
- passwd = passwd + to64((int(ord(final[4])) << 16)
- |(int(ord(final[10])) << 8)
- |(int(ord(final[5]))), 4)
- passwd = passwd + to64((int(ord(final[11]))), 2)
+ passwd = b''
+ passwd += to64((indexbytes(final, 0) << 16)
+ |(indexbytes(final, 6) << 8)
+ |(indexbytes(final, 12)),4)
+ passwd += to64((indexbytes(final, 1) << 16)
+ |(indexbytes(final, 7) << 8)
+ |(indexbytes(final, 13)), 4)
+ passwd += to64((indexbytes(final, 2) << 16)
+ |(indexbytes(final, 8) << 8)
+ |(indexbytes(final, 14)), 4)
+ passwd += to64((indexbytes(final, 3) << 16)
+ |(indexbytes(final, 9) << 8)
+ |(indexbytes(final, 15)), 4)
+ passwd += to64((indexbytes(final, 4) << 16)
+ |(indexbytes(final, 10) << 8)
+ |(indexbytes(final, 5)), 4)
+ passwd += to64((indexbytes(final, 11)), 2)
return passwd
--- a/migration.py Thu Mar 06 15:55:33 2014 +0100
+++ b/migration.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,6 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""utilities for instances migration"""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
@@ -25,6 +26,9 @@
import tempfile
from os.path import exists, join, basename, splitext
from itertools import chain
+from warnings import warn
+
+from six import string_types
from logilab.common import IGNORED_EXTENSIONS
from logilab.common.decorators import cached
@@ -49,7 +53,7 @@
assert fromversion <= toversion, (fromversion, toversion)
if not exists(directory):
if not quiet:
- print directory, "doesn't exists, no migration path"
+ print(directory, "doesn't exist, no migration path")
return []
if fromversion == toversion:
return []
@@ -93,9 +97,9 @@
stream = open(scriptpath)
scriptcontent = stream.read()
stream.close()
- print
- print scriptcontent
- print
+ print()
+ print(scriptcontent)
+ print()
else:
return True
@@ -139,9 +143,6 @@
raise
raise AttributeError(name)
- def repo_connect(self):
- return self.config.repository()
-
def migrate(self, vcconf, toupgrade, options):
"""upgrade the given set of cubes
@@ -243,7 +244,7 @@
# avoid '_' to be added to builtins by sys.display_hook
def do_not_add___to_builtins(obj):
if obj is not None:
- print repr(obj)
+ print(repr(obj))
sys.displayhook = do_not_add___to_builtins
local_ctx = self._create_context()
try:
@@ -349,7 +350,16 @@
else:
pyname = splitext(basename(migrscript))[0]
scriptlocals['__name__'] = pyname
- execfile(migrscript, scriptlocals)
+ with open(migrscript, 'rb') as fobj:
+ fcontent = fobj.read()
+ try:
+ code = compile(fcontent, migrscript, 'exec')
+ except SyntaxError:
+ # try without print_function
+ code = compile(fcontent, migrscript, 'exec', 0, True)
+ warn('[3.22] script %r should be updated to work with print_function'
+ % migrscript, DeprecationWarning)
+ exec(code, scriptlocals)
if funcname is not None:
try:
func = scriptlocals[funcname]
@@ -399,7 +409,7 @@
"""modify the list of used cubes in the in-memory config
returns newly inserted cubes, including dependencies
"""
- if isinstance(cubes, basestring):
+ if isinstance(cubes, string_types):
cubes = (cubes,)
origcubes = self.config.cubes()
newcubes = [p for p in self.config.expand_cubes(cubes)
@@ -454,6 +464,10 @@
def version_strictly_lower(a, b):
+ if a is None:
+ return True
+ if b is None:
+ return False
if a:
a = Version(a)
if b:
@@ -491,8 +505,8 @@
self.dependencies[cube] = dict(self.config.cube_dependencies(cube))
self.dependencies[cube]['cubicweb'] = self.config.cube_depends_cubicweb_version(cube)
# compute reverse dependencies
- for cube, dependencies in self.dependencies.iteritems():
- for name, constraint in dependencies.iteritems():
+ for cube, dependencies in self.dependencies.items():
+ for name, constraint in dependencies.items():
self.reverse_dependencies.setdefault(name,set())
if constraint:
try:
@@ -522,9 +536,9 @@
elif op == None:
continue
else:
- print ('unable to handle %s in %s, set to `%s %s` '
- 'but currently up to `%s %s`' %
- (cube, source, oper, version, op, ver))
+ print('unable to handle %s in %s, set to `%s %s` '
+ 'but currently up to `%s %s`' %
+ (cube, source, oper, version, op, ver))
# "solve" constraint satisfaction problem
if cube not in self.cubes:
self.errors.append( ('add', cube, version, source) )
@@ -536,4 +550,4 @@
elif oper is None:
pass # no constraint on version
else:
- print 'unknown operator', oper
+ print('unknown operator', oper)
--- a/misc/cwfs/cwfs.py Thu Mar 06 15:55:33 2014 +0100
+++ b/misc/cwfs/cwfs.py Thu Nov 12 10:52:28 2015 +0100
@@ -80,17 +80,17 @@
self._restrictions = []
def parse(self) :
- self._entity = self._components.next()
+ self._entity = next(self._components)
try:
self.process_entity()
except StopIteration :
pass
def process_entity(self) :
- _next = self._components.next()
+ _next = next(self._components)
if _next in self.schema.get_attrs(self._entity) :
self._attr = _next
- _next = self._components.next()
+ _next = next(self._components)
self._restrictions.append( (self._entity, self._attr, _next) )
self._attr = None
self._rel = None
@@ -136,7 +136,7 @@
def parse(self):
self._var = self._alphabet.pop(0)
- self._e_type = self._components.next()
+ self._e_type = next(self._components)
e_type = self._e_type.capitalize()
self._restrictions.append('%s is %s' % (self._var, e_type))
try:
@@ -146,11 +146,11 @@
return 'Any %s WHERE %s' % (self._var, ', '.join(self._restrictions))
def process_entity(self) :
- _next = self._components.next()
+ _next = next(self._components)
if _next in self.schema.get_attrs(self._e_type) :
attr = _next
try:
- _next = self._components.next()
+ _next = next(self._components)
self._restrictions.append('%s %s %s' % (self._var, attr, _next))
except StopIteration:
a_var = self._alphabet.pop(0)
@@ -163,7 +163,7 @@
self._restrictions.append('%s %s %s' % (self._var, rel, r_var))
self._var = r_var
try:
- _next = self._components.next()
+ _next = next(self._components)
self._restrictions.append('%s is %s' % (r_var, _next.capitalize()))
except StopIteration:
raise
--- a/misc/cwfs/cwfs_test.py Thu Mar 06 15:55:33 2014 +0100
+++ b/misc/cwfs/cwfs_test.py Thu Nov 12 10:52:28 2015 +0100
@@ -30,7 +30,7 @@
sections = []
buffer = ""
in_section = False
- for line in file(filename) :
+ for line in open(filename) :
if line.startswith('Test::'):
in_section = True
buffer = ""
--- a/misc/migration/3.10.0_Any.py Thu Mar 06 15:55:33 2014 +0100
+++ b/misc/migration/3.10.0_Any.py Thu Nov 12 10:52:28 2015 +0100
@@ -1,3 +1,5 @@
+from six import text_type
+
from cubicweb.server.session import hooks_control
for uri, cfg in config.read_sources_file().items():
@@ -24,7 +26,7 @@
repo.sources_by_uri.pop(uri)
config = u'\n'.join('%s=%s' % (key, value) for key, value in cfg.items()
if key != 'adapter' and value is not None)
- create_entity('CWSource', name=unicode(uri), type=unicode(cfg['adapter']),
+ create_entity('CWSource', name=text_type(uri), type=text_type(cfg['adapter']),
config=config)
commit()
--- a/misc/migration/3.14.0_Any.py Thu Mar 06 15:55:33 2014 +0100
+++ b/misc/migration/3.14.0_Any.py Thu Nov 12 10:52:28 2015 +0100
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
config['rql-cache-size'] = config['rql-cache-size'] * 10
add_entity_type('CWDataImport')
@@ -10,4 +12,4 @@
mainvars = guess_rrqlexpr_mainvars(expression)
yamscstr = CONSTRAINTS[rqlcstr.type](expression, mainvars)
rqlcstr.cw_set(value=yamscstr.serialize())
- print 'updated', rqlcstr.type, rqlcstr.value.strip()
+ print('updated', rqlcstr.type, rqlcstr.value.strip())
--- a/misc/migration/3.15.4_Any.py Thu Mar 06 15:55:33 2014 +0100
+++ b/misc/migration/3.15.4_Any.py Thu Nov 12 10:52:28 2015 +0100
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
from logilab.common.shellutils import generate_password
from cubicweb.server.utils import crypt_password
@@ -5,7 +7,7 @@
salt = user.upassword.getvalue()
if crypt_password('', salt) == salt:
passwd = generate_password()
- print 'setting random password for user %s' % user.login
+ print('setting random password for user %s' % user.login)
user.set_attributes(upassword=passwd)
commit()
--- a/misc/migration/3.21.0_Any.py Thu Mar 06 15:55:33 2014 +0100
+++ b/misc/migration/3.21.0_Any.py Thu Nov 12 10:52:28 2015 +0100
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
from cubicweb.schema import PURE_VIRTUAL_RTYPES
from cubicweb.server.schema2sql import rschema_has_table
@@ -27,7 +29,7 @@
' SELECT eid FROM entities) AS eids' % args,
ask_confirm=False)[0][0]
if count:
- print '%s references %d unknown entities, deleting' % (rschema, count)
+ print('%s references %d unknown entities, deleting' % (rschema, count))
sql('DELETE FROM %(r)s_relation '
'WHERE eid_from IN (SELECT eid_from FROM %(r)s_relation EXCEPT SELECT eid FROM entities)' % args)
sql('DELETE FROM %(r)s_relation '
@@ -65,14 +67,14 @@
broken_eids = sql('SELECT cw_eid FROM cw_%(e)s WHERE cw_%(r)s IS NULL' % args,
ask_confirm=False)
if broken_eids:
- print 'Required relation %(e)s.%(r)s missing' % args
+ print('Required relation %(e)s.%(r)s missing' % args)
args['eids'] = ', '.join(str(eid) for eid, in broken_eids)
rql('DELETE %(e)s X WHERE X eid IN (%(eids)s)' % args)
broken_eids = sql('SELECT cw_eid FROM cw_%(e)s WHERE cw_%(r)s IN (SELECT cw_%(r)s FROM cw_%(e)s '
'EXCEPT SELECT eid FROM entities)' % args,
ask_confirm=False)
if broken_eids:
- print 'Required relation %(e)s.%(r)s references unknown objects, deleting subject entities' % args
+ print('Required relation %(e)s.%(r)s references unknown objects, deleting subject entities' % args)
args['eids'] = ', '.join(str(eid) for eid, in broken_eids)
rql('DELETE %(e)s X WHERE X eid IN (%(eids)s)' % args)
else:
@@ -81,7 +83,7 @@
' EXCEPT'
' SELECT eid FROM entities) AS eids' % args,
ask_confirm=False)[0][0]:
- print '%(e)s.%(r)s references unknown entities, deleting relation' % args
+ print('%(e)s.%(r)s references unknown entities, deleting relation' % args)
sql('UPDATE cw_%(e)s SET cw_%(r)s = NULL WHERE cw_%(r)s IS NOT NULL AND cw_%(r)s IN '
'(SELECT cw_%(r)s FROM cw_%(e)s EXCEPT SELECT eid FROM entities)' % args)
@@ -104,7 +106,7 @@
' EXCEPT'
' SELECT eid FROM entities) AS eids' % args,
ask_confirm=False)[0][0]:
- print '%(e)s has nonexistent entities, deleting' % args
+ print('%(e)s has nonexistent entities, deleting' % args)
sql('DELETE FROM cw_%(e)s WHERE cw_eid IN '
'(SELECT cw_eid FROM cw_%(e)s EXCEPT SELECT eid FROM entities)' % args)
args['c'] = 'cw_%(e)s_cw_eid_fkey' % args
--- a/misc/migration/3.8.5_Any.py Thu Mar 06 15:55:33 2014 +0100
+++ b/misc/migration/3.8.5_Any.py Thu Nov 12 10:52:28 2015 +0100
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
def migrate_varchar_to_nvarchar():
dbdriver = config.system_source_config['db-driver']
if dbdriver != "sqlserver2005":
@@ -52,7 +54,7 @@
for statement in generated_statements:
- print statement
+ print(statement)
sql(statement, ask_confirm=False)
commit()
--- a/misc/migration/bootstrapmigration_repository.py Thu Mar 06 15:55:33 2014 +0100
+++ b/misc/migration/bootstrapmigration_repository.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,6 +19,9 @@
it should only include low level schema changes
"""
+from __future__ import print_function
+
+from six import text_type
from cubicweb import ConfigurationError
from cubicweb.server.session import hooks_control
@@ -77,8 +80,8 @@
sql('ALTER TABLE "entities" DROP COLUMN "mtime"')
sql('ALTER TABLE "entities" DROP COLUMN "source"')
except: # programming error, already migrated
- print "Failed to drop mtime or source database columns"
- print "'entities' table of the database has probably been already updated"
+ print("Failed to drop mtime or source database columns")
+ print("'entities' table of the database has probably been already updated")
commit()
@@ -101,7 +104,7 @@
driver = config.system_source_config['db-driver']
if not (driver == 'postgres' or driver.startswith('sqlserver')):
import sys
- print >>sys.stderr, 'This migration is not supported for backends other than sqlserver or postgres (yet).'
+ print('This migration is not supported for backends other than sqlserver or postgres (yet).', file=sys.stderr)
sys.exit(1)
add_relation_definition('CWAttribute', 'add_permission', 'CWGroup')
@@ -148,7 +151,7 @@
default = yams.DATE_FACTORY_MAP[atype](default)
else:
assert atype == 'String', atype
- default = unicode(default)
+ default = text_type(default)
return Binary.zpickle(default)
dbh = repo.system_source.dbhelper
@@ -196,7 +199,7 @@
(rschema.type, ','.join(subjects))))
if martians:
martians = ','.join(martians)
- print 'deleting broken relations %s for eids %s' % (rschema.type, martians)
+ print('deleting broken relations %s for eids %s' % (rschema.type, martians))
sql('DELETE FROM %s_relation WHERE eid_from IN (%s) OR eid_to IN (%s)' % (rschema.type, martians, martians))
with session.deny_all_hooks_but():
rql('SET X %(r)s Y WHERE Y %(r)s X, NOT X %(r)s Y' % {'r': rschema.type})
@@ -219,20 +222,20 @@
if driver == 'postgres':
for indexname, in sql('select indexname from pg_indexes'):
if indexname.startswith('unique_'):
- print 'dropping index', indexname
+ print('dropping index', indexname)
sql('DROP INDEX %s' % indexname)
commit()
elif driver.startswith('sqlserver'):
for viewname, in sql('select name from sys.views'):
if viewname.startswith('utv_'):
- print 'dropping view (index should be cascade-deleted)', viewname
+ print('dropping view (index should be cascade-deleted)', viewname)
sql('DROP VIEW %s' % viewname)
commit()
# recreate the constraints, hook will lead to low-level recreation
for eschema in sorted(schema.entities()):
if eschema._unique_together:
- print 'recreate unique indexes for', eschema
+ print('recreate unique indexes for', eschema)
rql_args = schemaserial.uniquetogether2rqls(eschema)
for rql, args in rql_args:
args['x'] = eschema.eid
@@ -243,10 +246,10 @@
for rschema in sorted(schema.relations()):
if rschema.final:
if rschema.type in fsschema:
- print 'sync perms for', rschema.type
+ print('sync perms for', rschema.type)
sync_schema_props_perms(rschema.type, syncprops=False, ask_confirm=False, commit=False)
else:
- print 'WARNING: attribute %s missing from fs schema' % rschema.type
+ print('WARNING: attribute %s missing from fs schema' % rschema.type)
commit()
if applcubicwebversion < (3, 17, 0) and cubicwebversion >= (3, 17, 0):
@@ -298,7 +301,7 @@
with hooks_control(session, session.HOOKS_ALLOW_ALL, 'integrity'):
for rschema in repo.schema.relations():
rpermsdict = permsdict.get(rschema.eid, {})
- for rdef in rschema.rdefs.itervalues():
+ for rdef in rschema.rdefs.values():
for action in rdef.ACTIONS:
actperms = []
for something in rpermsdict.get(action == 'update' and 'add' or action, ()):
--- a/misc/migration/postcreate.py Thu Mar 06 15:55:33 2014 +0100
+++ b/misc/migration/postcreate.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,13 +16,19 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""cubicweb post creation script, set user's workflow"""
+from __future__ import print_function
+
+from six import text_type
+
+from cubicweb import _
+
# insert versions
create_entity('CWProperty', pkey=u'system.version.cubicweb',
- value=unicode(config.cubicweb_version()))
+ value=text_type(config.cubicweb_version()))
for cube in config.cubes():
create_entity('CWProperty', pkey=u'system.version.%s' % cube.lower(),
- value=unicode(config.cube_version(cube)))
+ value=text_type(config.cube_version(cube)))
# some entities have been added before schema entities, fix the 'is' and
# 'is_instance_of' relations
@@ -30,8 +36,8 @@
sql('INSERT INTO %s_relation '
'SELECT X.eid, ET.cw_eid FROM entities as X, cw_CWEType as ET '
'WHERE X.type=ET.cw_name AND NOT EXISTS('
- ' SELECT 1 from is_relation '
- ' WHERE eid_from=X.eid AND eid_to=ET.cw_eid)' % rtype)
+ ' SELECT 1 from %s_relation '
+ ' WHERE eid_from=X.eid AND eid_to=ET.cw_eid)' % (rtype, rtype))
# user workflow
userwf = add_workflow(_('default user workflow'), 'CWUser')
@@ -46,11 +52,11 @@
if hasattr(config, 'anonymous_user'):
anonlogin, anonpwd = config.anonymous_user()
if anonlogin == session.user.login:
- print 'you are using a manager account as anonymous user.'
- print 'Hopefully this is not a production instance...'
+ print('you are using a manager account as anonymous user.')
+ print('Hopefully this is not a production instance...')
elif anonlogin:
from cubicweb.server import create_user
- create_user(session, unicode(anonlogin), anonpwd, u'guests')
+ create_user(session, text_type(anonlogin), anonpwd, u'guests')
# need this since we already have at least one user in the database (the default admin)
for user in rql('Any X WHERE X is CWUser').entities():
--- a/misc/scripts/cwuser_ldap2system.py Thu Mar 06 15:55:33 2014 +0100
+++ b/misc/scripts/cwuser_ldap2system.py Thu Nov 12 10:52:28 2015 +0100
@@ -1,3 +1,5 @@
+from __future__ import print_function
+
import base64
from cubicweb.server.utils import crypt_password
@@ -20,10 +22,10 @@
rset = sql("SELECT eid,type,source,extid,mtime FROM entities WHERE source!='system'", ask_confirm=False)
for eid, type, source, extid, mtime in rset:
if type != 'CWUser':
- print "don't know what to do with entity type", type
+ print("don't know what to do with entity type", type)
continue
if not source.lower().startswith('ldap'):
- print "don't know what to do with source type", source
+ print("don't know what to do with source type", source)
continue
extid = base64.decodestring(extid)
ldapinfos = [x.strip().split('=') for x in extid.split(',')]
@@ -33,7 +35,7 @@
args = dict(eid=eid, type=type, source=source, login=login,
firstname=firstname, surname=surname, mtime=mtime,
pwd=dbhelper.binary_value(crypt_password('toto')))
- print args
+ print(args)
sql(insert, args)
sql(update, args)
--- a/misc/scripts/detect_cycle.py Thu Mar 06 15:55:33 2014 +0100
+++ b/misc/scripts/detect_cycle.py Thu Nov 12 10:52:28 2015 +0100
@@ -1,9 +1,10 @@
+from __future__ import print_function
try:
rtype, = __args__
except ValueError:
- print 'USAGE: cubicweb-ctl shell <instance> detect_cycle.py -- <relation type>'
- print
+ print('USAGE: cubicweb-ctl shell <instance> detect_cycle.py -- <relation type>')
+ print()
graph = {}
for fromeid, toeid in rql('Any X,Y WHERE X %s Y' % rtype):
@@ -12,4 +13,4 @@
from logilab.common.graph import get_cycles
for cycle in get_cycles(graph):
- print 'cycle', '->'.join(str(n) for n in cycle)
+ print('cycle', '->'.join(str(n) for n in cycle))
--- a/misc/scripts/ldap_change_base_dn.py Thu Mar 06 15:55:33 2014 +0100
+++ b/misc/scripts/ldap_change_base_dn.py Thu Nov 12 10:52:28 2015 +0100
@@ -1,10 +1,12 @@
+from __future__ import print_function
+
from base64 import b64decode, b64encode
try:
uri, newdn = __args__
except ValueError:
- print 'USAGE: cubicweb-ctl shell <instance> ldap_change_base_dn.py -- <ldap source uri> <new dn>'
- print
- print 'you should not have updated your sources file yet'
+ print('USAGE: cubicweb-ctl shell <instance> ldap_change_base_dn.py -- <ldap source uri> <new dn>')
+ print()
+ print('you should not have updated your sources file yet')
olddn = repo.sources_by_uri[uri].config['user-base-dn']
@@ -16,9 +18,9 @@
olduserdn = b64decode(extid)
newuserdn = olduserdn.replace(olddn, newdn)
if newuserdn != olduserdn:
- print olduserdn, '->', newuserdn
+ print(olduserdn, '->', newuserdn)
sql("UPDATE entities SET extid='%s' WHERE eid=%s" % (b64encode(newuserdn), eid))
commit()
-print 'you can now update the sources file to the new dn and restart the instance'
+print('you can now update the sources file to the new dn and restart the instance')
--- a/misc/scripts/ldapuser2ldapfeed.py Thu Mar 06 15:55:33 2014 +0100
+++ b/misc/scripts/ldapuser2ldapfeed.py Thu Nov 12 10:52:28 2015 +0100
@@ -2,6 +2,8 @@
Once this script is run, execute c-c db-check to cleanup relation tables.
"""
+from __future__ import print_function
+
import sys
from collections import defaultdict
from logilab.common.shellutils import generate_password
@@ -14,12 +16,12 @@
' on the command line)')
sys.exit(1)
except KeyError:
- print '%s is not an active source' % source_name
+ print('%s is not an active source' % source_name)
sys.exit(1)
# check source is reachable before doing anything
if not source.get_connection().cnx:
- print '%s is not reachable. Fix this before running this script' % source_name
+ print('%s is not reachable. Fix this before running this script' % source_name)
sys.exit(1)
raw_input('Ensure you have shutdown all instances of this application before continuing.'
@@ -31,7 +33,7 @@
from cubicweb.server.edition import EditedEntity
-print '******************** backport entity content ***************************'
+print('******************** backport entity content ***************************')
todelete = defaultdict(list)
extids = set()
@@ -39,17 +41,17 @@
for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities():
etype = entity.cw_etype
if not source.support_entity(etype):
- print "source doesn't support %s, delete %s" % (etype, entity.eid)
+ print("source doesn't support %s, delete %s" % (etype, entity.eid))
todelete[etype].append(entity)
continue
try:
entity.complete()
except Exception:
- print '%s %s much probably deleted, delete it (extid %s)' % (
- etype, entity.eid, entity.cw_metainformation()['extid'])
+ print('%s %s most probably deleted, delete it (extid %s)' % (
+ etype, entity.eid, entity.cw_metainformation()['extid']))
todelete[etype].append(entity)
continue
- print 'get back', etype, entity.eid
+ print('get back', etype, entity.eid)
entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache)
if not entity.creation_date:
entity.cw_edited['creation_date'] = datetime.now()
@@ -61,7 +63,7 @@
if not entity.cwuri:
entity.cw_edited['cwuri'] = '%s/?dn=%s' % (
source.urls[0], extid.decode('utf-8', 'ignore'))
- print entity.cw_edited
+ print(entity.cw_edited)
if extid in extids:
duplicates.append(extid)
continue
@@ -73,13 +75,13 @@
# only cleanup entities table, remaining stuff should be cleaned by a c-c
# db-check to be run after this script
if duplicates:
- print 'found %s duplicate entries' % len(duplicates)
+ print('found %s duplicate entries' % len(duplicates))
from pprint import pprint
pprint(duplicates)
-print len(todelete), 'entities will be deleted'
-for etype, entities in todelete.iteritems():
- print 'deleting', etype, [e.login for e in entities]
+print(len(todelete), 'entities will be deleted')
+for etype, entities in todelete.items():
+ print('deleting', etype, [e.login for e in entities])
system_source.delete_info_multi(session, entities, source_name)
@@ -89,9 +91,9 @@
if raw_input('Commit?') in 'yY':
- print 'committing'
+ print('committing')
commit()
else:
rollback()
- print 'rolled back'
+ print('rolled back')
--- a/misc/scripts/pyroforge2datafeed.py Thu Mar 06 15:55:33 2014 +0100
+++ b/misc/scripts/pyroforge2datafeed.py Thu Nov 12 10:52:28 2015 +0100
@@ -2,6 +2,8 @@
Once this script is run, execute c-c db-check to cleanup relation tables.
"""
+from __future__ import print_function
+
import sys
try:
@@ -12,14 +14,14 @@
' on the command line)')
sys.exit(1)
except KeyError:
- print '%s is not an active source' % source_name
+ print('%s is not an active source' % source_name)
sys.exit(1)
# check source is reachable before doing anything
try:
source.get_connection()._repo
except AttributeError:
- print '%s is not reachable. Fix this before running this script' % source_name
+ print('%s is not reachable. Fix this before running this script' % source_name)
sys.exit(1)
raw_input('Ensure you have shutdown all instances of this application before continuing.'
@@ -39,7 +41,7 @@
))
-print '******************** backport entity content ***************************'
+print('******************** backport entity content ***************************')
from cubicweb.server import debugged
todelete = {}
@@ -47,20 +49,20 @@
for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities():
etype = entity.cw_etype
if not source.support_entity(etype):
- print "source doesn't support %s, delete %s" % (etype, entity.eid)
+ print("source doesn't support %s, delete %s" % (etype, entity.eid))
elif etype in DONT_GET_BACK_ETYPES:
- print 'ignore %s, delete %s' % (etype, entity.eid)
+ print('ignore %s, delete %s' % (etype, entity.eid))
else:
try:
entity.complete()
if not host in entity.cwuri:
- print 'SKIP foreign entity', entity.cwuri, source.config['base-url']
+ print('SKIP foreign entity', entity.cwuri, source.config['base-url'])
continue
except Exception:
- print '%s %s much probably deleted, delete it (extid %s)' % (
- etype, entity.eid, entity.cw_metainformation()['extid'])
+ print('%s %s most probably deleted, delete it (extid %s)' % (
+ etype, entity.eid, entity.cw_metainformation()['extid']))
else:
- print 'get back', etype, entity.eid
+ print('get back', etype, entity.eid)
entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache)
system_source.add_entity(session, entity)
sql("UPDATE entities SET asource=%(asource)s, source='system', extid=%(extid)s "
@@ -72,11 +74,11 @@
# only cleanup entities table, remaining stuff should be cleaned by a c-c
# db-check to be run after this script
-for entities in todelete.itervalues():
+for entities in todelete.values():
system_source.delete_info_multi(session, entities, source_name)
-print '******************** backport mapping **********************************'
+print('******************** backport mapping **********************************')
session.disable_hook_categories('cw.sources')
mapping = []
for mappart in rql('Any X,SCH WHERE X cw_schema SCH, X cw_for_source S, S eid %(s)s',
@@ -85,13 +87,13 @@
if schemaent.cw_etype != 'CWEType':
assert schemaent.cw_etype == 'CWRType'
sch = schema._eid_index[schemaent.eid]
- for rdef in sch.rdefs.itervalues():
+ for rdef in sch.rdefs.values():
if not source.support_entity(rdef.subject) \
or not source.support_entity(rdef.object):
continue
if rdef.subject in DONT_GET_BACK_ETYPES \
and rdef.object in DONT_GET_BACK_ETYPES:
- print 'dont map', rdef
+ print('dont map', rdef)
continue
if rdef.subject in DONT_GET_BACK_ETYPES:
options = u'action=link\nlinkattr=name'
@@ -105,7 +107,7 @@
roles = 'object',
else:
roles = 'subject',
- print 'map', rdef, options, roles
+ print('map', rdef, options, roles)
for role in roles:
mapping.append( (
(str(rdef.subject), str(rdef.rtype), str(rdef.object)),
--- a/misc/scripts/repair_file_1-9_migration.py Thu Mar 06 15:55:33 2014 +0100
+++ b/misc/scripts/repair_file_1-9_migration.py Thu Nov 12 10:52:28 2015 +0100
@@ -4,13 +4,14 @@
* on our intranet on July 07 2010
* on our extranet on July 16 2010
"""
+from __future__ import print_function
try:
backupinstance, = __args__
except ValueError:
- print 'USAGE: cubicweb-ctl shell <instance> repair_file_1-9_migration.py -- <backup instance id>'
- print
- print 'you should restored the backup on a new instance, accessible through pyro'
+ print('USAGE: cubicweb-ctl shell <instance> repair_file_1-9_migration.py -- <backup instance id>')
+ print()
+ print('you should have restored the backup on a new instance, accessible through pyro')
from cubicweb import cwconfig, dbapi
from cubicweb.server.session import hooks_control
@@ -32,20 +33,20 @@
'XX from_entity YY, YY name "File")'):
if rtype in ('is', 'is_instance_of'):
continue
- print rtype
+ print(rtype)
for feid, xeid in backupcu.execute('Any F,X WHERE F %s X, F is IN (File,Image)' % rtype):
- print 'restoring relation %s between file %s and %s' % (rtype, feid, xeid),
- print rql('SET F %s X WHERE F eid %%(f)s, X eid %%(x)s, NOT F %s X' % (rtype, rtype),
- {'f': feid, 'x': xeid})
+ print('restoring relation %s between file %s and %s' % (rtype, feid, xeid), end=' ')
+ print(rql('SET F %s X WHERE F eid %%(f)s, X eid %%(x)s, NOT F %s X' % (rtype, rtype),
+ {'f': feid, 'x': xeid}))
for rtype, in backupcu.execute('DISTINCT Any RTN WHERE X relation_type RT, RT name RTN,'
'X to_entity Y, Y name "Image", X is CWRelation, '
'EXISTS(XX is CWRelation, XX relation_type RT, '
'XX to_entity YY, YY name "File")'):
- print rtype
+ print(rtype)
for feid, xeid in backupcu.execute('Any F,X WHERE X %s F, F is IN (File,Image)' % rtype):
- print 'restoring relation %s between %s and file %s' % (rtype, xeid, feid),
- print rql('SET X %s F WHERE F eid %%(f)s, X eid %%(x)s, NOT X %s F' % (rtype, rtype),
- {'f': feid, 'x': xeid})
+ print('restoring relation %s between %s and file %s' % (rtype, xeid, feid), end=' ')
+ print(rql('SET X %s F WHERE F eid %%(f)s, X eid %%(x)s, NOT X %s F' % (rtype, rtype),
+ {'f': feid, 'x': xeid}))
commit()
--- a/misc/scripts/repair_splitbrain_ldapuser_source.py Thu Mar 06 15:55:33 2014 +0100
+++ b/misc/scripts/repair_splitbrain_ldapuser_source.py Thu Nov 12 10:52:28 2015 +0100
@@ -14,6 +14,7 @@
deciding to apply it for you. And then ADAPT it tou your needs.
"""
+from __future__ import print_function
import base64
from collections import defaultdict
@@ -28,12 +29,12 @@
' on the command line)')
sys.exit(1)
except KeyError:
- print '%s is not an active source' % source_name
+ print('%s is not an active source' % source_name)
sys.exit(1)
# check source is reachable before doing anything
if not source.get_connection().cnx:
- print '%s is not reachable. Fix this before running this script' % source_name
+ print('%s is not reachable. Fix this before running this script' % source_name)
sys.exit(1)
def find_dupes():
@@ -52,11 +53,11 @@
CWUser = schema['CWUser']
for extid, eids in dupes.items():
newest = eids.pop() # we merge everything on the newest
- print 'merging ghosts of', extid, 'into', newest
+ print('merging ghosts of', extid, 'into', newest)
# now we merge pairwise into the newest
for old in eids:
subst = {'old': old, 'new': newest}
- print ' merging', old
+ print(' merging', old)
gone_eids.append(old)
for rschema in CWUser.subject_relations():
if rschema.final or rschema == 'identity':
@@ -83,24 +84,24 @@
rollback()
return
commit() # XXX flushing operations is wanted rather than really committing
- print 'clean up entities table'
+ print('clean up entities table')
sql('DELETE FROM entities WHERE eid IN (%s)' % (', '.join(str(x) for x in gone_eids)))
commit()
def main():
dupes = find_dupes()
if not dupes:
- print 'No duplicate user'
+ print('No duplicate user')
return
- print 'Found %s duplicate user instances' % len(dupes)
+ print('Found %s duplicate user instances' % len(dupes))
while True:
- print 'Fix or dry-run? (f/d) ... or Ctrl-C to break out'
+ print('Fix or dry-run? (f/d) ... or Ctrl-C to break out')
answer = raw_input('> ')
if answer.lower() not in 'fd':
continue
- print 'Please STOP THE APPLICATION INSTANCES (service or interactive), and press Return when done.'
+ print('Please STOP THE APPLICATION INSTANCES (service or interactive), and press Return when done.')
raw_input('<I swear all running instances and workers of the application are stopped>')
with hooks_control(session, session.HOOKS_DENY_ALL):
merge_dupes(dupes, docommit=answer=='f')
--- a/multipart.py Thu Mar 06 15:55:33 2014 +0100
+++ b/multipart.py Thu Nov 12 10:52:28 2015 +0100
@@ -41,14 +41,13 @@
from wsgiref.headers import Headers
import re, sys
try:
- from urlparse import parse_qs
-except ImportError: # pragma: no cover (fallback for Python 2.5)
- from cgi import parse_qs
-try:
from io import BytesIO
except ImportError: # pragma: no cover (fallback for Python 2.5)
from StringIO import StringIO as BytesIO
+from six import PY3, text_type
+from six.moves.urllib.parse import parse_qs
+
##############################################################################
################################ Helper & Misc ################################
##############################################################################
@@ -63,7 +62,7 @@
""" A dict that remembers old values for each key """
def __init__(self, *a, **k):
self.dict = dict()
- for k, v in dict(*a, **k).iteritems():
+ for k, v in dict(*a, **k).items():
self[k] = v
def __len__(self): return len(self.dict)
@@ -84,12 +83,12 @@
return self.dict[key][index]
def iterallitems(self):
- for key, values in self.dict.iteritems():
+ for key, values in self.dict.items():
for value in values:
yield key, value
def tob(data, enc='utf8'): # Convert strings to bytes (py2 and py3)
- return data.encode(enc) if isinstance(data, unicode) else data
+ return data.encode(enc) if isinstance(data, text_type) else data
def copy_file(stream, target, maxread=-1, buffer_size=2*16):
''' Read from :stream and write to :target until :maxread or EOF. '''
@@ -397,14 +396,19 @@
'application/x-url-encoded'):
mem_limit = kw.get('mem_limit', 2**20)
if content_length > mem_limit:
- raise MultipartError("Request to big. Increase MAXMEM.")
+ raise MultipartError("Request too big. Increase MAXMEM.")
data = stream.read(mem_limit)
if stream.read(1): # These is more that does not fit mem_limit
- raise MultipartError("Request to big. Increase MAXMEM.")
+ raise MultipartError("Request too big. Increase MAXMEM.")
+ if PY3:
+ data = data.decode('ascii')
data = parse_qs(data, keep_blank_values=True)
- for key, values in data.iteritems():
+ for key, values in data.items():
for value in values:
- forms[key] = value.decode(charset)
+ if PY3:
+ forms[key] = value
+ else:
+ forms[key.decode(charset)] = value.decode(charset)
else:
raise MultipartError("Unsupported content type.")
except MultipartError:
--- a/predicates.py Thu Mar 06 15:55:33 2014 +0100
+++ b/predicates.py Thu Nov 12 10:52:28 2015 +0100
@@ -24,6 +24,9 @@
from warnings import warn
from operator import eq
+from six import string_types, integer_types
+from six.moves import range
+
from logilab.common.deprecation import deprecated
from logilab.common.registry import Predicate, objectify_predicate, yes
@@ -106,7 +109,7 @@
if accept_none is None:
accept_none = self.accept_none
if not accept_none and \
- any(rset[i][col] is None for i in xrange(len(rset))):
+ any(row[col] is None for row in rset):
return 0
etypes = rset.column_types(col)
else:
@@ -332,7 +335,7 @@
# on rset containing several entity types, each row may be
# individually adaptable, while the whole rset won't be if the
# same adapter can't be used for each type
- for row in xrange(len(kwargs['rset'])):
+ for row in range(len(kwargs['rset'])):
kwargs.setdefault('col', 0)
_score = super(adaptable, self).__call__(cls, req, row=row, **kwargs)
if not _score:
@@ -489,10 +492,13 @@
page_size = kwargs.get('page_size')
if page_size is None:
page_size = req.form.get('page_size')
+ if page_size is not None:
+ try:
+ page_size = int(page_size)
+ except ValueError:
+ page_size = None
if page_size is None:
page_size = req.property_value('navigation.page-size')
- else:
- page_size = int(page_size)
if len(rset) <= (page_size*self.nbpages):
return 0
return self.nbpages
@@ -611,7 +617,7 @@
super(is_instance, self).__init__(**kwargs)
self.expected_etypes = expected_etypes
for etype in self.expected_etypes:
- assert isinstance(etype, basestring), etype
+ assert isinstance(etype, string_types), etype
def __str__(self):
return '%s(%s)' % (self.__class__.__name__,
@@ -671,7 +677,7 @@
score = scorefunc(*args, **kwargs)
if not score:
return 0
- if isinstance(score, (int, long)):
+ if isinstance(score, integer_types):
return score
return 1
self.score_entity = intscore
@@ -1091,7 +1097,7 @@
"""
if from_state_name is not None:
warn("on_fire_transition's from_state_name argument is unused", DeprecationWarning)
- if isinstance(tr_names, basestring):
+ if isinstance(tr_names, string_types):
tr_names = set((tr_names,))
def match_etype_and_transition(trinfo):
# take care trinfo.transition is None when calling change_state
@@ -1291,7 +1297,7 @@
raise ValueError("match_form_params() can't be called with both "
"positional and named arguments")
if expected:
- if len(expected) == 1 and not isinstance(expected[0], basestring):
+ if len(expected) == 1 and not isinstance(expected[0], string_types):
raise ValueError("match_form_params() positional arguments "
"must be strings")
super(match_form_params, self).__init__(*expected)
--- a/pylintext.py Thu Mar 06 15:55:33 2014 +0100
+++ b/pylintext.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,7 +17,7 @@
def cubicweb_transform(module):
# handle objectify_predicate decorator (and its former name until bw compat
# is kept). Only look at module level functions, should be enough.
- for assnodes in module.locals.itervalues():
+ for assnodes in module.locals.values():
for node in assnodes:
if isinstance(node, scoped_nodes.Function) and node.decorators:
for decorator in node.decorators.nodes:
--- a/repoapi.py Thu Mar 06 15:55:33 2014 +0100
+++ b/repoapi.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,21 +17,17 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""Official API to access the content of a repository
"""
+from warnings import warn
+
+from six import add_metaclass
+
from logilab.common.deprecation import class_deprecated
from cubicweb.utils import parse_repo_uri
-from cubicweb import ConnectionError, AuthenticationError
+from cubicweb import AuthenticationError
from cubicweb.server.session import Connection
-### private function for specific method ############################
-
-def _get_inmemory_repo(config, vreg=None):
- from cubicweb.server.repository import Repository
- from cubicweb.server.utils import TasksManager
- return Repository(config, TasksManager(), vreg=vreg)
-
-
### public API ######################################################
def get_repository(uri=None, config=None, vreg=None):
@@ -41,16 +37,11 @@
The returned repository may be an in-memory repository or a proxy object
using a specific RPC method, depending on the given URI.
"""
- if uri is None:
- return _get_inmemory_repo(config, vreg)
-
- protocol, hostport, appid = parse_repo_uri(uri)
+ if uri is not None:
+ warn('[3.22] get_repository only wants a config')
- if protocol == 'inmemory':
- # me may have been called with a dummy 'inmemory://' uri ...
- return _get_inmemory_repo(config, vreg)
-
- raise ConnectionError('unknown protocol: `%s`' % protocol)
+ assert config is not None, 'get_repository(config=config)'
+ return config.repository(vreg)
def connect(repo, login, **kwargs):
"""Take credential and return associated Connection.
@@ -75,6 +66,6 @@
return connect(repo, anon_login, password=anon_password)
+@add_metaclass(class_deprecated)
class ClientConnection(Connection):
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.20] %(cls)s is deprecated, use Connection instead'
--- a/req.py Thu Mar 06 15:55:33 2014 +0100
+++ b/req.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,10 +20,10 @@
__docformat__ = "restructuredtext en"
from warnings import warn
-from urlparse import urlsplit, urlunsplit
-from urllib import quote as urlquote, unquote as urlunquote
from datetime import time, datetime, timedelta
-from cgi import parse_qs, parse_qsl
+
+from six import PY2, PY3, text_type
+from six.moves.urllib.parse import parse_qs, parse_qsl, quote as urlquote, unquote as urlunquote, urlsplit, urlunsplit
from logilab.common.decorators import cached
from logilab.common.deprecation import deprecated
@@ -73,7 +73,7 @@
# connection
self.user = None
self.local_perm_cache = {}
- self._ = unicode
+ self._ = text_type
def _set_user(self, orig_user):
"""set the user for this req_session_base
@@ -219,7 +219,7 @@
parts.append(
'%(varname)s %(attr)s X, '
'%(varname)s eid %%(reverse_%(attr)s)s'
- % {'attr': attr, 'varname': varmaker.next()})
+ % {'attr': attr, 'varname': next(varmaker)})
else:
assert attr in eschema.subjrels, \
'%s not in %s subject relations' % (attr, eschema)
@@ -300,7 +300,7 @@
def build_url_params(self, **kwargs):
"""return encoded params to incorporate them in a URL"""
args = []
- for param, values in kwargs.iteritems():
+ for param, values in kwargs.items():
if not isinstance(values, (list, tuple)):
values = (values,)
for value in values:
@@ -313,7 +313,7 @@
necessary encoding / decoding. Also it's designed to quote each
part of a url path and so the '/' character will be encoded as well.
"""
- if isinstance(value, unicode):
+ if PY2 and isinstance(value, unicode):
quoted = urlquote(value.encode(self.encoding), safe=safe)
return unicode(quoted, self.encoding)
return urlquote(str(value), safe=safe)
@@ -324,6 +324,8 @@
decoding is based on `self.encoding` which is the encoding
used in `url_quote`
"""
+ if PY3:
+ return urlunquote(quoted)
if isinstance(quoted, unicode):
quoted = quoted.encode(self.encoding)
try:
@@ -333,6 +335,10 @@
def url_parse_qsl(self, querystring):
"""return a list of (key, val) found in the url quoted query string"""
+ if PY3:
+ for key, val in parse_qsl(querystring):
+ yield key, val
+ return
if isinstance(querystring, unicode):
querystring = querystring.encode(self.encoding)
for key, val in parse_qsl(querystring):
@@ -348,12 +354,12 @@
newparams may only be mono-valued.
"""
- if isinstance(url, unicode):
+ if PY2 and isinstance(url, unicode):
url = url.encode(self.encoding)
schema, netloc, path, query, fragment = urlsplit(url)
query = parse_qs(query)
# sort for testing predictability
- for key, val in sorted(newparams.iteritems()):
+ for key, val in sorted(newparams.items()):
query[key] = (self.url_quote(val),)
query = '&'.join(u'%s=%s' % (param, value)
for param, values in sorted(query.items())
--- a/rqlrewrite.py Thu Mar 06 15:55:33 2014 +0100
+++ b/rqlrewrite.py Thu Nov 12 10:52:28 2015 +0100
@@ -22,6 +22,8 @@
"""
__docformat__ = "restructuredtext en"
+from six import text_type, string_types
+
from rql import nodes as n, stmts, TypeResolverException
from rql.utils import common_parent
@@ -54,7 +56,7 @@
eschema = schema.eschema
allpossibletypes = {}
for solution in solutions:
- for varname, etype in solution.iteritems():
+ for varname, etype in solution.items():
# XXX not considering aliases by design, right ?
if varname not in newroot.defined_vars or eschema(etype).final:
continue
@@ -92,7 +94,7 @@
for etype in sorted(possibletypes):
node.append(n.Constant(etype, 'etype'))
else:
- etype = iter(possibletypes).next()
+ etype = next(iter(possibletypes))
node = n.Constant(etype, 'etype')
comp = mytyperel.children[1]
comp.replace(comp.children[0], node)
@@ -286,7 +288,7 @@
if fnode.name == 'FTIRANK':
# we've to fetch the has_text relation as well
var = fnode.children[0].variable
- rel = iter(var.stinfo['ftirels']).next()
+ rel = next(iter(var.stinfo['ftirels']))
assert not rel.ored(), 'unsupported'
newselect.add_restriction(rel.copy(newselect))
# remove relation from the orig select and
@@ -330,7 +332,7 @@
union.replace(select, newselect)
elif not () in localchecks:
union.remove(select)
- for lcheckdef, lchecksolutions in localchecks.iteritems():
+ for lcheckdef, lchecksolutions in localchecks.items():
if not lcheckdef:
continue
myrqlst = select.copy(solutions=lchecksolutions)
@@ -427,7 +429,7 @@
def insert_varmap_snippets(self, varmap, rqlexprs, varexistsmap):
try:
self.init_from_varmap(varmap, varexistsmap)
- except VariableFromSubQuery, ex:
+ except VariableFromSubQuery as ex:
# variable may have been moved to a newly inserted subquery
# we should insert snippet in that subquery
subquery = self.select.aliases[ex.variable].query
@@ -548,7 +550,7 @@
'cant check security of %s, ambigous type for %s in %s',
stmt, varname, key[0]) # key[0] == the rql expression
raise Unauthorized()
- etype = iter(ptypes).next()
+ etype = next(iter(ptypes))
eschema = self.schema.eschema(etype)
if not eschema.has_perm(self.session, action):
rqlexprs = eschema.get_rqlexprs(action)
@@ -621,7 +623,7 @@
while argname in self.kwargs:
argname = subselect.allocate_varname()
subselect.add_constant_restriction(subselect.get_variable(self.u_varname),
- 'eid', unicode(argname), 'Substitute')
+ 'eid', text_type(argname), 'Substitute')
self.kwargs[argname] = self.session.user.eid
add_types_restriction(self.schema, subselect, subselect,
solutions=self.solutions)
@@ -646,7 +648,7 @@
# insert "is" where necessary
varexistsmap = {}
self.removing_ambiguity = True
- for (erqlexpr, varmap, oldvarname), etype in variantes[0].iteritems():
+ for (erqlexpr, varmap, oldvarname), etype in variantes[0].items():
varname = self.rewritten[(erqlexpr, varmap, oldvarname)]
var = self.select.defined_vars[varname]
exists = var.references()[0].scope
@@ -655,7 +657,7 @@
# insert ORED exists where necessary
for variante in variantes[1:]:
self.insert_snippets(snippets, varexistsmap)
- for key, etype in variante.iteritems():
+ for key, etype in variante.items():
varname = self.rewritten[key]
try:
var = self.select.defined_vars[varname]
@@ -674,7 +676,7 @@
variantes = set()
for sol in newsolutions:
variante = []
- for key, newvar in self.rewritten.iteritems():
+ for key, newvar in self.rewritten.items():
variante.append( (key, sol[newvar]) )
variantes.add(tuple(variante))
# rebuild variantes as dict
@@ -682,7 +684,7 @@
# remove variable which have always the same type
for key in self.rewritten:
it = iter(variantes)
- etype = it.next()[key]
+ etype = next(it)[key]
for variante in it:
if variante[key] != etype:
break
@@ -700,7 +702,7 @@
# no more references, undefine the variable
del self.select.defined_vars[vref.name]
removed.add(vref.name)
- for key, newvar in self.rewritten.items(): # I mean items we alter it
+ for key, newvar in list(self.rewritten.items()):
if newvar in removed:
del self.rewritten[key]
@@ -760,7 +762,7 @@
# insert "U eid %(u)s"
stmt.add_constant_restriction(
stmt.get_variable(self.u_varname),
- 'eid', unicode(argname), 'Substitute')
+ 'eid', text_type(argname), 'Substitute')
self.kwargs[argname] = self.session.user.eid
return self.u_varname
key = (self.current_expr, self.varmap, vname)
@@ -883,7 +885,7 @@
return n.Constant(vi['const'], 'Int')
return n.VariableRef(stmt.get_variable(selectvar))
vname_or_term = self._get_varname_or_term(node.name)
- if isinstance(vname_or_term, basestring):
+ if isinstance(vname_or_term, string_types):
return n.VariableRef(stmt.get_variable(vname_or_term))
# shared term
return vname_or_term.copy(stmt)
--- a/rset.py Thu Mar 06 15:55:33 2014 +0100
+++ b/rset.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,11 +16,14 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""The `ResultSet` class which is returned as result of an rql query"""
-
+from __future__ import print_function
__docformat__ = "restructuredtext en"
from warnings import warn
+from six import PY3
+from six.moves import range
+
from logilab.common import nullobject
from logilab.common.decorators import cached, clear_cache, copy_cache
from rql import nodes, stmts
@@ -101,7 +104,7 @@
if self._rsetactions is None:
self._rsetactions = {}
if kwargs:
- key = tuple(sorted(kwargs.iteritems()))
+ key = tuple(sorted(kwargs.items()))
else:
key = None
try:
@@ -118,12 +121,9 @@
def __getitem__(self, i):
"""returns the ith element of the result set"""
+ #print('__getitem__', i)
return self.rows[i] #ResultSetRow(self.rows[i])
- def __getslice__(self, i, j):
- """returns slice [i:j] of the result set"""
- return self.rows[i:j]
-
def __iter__(self):
"""Returns an iterator over rows"""
return iter(self.rows)
@@ -186,7 +186,7 @@
"""
rows, descr = [], []
rset = self.copy(rows, descr)
- for i in xrange(len(self)):
+ for i in range(len(self)):
if not filtercb(self.get_entity(i, col)):
continue
rows.append(self.rows[i])
@@ -215,10 +215,10 @@
rset = self.copy(rows, descr)
if col >= 0:
entities = sorted(enumerate(self.entities(col)),
- key=lambda (i, e): keyfunc(e), reverse=reverse)
+ key=lambda t: keyfunc(t[1]), reverse=reverse)
else:
entities = sorted(enumerate(self),
- key=lambda (i, e): keyfunc(e), reverse=reverse)
+ key=lambda t: keyfunc(t[1]), reverse=reverse)
for index, _ in entities:
rows.append(self.rows[index])
descr.append(self.description[index])
@@ -311,7 +311,7 @@
newselect.limit = limit
newselect.offset = offset
aliases = [nodes.VariableRef(newselect.get_variable(chr(65+i), i))
- for i in xrange(len(rqlst.children[0].selection))]
+ for i in range(len(rqlst.children[0].selection))]
for vref in aliases:
newselect.append_selected(nodes.VariableRef(vref.variable))
newselect.set_with([nodes.SubQuery(aliases, rqlst)], check=False)
@@ -373,6 +373,8 @@
warn('[3.21] the "encoded" argument is deprecated', DeprecationWarning)
encoding = self.req.encoding
rqlstr = self.syntax_tree().as_string(kwargs=self.args)
+ if PY3:
+ return rqlstr
# sounds like we get encoded or unicode string due to a bug in as_string
if not encoded:
if isinstance(rqlstr, unicode):
@@ -387,7 +389,7 @@
def entities(self, col=0):
"""iter on entities with eid in the `col` column of the result set"""
- for i in xrange(len(self)):
+ for i in range(len(self)):
# may have None values in case of outer join (or aggregat on eid
# hacks)
if self.rows[i][col] is not None:
@@ -507,9 +509,9 @@
eschema = entity.e_schema
eid_col, attr_cols, rel_cols = self._rset_structure(eschema, col)
entity.eid = rowvalues[eid_col]
- for attr, col_idx in attr_cols.iteritems():
+ for attr, col_idx in attr_cols.items():
entity.cw_attr_cache[attr] = rowvalues[col_idx]
- for (rtype, role), col_idx in rel_cols.iteritems():
+ for (rtype, role), col_idx in rel_cols.items():
value = rowvalues[col_idx]
if value is None:
if role == 'subject':
@@ -606,7 +608,7 @@
except AttributeError:
# not a variable
continue
- for i in xrange(len(select.selection)):
+ for i in range(len(select.selection)):
if i == col:
continue
coletype = self.description[row][i]
--- a/rtags.py Thu Mar 06 15:55:33 2014 +0100
+++ b/rtags.py Thu Nov 12 10:52:28 2015 +0100
@@ -40,6 +40,8 @@
import logging
from warnings import warn
+from six import string_types
+
from logilab.common.logging_ext import set_log_methods
from logilab.common.registry import RegistrableInstance, yes
@@ -95,7 +97,7 @@
def init(self, schema, check=True):
# XXX check existing keys against schema
if check:
- for (stype, rtype, otype, tagged), value in self._tagdefs.items():
+ for (stype, rtype, otype, tagged), value in list(self._tagdefs.items()):
for ertype in (stype, rtype, otype):
if ertype != '*' and not ertype in schema:
self.warning('removing rtag %s: %s, %s undefined in schema',
@@ -145,7 +147,7 @@
return tag
def _tag_etype_attr(self, etype, attr, desttype='*', *args, **kwargs):
- if isinstance(attr, basestring):
+ if isinstance(attr, string_types):
attr, role = attr, 'subject'
else:
attr, role = attr
--- a/schema.py Thu Mar 06 15:55:33 2014 +0100
+++ b/schema.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,15 +16,18 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""classes to define schemas for CubicWeb"""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
-_ = unicode
import re
from os.path import join, basename
from logging import getLogger
from warnings import warn
+from six import PY2, text_type, string_types, add_metaclass
+from six.moves import range
+
from logilab.common import tempattr
from logilab.common.decorators import cached, clear_cache, monkeypatch, cachedproperty
from logilab.common.logging_ext import set_log_methods
@@ -45,7 +48,7 @@
from rql.analyze import ETypeResolver
import cubicweb
-from cubicweb import ETYPE_NAME_MAP, ValidationError, Unauthorized
+from cubicweb import ETYPE_NAME_MAP, ValidationError, Unauthorized, _
try:
from cubicweb import server
@@ -102,6 +105,9 @@
INTERNAL_TYPES = set(('CWProperty', 'CWCache', 'ExternalUri', 'CWDataImport',
'CWSource', 'CWSourceHostConfig', 'CWSourceSchemaConfig'))
+UNIQUE_CONSTRAINTS = ('SizeConstraint', 'FormatConstraint',
+ 'StaticVocabularyConstraint',
+ 'RQLVocabularyConstraint')
_LOGGER = getLogger('cubicweb.schemaloader')
@@ -142,7 +148,10 @@
suppressing and reinserting an expression if only a space has been
added/removed for instance)
"""
- return u', '.join(' '.join(expr.split()) for expr in rqlstring.split(','))
+ union = parse(u'Any 1 WHERE %s' % rqlstring).as_string()
+ if PY2 and isinstance(union, str):
+ union = union.decode('utf-8')
+ return union.split(' WHERE ', 1)[1]
def _check_valid_formula(rdef, formula_rqlst):
@@ -204,7 +213,7 @@
"""
self.eid = eid # eid of the entity representing this rql expression
assert mainvars, 'bad mainvars %s' % mainvars
- if isinstance(mainvars, basestring):
+ if isinstance(mainvars, string_types):
mainvars = set(splitstrip(mainvars))
elif not isinstance(mainvars, set):
mainvars = set(mainvars)
@@ -246,6 +255,9 @@
return self.expression == other.expression
return False
+ def __ne__(self, other):
+ return not (self == other)
+
def __hash__(self):
return hash(self.expression)
@@ -271,7 +283,7 @@
def transform_has_permission(self):
found = None
rqlst = self.rqlst
- for var in rqlst.defined_vars.itervalues():
+ for var in rqlst.defined_vars.values():
for varref in var.references():
rel = varref.relation()
if rel is None:
@@ -319,7 +331,7 @@
"""
creating = kwargs.get('creating')
if not creating and self.eid is not None:
- key = (self.eid, tuple(sorted(kwargs.iteritems())))
+ key = (self.eid, tuple(sorted(kwargs.items())))
try:
return _cw.local_perm_cache[key]
except KeyError:
@@ -363,7 +375,7 @@
get_eschema = _cw.vreg.schema.eschema
try:
for eaction, col in has_perm_defs:
- for i in xrange(len(rset)):
+ for i in range(len(rset)):
eschema = get_eschema(rset.description[i][col])
eschema.check_perm(_cw, eaction, eid=rset[i][col])
if self.eid is not None:
@@ -400,13 +412,35 @@
return self._check(_cw, x=eid, **kwargs)
return self._check(_cw, **kwargs)
-def constraint_by_eid(self, eid):
- for cstr in self.constraints:
- if cstr.eid == eid:
- return cstr
- raise ValueError('No constraint with eid %d' % eid)
-RelationDefinitionSchema.constraint_by_eid = constraint_by_eid
+
+class CubicWebRelationDefinitionSchema(RelationDefinitionSchema):
+ def constraint_by_eid(self, eid):
+ for cstr in self.constraints:
+ if cstr.eid == eid:
+ return cstr
+ raise ValueError('No constraint with eid %d' % eid)
+
+ def rql_expression(self, expression, mainvars=None, eid=None):
+ """rql expression factory"""
+ if self.rtype.final:
+ return ERQLExpression(expression, mainvars, eid)
+ return RRQLExpression(expression, mainvars, eid)
+ def check_permission_definitions(self):
+ super(CubicWebRelationDefinitionSchema, self).check_permission_definitions()
+ schema = self.subject.schema
+ for action, groups in self.permissions.items():
+ for group_or_rqlexpr in groups:
+ if action == 'read' and \
+ isinstance(group_or_rqlexpr, RQLExpression):
+ msg = "can't use rql expression for read permission of %s"
+ raise BadSchemaDefinition(msg % self)
+ if self.final and isinstance(group_or_rqlexpr, RRQLExpression):
+ msg = "can't use RRQLExpression on %s, use an ERQLExpression"
+ raise BadSchemaDefinition(msg % self)
+ if not self.final and isinstance(group_or_rqlexpr, ERQLExpression):
+ msg = "can't use ERQLExpression on %s, use a RRQLExpression"
+ raise BadSchemaDefinition(msg % self)
def vargraph(rqlst):
""" builds an adjacency graph of variables from the rql syntax tree, e.g:
@@ -522,7 +556,7 @@
if not deps:
eschemas.append(eschema)
del graph[eschema]
- for deps in graph.itervalues():
+ for deps in graph.values():
try:
deps.remove(eschema)
except KeyError:
@@ -548,9 +582,9 @@
key = key + '_' + form
# ensure unicode
if context is not None:
- return unicode(req.pgettext(context, key))
+ return text_type(req.pgettext(context, key))
else:
- return unicode(req._(key))
+ return text_type(req._(key))
# Schema objects definition ###################################################
@@ -576,7 +610,7 @@
assert action in self.ACTIONS, action
#assert action in self._groups, '%s %s' % (self, action)
try:
- return frozenset(g for g in self.permissions[action] if isinstance(g, basestring))
+ return frozenset(g for g in self.permissions[action] if isinstance(g, string_types))
except KeyError:
return ()
PermissionMixIn.get_groups = get_groups
@@ -595,7 +629,7 @@
assert action in self.ACTIONS, action
#assert action in self._rqlexprs, '%s %s' % (self, action)
try:
- return tuple(g for g in self.permissions[action] if not isinstance(g, basestring))
+ return tuple(g for g in self.permissions[action] if not isinstance(g, string_types))
except KeyError:
return ()
PermissionMixIn.get_rqlexprs = get_rqlexprs
@@ -665,7 +699,7 @@
groups = self.get_groups(action)
if _cw.user.matching_groups(groups):
if DBG:
- print ('check_perm: %r %r: user matches %s' % (action, _self_str, groups))
+ print('check_perm: %r %r: user matches %s' % (action, _self_str, groups))
return
# if 'owners' in allowed groups, check if the user actually owns this
# object, if so that's enough
@@ -676,14 +710,14 @@
kwargs.get('creating')
or ('eid' in kwargs and _cw.user.owns(kwargs['eid']))):
if DBG:
- print ('check_perm: %r %r: user is owner or creation time' %
- (action, _self_str))
+ print('check_perm: %r %r: user is owner or creation time' %
+ (action, _self_str))
return
# else if there is some rql expressions, check them
if DBG:
- print ('check_perm: %r %r %s' %
- (action, _self_str, [(rqlexpr, kwargs, rqlexpr.check(_cw, **kwargs))
- for rqlexpr in self.get_rqlexprs(action)]))
+ print('check_perm: %r %r %s' %
+ (action, _self_str, [(rqlexpr, kwargs, rqlexpr.check(_cw, **kwargs))
+ for rqlexpr in self.get_rqlexprs(action)]))
if any(rqlexpr.check(_cw, **kwargs)
for rqlexpr in self.get_rqlexprs(action)):
return
@@ -691,35 +725,10 @@
PermissionMixIn.check_perm = check_perm
-RelationDefinitionSchema._RPROPERTIES['eid'] = None
+CubicWebRelationDefinitionSchema._RPROPERTIES['eid'] = None
# remember rproperties defined at this point. Others will have to be serialized in
# CWAttribute.extra_props
-KNOWN_RPROPERTIES = RelationDefinitionSchema.ALL_PROPERTIES()
-
-def rql_expression(self, expression, mainvars=None, eid=None):
- """rql expression factory"""
- if self.rtype.final:
- return ERQLExpression(expression, mainvars, eid)
- return RRQLExpression(expression, mainvars, eid)
-RelationDefinitionSchema.rql_expression = rql_expression
-
-orig_check_permission_definitions = RelationDefinitionSchema.check_permission_definitions
-def check_permission_definitions(self):
- orig_check_permission_definitions(self)
- schema = self.subject.schema
- for action, groups in self.permissions.iteritems():
- for group_or_rqlexpr in groups:
- if action == 'read' and \
- isinstance(group_or_rqlexpr, RQLExpression):
- msg = "can't use rql expression for read permission of %s"
- raise BadSchemaDefinition(msg % self)
- if self.final and isinstance(group_or_rqlexpr, RRQLExpression):
- msg = "can't use RRQLExpression on %s, use an ERQLExpression"
- raise BadSchemaDefinition(msg % self)
- if not self.final and isinstance(group_or_rqlexpr, ERQLExpression):
- msg = "can't use ERQLExpression on %s, use a RRQLExpression"
- raise BadSchemaDefinition(msg % self)
-RelationDefinitionSchema.check_permission_definitions = check_permission_definitions
+KNOWN_RPROPERTIES = CubicWebRelationDefinitionSchema.ALL_PROPERTIES()
class CubicWebEntitySchema(EntitySchema):
@@ -763,7 +772,7 @@
def check_permission_definitions(self):
super(CubicWebEntitySchema, self).check_permission_definitions()
- for groups in self.permissions.itervalues():
+ for groups in self.permissions.values():
for group_or_rqlexpr in groups:
if isinstance(group_or_rqlexpr, RRQLExpression):
msg = "can't use RRQLExpression on %s, use an ERQLExpression"
@@ -870,6 +879,7 @@
class CubicWebRelationSchema(PermissionMixIn, RelationSchema):
permissions = {}
ACTIONS = ()
+ rdef_class = CubicWebRelationDefinitionSchema
def __init__(self, schema=None, rdef=None, eid=None, **kwargs):
if rdef is not None:
@@ -906,7 +916,7 @@
if rdef.may_have_permission(action, req):
return True
else:
- for rdef in self.rdefs.itervalues():
+ for rdef in self.rdefs.values():
if rdef.may_have_permission(action, req):
return True
return False
@@ -948,7 +958,7 @@
if not rdef.has_perm(_cw, action, **kwargs):
return False
else:
- for rdef in self.rdefs.itervalues():
+ for rdef in self.rdefs.values():
if not rdef.has_perm(_cw, action, **kwargs):
return False
return True
@@ -986,7 +996,7 @@
etype_name_re = r'[A-Z][A-Za-z0-9]*[a-z]+[A-Za-z0-9]*$'
def add_entity_type(self, edef):
- edef.name = edef.name.encode()
+ edef.name = str(edef.name)
edef.name = bw_normalize_etype(edef.name)
if not re.match(self.etype_name_re, edef.name):
raise BadSchemaDefinition(
@@ -1011,7 +1021,7 @@
raise BadSchemaDefinition(
'%r is not a valid name for a relation type. It should be '
'lower cased' % rdef.name)
- rdef.name = rdef.name.encode()
+ rdef.name = str(rdef.name)
rschema = super(CubicWebSchema, self).add_relation_type(rdef)
self._eid_index[rschema.eid] = rschema
return rschema
@@ -1071,7 +1081,7 @@
def iter_computed_attributes(self):
for relation in self.relations():
- for rdef in relation.rdefs.itervalues():
+ for rdef in relation.rdefs.values():
if rdef.final and rdef.formula is not None:
yield rdef
@@ -1198,11 +1208,11 @@
return ';%s;%s\n%s' % (','.join(sorted(self.mainvars)), self.expression,
self.msg or '')
+ @classmethod
def deserialize(cls, value):
value, msg = value.split('\n', 1)
_, mainvars, expression = value.split(';', 2)
return cls(expression, mainvars, msg)
- deserialize = classmethod(deserialize)
def repo_check(self, session, eidfrom, rtype, eidto=None):
"""raise ValidationError if the relation doesn't satisfy the constraint
@@ -1245,7 +1255,7 @@
return _cw.execute(rql, args, build_descr=False)
-class RQLConstraint(RepoEnforcedRQLConstraintMixIn, RQLVocabularyConstraint):
+class RQLConstraint(RepoEnforcedRQLConstraintMixIn, BaseRQLConstraint):
"""the rql constraint is similar to the RQLVocabularyConstraint but
are also enforced at the repository level
"""
@@ -1287,12 +1297,13 @@
make_workflowable(cls)
return cls
+
+@add_metaclass(workflowable_definition)
class WorkflowableEntityType(ybo.EntityType):
"""Use this base class instead of :class:`EntityType` to have workflow
relations (i.e. `in_state`, `wf_info_for` and `custom_workflow`) on your
entity type.
"""
- __metaclass__ = workflowable_definition
__abstract__ = True
--- a/schemas/Bookmark.py Thu Mar 06 15:55:33 2014 +0100
+++ b/schemas/Bookmark.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,7 +19,7 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from yams.buildobjs import EntityType, RelationType, SubjectRelation, String
from cubicweb.schema import RRQLExpression
--- a/schemas/base.py Thu Mar 06 15:55:33 2014 +0100
+++ b/schemas/base.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,7 @@
"""core CubicWeb schema, but not necessary at bootstrap time"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
SubjectRelation,
--- a/schemas/bootstrap.py Thu Mar 06 15:55:33 2014 +0100
+++ b/schemas/bootstrap.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,7 +19,7 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from yams.buildobjs import (EntityType, RelationType, RelationDefinition, Bytes,
SubjectRelation, RichString, String, Boolean, Int)
--- a/schemas/workflow.py Thu Mar 06 15:55:33 2014 +0100
+++ b/schemas/workflow.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,7 +19,7 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
SubjectRelation,
--- a/selectors.py Thu Mar 06 15:55:33 2014 +0100
+++ b/selectors.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,6 +18,8 @@
from warnings import warn
+from six import string_types
+
from logilab.common.deprecation import deprecated, class_renamed
from cubicweb.predicates import *
@@ -84,7 +86,7 @@
See `EntityPredicate` documentation for behaviour when row is not specified.
- :param *etypes: entity types (`basestring`) which should be refused
+ :param *etypes: entity types (`string_types`) which should be refused
"""
def __init__(self, *etypes):
super(_but_etype, self).__init__()
--- a/server/__init__.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/__init__.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,6 +20,7 @@
The server module contains functions to initialize a new repository.
"""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
@@ -28,6 +29,9 @@
from glob import glob
from contextlib import contextmanager
+from six import text_type, string_types
+from six.moves import filter
+
from logilab.common.modutils import LazyObject
from logilab.common.textutils import splitstrip
from logilab.common.registry import yes
@@ -138,7 +142,7 @@
if not debugmode:
DEBUG = 0
return
- if isinstance(debugmode, basestring):
+ if isinstance(debugmode, string_types):
for mode in splitstrip(debugmode, sep='|'):
DEBUG |= globals()[mode]
else:
@@ -196,7 +200,7 @@
user = session.create_entity('CWUser', login=login, upassword=pwd)
for group in groups:
session.execute('SET U in_group G WHERE U eid %(u)s, G name %(group)s',
- {'u': user.eid, 'group': unicode(group)})
+ {'u': user.eid, 'group': text_type(group)})
return user
def init_repository(config, interactive=True, drop=False, vreg=None,
@@ -242,10 +246,10 @@
if failed:
failed = sqlexec(failed, execute, cnx=sqlcnx,
pbtitle='-> dropping tables (second pass)')
- remainings = filter(drop_filter, helper.list_tables(sqlcursor))
+ remainings = list(filter(drop_filter, helper.list_tables(sqlcursor)))
assert not remainings, 'Remaining tables: %s' % ', '.join(remainings)
_title = '-> creating tables '
- print _title,
+ print(_title, end=' ')
# schema entities and relations tables
# can't skip entities table even if system source doesn't support them,
# they are used sometimes by generated sql. Keeping them empty is much
@@ -255,8 +259,8 @@
# if not repo.system_source.support_entity(str(e))])
failed = sqlexec(schemasql, execute, pbtitle=_title, delimiter=';;')
if failed:
- print 'The following SQL statements failed. You should check your schema.'
- print failed
+ print('The following SQL statements failed. You should check your schema.')
+ print(failed)
raise Exception('execution of the sql schema failed, you should check your schema')
sqlcursor.close()
sqlcnx.commit()
@@ -267,19 +271,19 @@
repo.system_source.eid = ssource.eid
cnx.execute('SET X cw_source X WHERE X eid %(x)s', {'x': ssource.eid})
# insert base groups and default admin
- print '-> inserting default user and default groups.'
+ print('-> inserting default user and default groups.')
try:
- login = unicode(sourcescfg['admin']['login'])
+ login = text_type(sourcescfg['admin']['login'])
pwd = sourcescfg['admin']['password']
except KeyError:
if interactive:
msg = 'enter login and password of the initial manager account'
login, pwd = manager_userpasswd(msg=msg, confirm=True)
else:
- login, pwd = unicode(source['db-user']), source['db-password']
+ login, pwd = text_type(source['db-user']), source['db-password']
# sort for eid predicatability as expected in some server tests
for group in sorted(BASE_GROUPS):
- cnx.create_entity('CWGroup', name=unicode(group))
+ cnx.create_entity('CWGroup', name=text_type(group))
admin = create_user(cnx, login, pwd, u'managers')
cnx.execute('SET X owned_by U WHERE X is IN (CWGroup,CWSource), U eid %(u)s',
{'u': admin.eid})
@@ -310,7 +314,7 @@
# (drop instance attribute to get back to class attribute)
del config.cubicweb_appobject_path
del config.cube_appobject_path
- print '-> database for instance %s initialized.' % config.appid
+ print('-> database for instance %s initialized.' % config.appid)
def initialize_schema(config, schema, mhandler, event='create'):
--- a/server/checkintegrity.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/checkintegrity.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,6 +20,8 @@
* integrity of a CubicWeb repository. Hum actually only the system database is
checked.
"""
+from __future__ import print_function
+
__docformat__ = "restructuredtext en"
import sys
@@ -27,7 +29,7 @@
from logilab.common.shellutils import ProgressBar
-from cubicweb.schema import PURE_VIRTUAL_RTYPES, VIRTUAL_RTYPES
+from cubicweb.schema import PURE_VIRTUAL_RTYPES, VIRTUAL_RTYPES, UNIQUE_CONSTRAINTS
from cubicweb.server.sqlutils import SQL_PREFIX
def notify_fixed(fix):
@@ -90,11 +92,11 @@
dbhelper = repo.system_source.dbhelper
cursor = cnx.cnxset.cu
if not dbhelper.has_fti_table(cursor):
- print 'no text index table'
+ print('no text index table')
dbhelper.init_fti(cursor)
repo.system_source.do_fti = True # ensure full-text indexation is activated
if etypes is None:
- print 'Reindexing entities'
+ print('Reindexing entities')
etypes = set()
for eschema in schema.entities():
if eschema.final:
@@ -107,8 +109,8 @@
# clear fti table first
cnx.system_sql('DELETE FROM %s' % dbhelper.fti_table)
else:
- print 'Reindexing entities of type %s' % \
- ', '.join(sorted(str(e) for e in etypes))
+ print('Reindexing entities of type %s' % \
+ ', '.join(sorted(str(e) for e in etypes)))
# clear fti table first. Use subquery for sql compatibility
cnx.system_sql("DELETE FROM %s WHERE EXISTS(SELECT 1 FROM ENTITIES "
"WHERE eid=%s AND type IN (%s))" % (
@@ -138,10 +140,7 @@
def check_schema(schema, cnx, eids, fix=1):
"""check serialized schema"""
- print 'Checking serialized schema'
- unique_constraints = ('SizeConstraint', 'FormatConstraint',
- 'VocabularyConstraint',
- 'RQLVocabularyConstraint')
+ print('Checking serialized schema')
rql = ('Any COUNT(X),RN,SN,ON,CTN GROUPBY RN,SN,ON,CTN ORDERBY 1 '
'WHERE X is CWConstraint, R constrained_by X, '
'R relation_type RT, RT name RN, R from_entity ST, ST name SN, '
@@ -149,17 +148,17 @@
for count, rn, sn, on, cstrname in cnx.execute(rql):
if count == 1:
continue
- if cstrname in unique_constraints:
- print "ERROR: got %s %r constraints on relation %s.%s.%s" % (
- count, cstrname, sn, rn, on)
+ if cstrname in UNIQUE_CONSTRAINTS:
+ print("ERROR: got %s %r constraints on relation %s.%s.%s" % (
+ count, cstrname, sn, rn, on))
if fix:
- print 'dunno how to fix, do it yourself'
+ print('dunno how to fix, do it yourself')
def check_text_index(schema, cnx, eids, fix=1):
"""check all entities registered in the text index"""
- print 'Checking text index'
+ print('Checking text index')
msg = ' Entity with eid %s exists in the text index but in no source (autofix will remove from text index)'
cursor = cnx.system_sql('SELECT uid FROM appears;')
for row in cursor.fetchall():
@@ -173,7 +172,7 @@
def check_entities(schema, cnx, eids, fix=1):
"""check all entities registered in the repo system table"""
- print 'Checking entities system table'
+ print('Checking entities system table')
# system table but no source
msg = ' Entity %s with eid %s exists in the system table but in no source (autofix will delete the entity)'
cursor = cnx.system_sql('SELECT eid,type FROM entities;')
@@ -231,7 +230,7 @@
'WHERE s.cw_name=e.type AND NOT EXISTS(SELECT 1 FROM is_instance_of_relation as cs '
' WHERE cs.eid_from=e.eid AND cs.eid_to=s.cw_eid)')
notify_fixed(True)
- print 'Checking entities tables'
+ print('Checking entities tables')
msg = ' Entity with eid %s exists in the %s table but not in the system table (autofix will delete the entity)'
for eschema in schema.entities():
if eschema.final:
@@ -266,7 +265,7 @@
"""check that eids referenced by relations are registered in the repo system
table
"""
- print 'Checking relations'
+ print('Checking relations')
for rschema in schema.relations():
if rschema.final or rschema.type in PURE_VIRTUAL_RTYPES:
continue
@@ -290,7 +289,7 @@
cursor = cnx.system_sql('SELECT eid_from FROM %s_relation;' % rschema)
except Exception as ex:
# usually because table doesn't exist
- print 'ERROR', ex
+ print('ERROR', ex)
continue
for row in cursor.fetchall():
eid = row[0]
@@ -313,14 +312,14 @@
def check_mandatory_relations(schema, cnx, eids, fix=1):
"""check entities missing some mandatory relation"""
- print 'Checking mandatory relations'
+ print('Checking mandatory relations')
msg = '%s #%s is missing mandatory %s relation %s (autofix will delete the entity)'
for rschema in schema.relations():
if rschema.final or rschema in PURE_VIRTUAL_RTYPES or rschema in ('is', 'is_instance_of'):
continue
smandatory = set()
omandatory = set()
- for rdef in rschema.rdefs.itervalues():
+ for rdef in rschema.rdefs.values():
if rdef.cardinality[0] in '1+':
smandatory.add(rdef.subject)
if rdef.cardinality[1] in '1+':
@@ -343,12 +342,12 @@
"""check for entities stored in the system source missing some mandatory
attribute
"""
- print 'Checking mandatory attributes'
+ print('Checking mandatory attributes')
msg = '%s #%s is missing mandatory attribute %s (autofix will delete the entity)'
for rschema in schema.relations():
if not rschema.final or rschema in VIRTUAL_RTYPES:
continue
- for rdef in rschema.rdefs.itervalues():
+ for rdef in rschema.rdefs.values():
if rdef.cardinality[0] in '1+':
rql = 'Any X WHERE X %s NULL, X is %s, X cw_source S, S name "system"' % (
rschema, rdef.subject)
@@ -364,7 +363,7 @@
FIXME: rewrite using RQL queries ?
"""
- print 'Checking metadata'
+ print('Checking metadata')
cursor = cnx.system_sql("SELECT DISTINCT type FROM entities;")
eidcolumn = SQL_PREFIX + 'eid'
msg = ' %s with eid %s has no %s (autofix will set it to now)'
@@ -406,9 +405,9 @@
if fix:
cnx.commit()
else:
- print
+ print()
if not fix:
- print 'WARNING: Diagnostic run, nothing has been corrected'
+ print('WARNING: Diagnostic run, nothing has been corrected')
if reindex:
cnx.rollback()
reindex_entities(repo.schema, cnx, withpb=withpb)
--- a/server/cwzmq.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/cwzmq.py Thu Nov 12 10:52:28 2015 +0100
@@ -65,7 +65,7 @@
def add_subscriber(self, address):
subscriber = Subscriber(self.ioloop, address)
- for topic, callback in self._topics.iteritems():
+ for topic, callback in self._topics.items():
subscriber.subscribe(topic, callback)
self._subscribers.append(subscriber)
--- a/server/edition.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/edition.py Thu Nov 12 10:52:28 2015 +0100
@@ -38,7 +38,7 @@
class EditedEntity(dict):
"""encapsulate entities attributes being written by an RQL query"""
def __init__(self, entity, **kwargs):
- dict.__init__(self, **kwargs)
+ super(EditedEntity, self).__init__(**kwargs)
self.entity = entity
self.skip_security = set()
self.querier_pending_relations = {}
@@ -50,10 +50,13 @@
def __lt__(self, other):
# we don't want comparison by value inherited from dict
- return id(self) < id(other)
+ raise NotImplementedError
def __eq__(self, other):
- return id(self) == id(other)
+ return self is other
+
+ def __ne__(self, other):
+ return not (self == other)
def __setitem__(self, attr, value):
assert attr != 'eid'
@@ -93,7 +96,7 @@
setitem = self.__setitem__
else:
setitem = self.edited_attribute
- for attr, value in values.iteritems():
+ for attr, value in values.items():
setitem(attr, value)
def edited_attribute(self, attr, value):
--- a/server/hook.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/hook.py Thu Nov 12 10:52:28 2015 +0100
@@ -248,6 +248,8 @@
.. autoclass:: cubicweb.server.hook.LateOperation
.. autoclass:: cubicweb.server.hook.DataOperationMixIn
"""
+from __future__ import print_function
+
__docformat__ = "restructuredtext en"
from warnings import warn
@@ -331,7 +333,7 @@
with cnx.running_hooks_ops():
for hook in hooks:
if debug:
- print event, _kwargs, hook
+ print(event, _kwargs, hook)
hook()
def get_pruned_hooks(self, cnx, event, entities, eids_from_to, kwargs):
@@ -370,7 +372,7 @@
pruned = set()
cnx.pruned_hooks_cache[cache_key] = pruned
if look_for_selector is not None:
- for id, hooks in self.iteritems():
+ for id, hooks in self.items():
for hook in hooks:
enabled_cat, main_filter = hook.filterable_selectors()
if enabled_cat is not None:
@@ -382,14 +384,14 @@
(main_filter.frometypes is not None or \
main_filter.toetypes is not None):
continue
- first_kwargs = _iter_kwargs(entities, eids_from_to, kwargs).next()
+ first_kwargs = next(_iter_kwargs(entities, eids_from_to, kwargs))
if not main_filter(hook, cnx, **first_kwargs):
pruned.add(hook)
return pruned
def filtered_possible_objects(self, pruned, *args, **kwargs):
- for appobjects in self.itervalues():
+ for appobjects in self.values():
if pruned:
filtered_objects = [obj for obj in appobjects if obj not in pruned]
if not filtered_objects:
--- a/server/hooksmanager.py Thu Mar 06 15:55:33 2014 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,22 +0,0 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
-#
-# This file is part of CubicWeb.
-#
-# CubicWeb is free software: you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free
-# Software Foundation, either version 2.1 of the License, or (at your option)
-# any later version.
-#
-# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
-# details.
-#
-# You should have received a copy of the GNU Lesser General Public License along
-# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-from logilab.common.deprecation import class_renamed, class_moved
-from cubicweb.server import hook
-
-SystemHook = class_renamed('SystemHook', hook.Hook)
-Hook = class_moved(hook.Hook)
--- a/server/migractions.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/migractions.py Thu Nov 12 10:52:28 2015 +0100
@@ -26,6 +26,8 @@
* add an entity
* execute raw RQL queries
"""
+from __future__ import print_function
+
__docformat__ = "restructuredtext en"
import sys
@@ -40,6 +42,8 @@
from warnings import warn
from contextlib import contextmanager
+from six import PY2, text_type
+
from logilab.common.deprecation import deprecated
from logilab.common.decorators import cached, clear_cache
@@ -93,7 +97,7 @@
self.repo = repo
self.session = cnx.session
elif connect:
- self.repo_connect()
+ self.repo = config.repository()
self.set_cnx()
else:
self.session = None
@@ -134,30 +138,24 @@
try:
self.cnx = repoapi.connect(self.repo, login, password=pwd)
if not 'managers' in self.cnx.user.groups:
- print 'migration need an account in the managers group'
+ print('migration need an account in the managers group')
else:
break
except AuthenticationError:
- print 'wrong user/password'
+ print('wrong user/password')
except (KeyboardInterrupt, EOFError):
- print 'aborting...'
+ print('aborting...')
sys.exit(0)
try:
login, pwd = manager_userpasswd()
except (KeyboardInterrupt, EOFError):
- print 'aborting...'
+ print('aborting...')
sys.exit(0)
self.session = self.repo._get_session(self.cnx.sessionid)
-
- @cached
- def repo_connect(self):
- self.repo = repoapi.get_repository(config=self.config)
- return self.repo
-
def cube_upgraded(self, cube, version):
self.cmd_set_property('system.version.%s' % cube.lower(),
- unicode(version))
+ text_type(version))
self.commit()
def shutdown(self):
@@ -191,7 +189,7 @@
def backup_database(self, backupfile=None, askconfirm=True, format='native'):
config = self.config
- repo = self.repo_connect()
+ repo = self.repo
# paths
timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
instbkdir = osp.join(config.appdatahome, 'backup')
@@ -202,13 +200,13 @@
# check backup has to be done
if osp.exists(backupfile) and not \
self.confirm('Backup file %s exists, overwrite it?' % backupfile):
- print '-> no backup done.'
+ print('-> no backup done.')
return
elif askconfirm and not self.confirm('Backup %s database?' % config.appid):
- print '-> no backup done.'
+ print('-> no backup done.')
return
open(backupfile,'w').close() # kinda lock
- os.chmod(backupfile, 0600)
+ os.chmod(backupfile, 0o600)
# backup
source = repo.system_source
tmpdir = tempfile.mkdtemp()
@@ -217,7 +215,7 @@
try:
source.backup(osp.join(tmpdir, source.uri), self.confirm, format=format)
except Exception as ex:
- print '-> error trying to backup %s [%s]' % (source.uri, ex)
+ print('-> error trying to backup %s [%s]' % (source.uri, ex))
if not self.confirm('Continue anyway?', default='n'):
raise SystemExit(1)
else:
@@ -226,7 +224,7 @@
format_file.write('%s\n' % format)
with open(osp.join(tmpdir, 'versions.txt'), 'w') as version_file:
versions = repo.get_versions()
- for cube, version in versions.iteritems():
+ for cube, version in versions.items():
version_file.write('%s %s\n' % (cube, version))
if not failed:
bkup = tarfile.open(backupfile, 'w|gz')
@@ -236,7 +234,7 @@
# call hooks
repo.hm.call_hooks('server_backup', repo=repo, timestamp=timestamp)
# done
- print '-> backup file', backupfile
+ print('-> backup file', backupfile)
finally:
shutil.rmtree(tmpdir)
@@ -268,19 +266,19 @@
if written_format in ('portable', 'native'):
format = written_format
self.config.init_cnxset_pool = False
- repo = self.repo_connect()
+ repo = self.repo
source = repo.system_source
try:
source.restore(osp.join(tmpdir, source.uri), self.confirm, drop, format)
except Exception as exc:
- print '-> error trying to restore %s [%s]' % (source.uri, exc)
+ print('-> error trying to restore %s [%s]' % (source.uri, exc))
if not self.confirm('Continue anyway?', default='n'):
raise SystemExit(1)
shutil.rmtree(tmpdir)
# call hooks
repo.init_cnxset_pool()
repo.hm.call_hooks('server_restore', repo=repo, timestamp=backupfile)
- print '-> database restored.'
+ print('-> database restored.')
def commit(self):
self.cnx.commit()
@@ -362,11 +360,11 @@
directory = osp.join(self.config.cube_dir(cube), 'schema')
sql_scripts = glob(osp.join(directory, '*.%s.sql' % driver))
for fpath in sql_scripts:
- print '-> installing', fpath
+ print('-> installing', fpath)
failed = sqlexec(open(fpath).read(), self.cnx.system_sql, False,
delimiter=';;')
if failed:
- print '-> ERROR, skipping', fpath
+ print('-> ERROR, skipping', fpath)
# schema synchronization internals ########################################
@@ -424,7 +422,7 @@
{'x': expreid}, ask_confirm=False)
else:
newexprs.pop(expression)
- for expression in newexprs.itervalues():
+ for expression in newexprs.values():
expr = expression.expression
if not confirm or self.confirm('Add %s expression for %s permission of %s?'
% (expr, action, erschema)):
@@ -460,7 +458,10 @@
assert reporschema.eid, reporschema
self.rqlexecall(ss.updaterschema2rql(rschema, reporschema.eid),
ask_confirm=self.verbosity>=2)
- if syncrdefs:
+ if rschema.rule:
+ if syncperms:
+ self._synchronize_permissions(rschema, reporschema.eid)
+ elif syncrdefs:
for subj, obj in rschema.rdefs:
if (subj, obj) not in reporschema.rdefs:
continue
@@ -552,12 +553,12 @@
for name in cols:
rschema = repoeschema.subjrels.get(name)
if rschema is None:
- print 'dont add %s unique constraint on %s, missing %s' % (
- ','.join(cols), eschema, name)
+ print('dont add %s unique constraint on %s, missing %s' % (
+ ','.join(cols), eschema, name))
return False
if not (rschema.final or rschema.inlined):
- print 'dont add %s unique constraint on %s, %s is neither final nor inlined' % (
- ','.join(cols), eschema, name)
+ print('dont add %s unique constraint on %s, %s is neither final nor inlined' % (
+ ','.join(cols), eschema, name))
return False
return True
@@ -574,6 +575,7 @@
against its current definition:
* order and other properties
* constraints
+ * permissions
"""
subjtype, objtype = str(subjtype), str(objtype)
rschema = self.fs_schema.rschema(rtype)
@@ -743,8 +745,8 @@
rschema = self.repo.schema.rschema(attrname)
attrtype = rschema.objects(etype)[0]
except KeyError:
- print 'warning: attribute %s %s is not known, skip deletion' % (
- etype, attrname)
+ print('warning: attribute %s %s is not known, skip deletion' % (
+ etype, attrname))
else:
self.cmd_drop_relation_definition(etype, attrname, attrtype,
commit=commit)
@@ -781,7 +783,7 @@
instschema = self.repo.schema
eschema = self.fs_schema.eschema(etype)
if etype in instschema and not (eschema.final and eschema.eid is None):
- print 'warning: %s already known, skip addition' % etype
+ print('warning: %s already known, skip addition' % etype)
return
confirm = self.verbosity >= 2
groupmap = self.group_mapping()
@@ -918,7 +920,7 @@
"""
schema = self.repo.schema
if oldname not in schema:
- print 'warning: entity type %s is unknown, skip renaming' % oldname
+ print('warning: entity type %s is unknown, skip renaming' % oldname)
return
# if merging two existing entity types
if newname in schema:
@@ -997,7 +999,7 @@
# elif simply renaming an entity type
else:
self.rqlexec('SET ET name %(newname)s WHERE ET is CWEType, ET name %(on)s',
- {'newname' : unicode(newname), 'on' : oldname},
+ {'newname' : text_type(newname), 'on' : oldname},
ask_confirm=False)
if commit:
self.commit()
@@ -1017,8 +1019,8 @@
rschema = self.fs_schema.rschema(rtype)
execute = self.cnx.execute
if rtype in reposchema:
- print 'warning: relation type %s is already known, skip addition' % (
- rtype)
+ print('warning: relation type %s is already known, skip addition' % (
+ rtype))
elif rschema.rule:
gmap = self.group_mapping()
ss.execschemarql(execute, rschema, ss.crschema2rql(rschema, gmap))
@@ -1098,8 +1100,8 @@
if not rtype in self.repo.schema:
self.cmd_add_relation_type(rtype, addrdef=False, commit=True)
if (subjtype, objtype) in self.repo.schema.rschema(rtype).rdefs:
- print 'warning: relation %s %s %s is already known, skip addition' % (
- subjtype, rtype, objtype)
+ print('warning: relation %s %s %s is already known, skip addition' % (
+ subjtype, rtype, objtype))
return
rdef = self._get_rdef(rschema, subjtype, objtype)
ss.execschemarql(self.cnx.execute, rdef,
@@ -1200,7 +1202,7 @@
values = []
for k, v in kwargs.items():
values.append('X %s %%(%s)s' % (k, k))
- if isinstance(v, str):
+ if PY2 and isinstance(v, str):
kwargs[k] = unicode(v)
rql = 'SET %s WHERE %s' % (','.join(values), ','.join(restriction))
self.rqlexec(rql, kwargs, ask_confirm=self.verbosity>=2)
@@ -1233,7 +1235,7 @@
self.rqlexec('SET C value %%(v)s WHERE X from_entity S, X relation_type R,'
'X constrained_by C, C cstrtype CT, CT name "SizeConstraint",'
'S name "%s", R name "%s"' % (etype, rtype),
- {'v': unicode(SizeConstraint(size).serialize())},
+ {'v': text_type(SizeConstraint(size).serialize())},
ask_confirm=self.verbosity>=2)
else:
self.rqlexec('DELETE X constrained_by C WHERE X from_entity S, X relation_type R,'
@@ -1270,7 +1272,7 @@
:rtype: `Workflow`
"""
- wf = self.cmd_create_entity('Workflow', name=unicode(name),
+ wf = self.cmd_create_entity('Workflow', name=text_type(name),
**kwargs)
if not isinstance(wfof, (list, tuple)):
wfof = (wfof,)
@@ -1278,19 +1280,19 @@
return 'missing workflow relations, see make_workflowable(%s)' % etype
for etype in wfof:
eschema = self.repo.schema[etype]
- etype = unicode(etype)
+ etype = text_type(etype)
if ensure_workflowable:
assert 'in_state' in eschema.subjrels, _missing_wf_rel(etype)
assert 'custom_workflow' in eschema.subjrels, _missing_wf_rel(etype)
assert 'wf_info_for' in eschema.objrels, _missing_wf_rel(etype)
rset = self.rqlexec(
'SET X workflow_of ET WHERE X eid %(x)s, ET name %(et)s',
- {'x': wf.eid, 'et': unicode(etype)}, ask_confirm=False)
+ {'x': wf.eid, 'et': text_type(etype)}, ask_confirm=False)
assert rset, 'unexistant entity type %s' % etype
if default:
self.rqlexec(
'SET ET default_workflow X WHERE X eid %(x)s, ET name %(et)s',
- {'x': wf.eid, 'et': unicode(etype)}, ask_confirm=False)
+ {'x': wf.eid, 'et': text_type(etype)}, ask_confirm=False)
if commit:
self.commit()
return wf
@@ -1321,13 +1323,13 @@
To set a user specific property value, use appropriate method on CWUser
instance.
"""
- value = unicode(value)
+ value = text_type(value)
try:
prop = self.rqlexec(
'CWProperty X WHERE X pkey %(k)s, NOT X for_user U',
- {'k': unicode(pkey)}, ask_confirm=False).get_entity(0, 0)
+ {'k': text_type(pkey)}, ask_confirm=False).get_entity(0, 0)
except Exception:
- self.cmd_create_entity('CWProperty', pkey=unicode(pkey), value=value)
+ self.cmd_create_entity('CWProperty', pkey=text_type(pkey), value=value)
else:
prop.cw_set(value=value)
@@ -1351,7 +1353,7 @@
# remove from entity cache to avoid memory exhaustion
del entity.cw_attr_cache[attribute]
pb.update()
- print
+ print()
source.set_storage(etype, attribute, storage)
def cmd_create_entity(self, etype, commit=False, **kwargs):
@@ -1564,12 +1566,14 @@
else:
raise StopIteration
- def next(self):
+ def __next__(self):
if self._rsetit is not None:
- return self._rsetit.next()
+ return next(self._rsetit)
rset = self._get_rset()
self._rsetit = iter(rset)
- return self._rsetit.next()
+ return next(self._rsetit)
+
+ next = __next__
def entities(self):
try:
--- a/server/querier.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/querier.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,10 +18,15 @@
"""Helper classes to execute RQL queries on a set of sources, performing
security checking and data aggregation.
"""
+from __future__ import print_function
+
__docformat__ = "restructuredtext en"
from itertools import repeat
+from six import text_type, string_types, integer_types
+from six.moves import range
+
from rql import RQLSyntaxError, CoercionError
from rql.stmts import Union
from rql.nodes import ETYPE_PYOBJ_MAP, etype_from_pyobj, Relation, Exists, Not
@@ -61,7 +66,7 @@
def check_no_password_selected(rqlst):
"""check that Password entities are not selected"""
for solution in rqlst.solutions:
- for var, etype in solution.iteritems():
+ for var, etype in solution.items():
if etype == 'Password':
raise Unauthorized('Password selection is not allowed (%s)' % var)
@@ -103,13 +108,13 @@
solution, args))
if not user.matching_groups(rdef.get_groups('read')):
if DBG:
- print ('check_read_access: %s %s does not match %s' %
- (rdef, user.groups, rdef.get_groups('read')))
+ print('check_read_access: %s %s does not match %s' %
+ (rdef, user.groups, rdef.get_groups('read')))
# XXX rqlexpr not allowed
raise Unauthorized('read', rel.r_type)
if DBG:
- print ('check_read_access: %s %s matches %s' %
- (rdef, user.groups, rdef.get_groups('read')))
+ print('check_read_access: %s %s matches %s' %
+ (rdef, user.groups, rdef.get_groups('read')))
def get_local_checks(cnx, rqlst, solution):
"""Check that the given user has credentials to access data read by the
@@ -138,8 +143,8 @@
ex = Unauthorized('read', solution[varname])
ex.var = varname
if DBG:
- print ('check_read_access: %s %s %s %s' %
- (varname, eschema, user.groups, eschema.get_groups('read')))
+ print('check_read_access: %s %s %s %s' %
+ (varname, eschema, user.groups, eschema.get_groups('read')))
raise ex
# don't insert security on variable only referenced by 'NOT X relation Y' or
# 'NOT EXISTS(X relation Y)'
@@ -265,7 +270,7 @@
# which have a known eid
varkwargs = {}
if not cnx.transaction_data.get('security-rqlst-cache'):
- for var in rqlst.defined_vars.itervalues():
+ for var in rqlst.defined_vars.values():
if var.stinfo['constnode'] is not None:
eid = var.stinfo['constnode'].eval(self.args)
varkwargs[var.name] = int(eid)
@@ -285,7 +290,7 @@
newsolutions.append(solution)
# try to benefit of rqlexpr.check cache for entities which
# are specified by eid in query'args
- for varname, eid in varkwargs.iteritems():
+ for varname, eid in varkwargs.items():
try:
rqlexprs = localcheck.pop(varname)
except KeyError:
@@ -303,7 +308,7 @@
# mark variables protected by an rql expression
restricted_vars.update(localcheck)
# turn local check into a dict key
- localcheck = tuple(sorted(localcheck.iteritems()))
+ localcheck = tuple(sorted(localcheck.items()))
localchecks.setdefault(localcheck, []).append(solution)
# raise Unautorized exception if the user can't access to any solution
if not newsolutions:
@@ -414,7 +419,7 @@
def relation_defs(self):
"""return the list for relation definitions to insert"""
- for rdefs in self._expanded_r_defs.itervalues():
+ for rdefs in self._expanded_r_defs.values():
for rdef in rdefs:
yield rdef
for rdef in self.r_defs:
@@ -446,13 +451,13 @@
relations = {}
for subj, rtype, obj in self.relation_defs():
# if a string is given into args instead of an int, we get it here
- if isinstance(subj, basestring):
+ if isinstance(subj, string_types):
subj = int(subj)
- elif not isinstance(subj, (int, long)):
+ elif not isinstance(subj, integer_types):
subj = subj.entity.eid
- if isinstance(obj, basestring):
+ if isinstance(obj, string_types):
obj = int(obj)
- elif not isinstance(obj, (int, long)):
+ elif not isinstance(obj, integer_types):
obj = obj.entity.eid
if repo.schema.rschema(rtype).inlined:
if subj not in edited_entities:
@@ -468,7 +473,7 @@
else:
relations[rtype] = [(subj, obj)]
repo.glob_add_relations(cnx, relations)
- for edited in edited_entities.itervalues():
+ for edited in edited_entities.values():
repo.glob_update_entity(cnx, edited)
@@ -507,7 +512,7 @@
def parse(self, rql, annotate=False):
"""return a rql syntax tree for the given rql"""
try:
- return self._parse(unicode(rql), annotate=annotate)
+ return self._parse(text_type(rql), annotate=annotate)
except UnicodeError:
raise RQLSyntaxError(rql)
@@ -539,8 +544,8 @@
"""
if server.DEBUG & (server.DBG_RQL | server.DBG_SQL):
if server.DEBUG & (server.DBG_MORE | server.DBG_SQL):
- print '*'*80
- print 'querier input', repr(rql), repr(args)
+ print('*'*80)
+ print('querier input', repr(rql), repr(args))
# parse the query and binds variables
cachekey = (rql,)
try:
@@ -601,7 +606,7 @@
if args:
# different SQL generated when some argument is None or not (IS
# NULL). This should be considered when computing sql cache key
- cachekey += tuple(sorted([k for k, v in args.iteritems()
+ cachekey += tuple(sorted([k for k, v in args.items()
if v is None]))
# make an execution plan
plan = self.plan_factory(rqlst, args, cnx)
@@ -641,7 +646,7 @@
# so compute description manually even if there is only
# one solution
basedescr = [None] * len(plan.selected)
- todetermine = zip(xrange(len(plan.selected)), repeat(False))
+ todetermine = list(zip(range(len(plan.selected)), repeat(False)))
descr = _build_descr(cnx, results, basedescr, todetermine)
# FIXME: get number of affected entities / relations on non
# selection queries ?
@@ -668,7 +673,7 @@
unstables = rqlst.get_variable_indices()
basedescr = []
todetermine = []
- for i in xrange(len(rqlst.children[0].selection)):
+ for i in range(len(rqlst.children[0].selection)):
ttype = _selection_idx_type(i, rqlst, args)
if ttype is None or ttype == 'Any':
ttype = None
--- a/server/repository.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/repository.py Thu Nov 12 10:52:28 2015 +0100
@@ -25,15 +25,18 @@
point to a cubicweb instance.
* handles session management
"""
+from __future__ import print_function
+
__docformat__ = "restructuredtext en"
import threading
-import Queue
from warnings import warn
from itertools import chain
from time import time, localtime, strftime
from contextlib import contextmanager
+from six.moves import range, queue
+
from logilab.common.decorators import cached, clear_cache
from logilab.common.deprecation import deprecated
@@ -186,18 +189,18 @@
# registry hook to fix user class on registry reload
@onevent('after-registry-reload', self)
def fix_user_classes(self):
- # After registery reload the 'CWUser' class used for CWEtype
- # changed. To any existing user object have a different class than
+ # After registry reload the 'CWUser' class used for CWEtype
+ # changed. So any existing user objects have a different class than
# the new loaded one. We are hot fixing this.
usercls = self.vreg['etypes'].etype_class('CWUser')
- for session in self._sessions.itervalues():
+ for session in self._sessions.values():
if not isinstance(session.user, InternalManager):
session.user.__class__ = usercls
def init_cnxset_pool(self):
"""should be called bootstrap_repository, as this is what it does"""
config = self.config
- self._cnxsets_pool = Queue.Queue()
+ self._cnxsets_pool = queue.Queue()
# 0. init a cnxset that will be used to fetch bootstrap information from
# the database
self._cnxsets_pool.put_nowait(self.system_source.wrapped_connection())
@@ -240,7 +243,7 @@
# proper initialization
self._get_cnxset().close(True)
self.cnxsets = [] # list of available cnxsets (can't iterate on a Queue)
- for i in xrange(config['connections-pool-size']):
+ for i in range(config['connections-pool-size']):
self.cnxsets.append(self.system_source.wrapped_connection())
self._cnxsets_pool.put_nowait(self.cnxsets[-1])
@@ -308,7 +311,7 @@
else:
self.vreg._set_schema(schema)
self.querier.set_schema(schema)
- for source in self.sources_by_uri.itervalues():
+ for source in self.sources_by_uri.values():
source.set_schema(schema)
self.schema = schema
@@ -377,7 +380,7 @@
def _get_cnxset(self):
try:
return self._cnxsets_pool.get(True, timeout=5)
- except Queue.Empty:
+ except queue.Empty:
raise Exception('no connections set available after 5 secs, probably either a '
'bug in code (too many uncommited/rolled back '
'connections) or too much load on the server (in '
@@ -387,13 +390,6 @@
def _free_cnxset(self, cnxset):
self._cnxsets_pool.put_nowait(cnxset)
- def pinfo(self):
- # XXX: session.cnxset is accessed from a local storage, would be interesting
- # to see if there is a cnxset set in any thread specific data)
- return '%s: %s (%s)' % (self._cnxsets_pool.qsize(),
- ','.join(session.user.login for session in self._sessions.itervalues()
- if session.cnxset),
- threading.currentThread())
def shutdown(self):
"""called on server stop event to properly close opened sessions and
connections
@@ -441,7 +437,7 @@
"""
# iter on sources_by_uri then check enabled source since sources doesn't
# contain copy based sources
- for source in self.sources_by_uri.itervalues():
+ for source in self.sources_by_uri.values():
if self.config.source_enabled(source) and source.support_entity('CWUser'):
try:
return source.authenticate(cnx, login, **authinfo)
@@ -575,7 +571,7 @@
"""
sources = {}
# remove sensitive information
- for uri, source in self.sources_by_uri.iteritems():
+ for uri, source in self.sources_by_uri.items():
sources[uri] = source.public_config
return sources
@@ -623,7 +619,7 @@
raise Exception('bad input for find_user')
with self.internal_cnx() as cnx:
varmaker = rqlvar_maker()
- vars = [(attr, varmaker.next()) for attr in fetch_attrs]
+ vars = [(attr, next(varmaker)) for attr in fetch_attrs]
rql = 'Any %s WHERE X is CWUser, ' % ','.join(var[1] for var in vars)
rql += ','.join('X %s %s' % (var[0], var[1]) for var in vars) + ','
rset = cnx.execute(rql + ','.join('X %s %%(%s)s' % (attr, attr)
@@ -919,7 +915,7 @@
# set caches asap
extid = self.init_entity_caches(cnx, entity, source)
if server.DEBUG & server.DBG_REPO:
- print 'ADD entity', self, entity.cw_etype, entity.eid, edited
+ print('ADD entity', self, entity.cw_etype, entity.eid, edited)
prefill_entity_caches(entity)
self.hm.call_hooks('before_add_entity', cnx, entity=entity)
relations = preprocess_inlined_relations(cnx, entity)
@@ -950,8 +946,8 @@
"""
entity = edited.entity
if server.DEBUG & server.DBG_REPO:
- print 'UPDATE entity', entity.cw_etype, entity.eid, \
- entity.cw_attr_cache, edited
+ print('UPDATE entity', entity.cw_etype, entity.eid,
+ entity.cw_attr_cache, edited)
hm = self.hm
eschema = entity.e_schema
cnx.set_entity_cache(entity)
@@ -1043,9 +1039,9 @@
except KeyError:
data_by_etype[etype] = [entity]
source = self.system_source
- for etype, entities in data_by_etype.iteritems():
+ for etype, entities in data_by_etype.items():
if server.DEBUG & server.DBG_REPO:
- print 'DELETE entities', etype, [entity.eid for entity in entities]
+ print('DELETE entities', etype, [entity.eid for entity in entities])
self.hm.call_hooks('before_delete_entity', cnx, entities=entities)
self._delete_cascade_multi(cnx, entities)
source.delete_entities(cnx, entities)
@@ -1067,10 +1063,10 @@
subjects_by_types = {}
objects_by_types = {}
activintegrity = cnx.is_hook_category_activated('activeintegrity')
- for rtype, eids_subj_obj in relations.iteritems():
+ for rtype, eids_subj_obj in relations.items():
if server.DEBUG & server.DBG_REPO:
for subjeid, objeid in eids_subj_obj:
- print 'ADD relation', subjeid, rtype, objeid
+ print('ADD relation', subjeid, rtype, objeid)
for subjeid, objeid in eids_subj_obj:
if rtype in relations_by_rtype:
relations_by_rtype[rtype].append((subjeid, objeid))
@@ -1105,22 +1101,22 @@
objects[objeid] = len(relations_by_rtype[rtype])
continue
objects[objeid] = len(relations_by_rtype[rtype])
- for rtype, source_relations in relations_by_rtype.iteritems():
+ for rtype, source_relations in relations_by_rtype.items():
self.hm.call_hooks('before_add_relation', cnx,
rtype=rtype, eids_from_to=source_relations)
- for rtype, source_relations in relations_by_rtype.iteritems():
+ for rtype, source_relations in relations_by_rtype.items():
source.add_relations(cnx, rtype, source_relations)
rschema = self.schema.rschema(rtype)
for subjeid, objeid in source_relations:
cnx.update_rel_cache_add(subjeid, rtype, objeid, rschema.symmetric)
- for rtype, source_relations in relations_by_rtype.iteritems():
+ for rtype, source_relations in relations_by_rtype.items():
self.hm.call_hooks('after_add_relation', cnx,
rtype=rtype, eids_from_to=source_relations)
def glob_delete_relation(self, cnx, subject, rtype, object):
"""delete a relation from the repository"""
if server.DEBUG & server.DBG_REPO:
- print 'DELETE relation', subject, rtype, object
+ print('DELETE relation', subject, rtype, object)
source = self.system_source
self.hm.call_hooks('before_delete_relation', cnx,
eidfrom=subject, rtype=rtype, eidto=object)
--- a/server/rqlannotation.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/rqlannotation.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,6 +18,7 @@
"""Functions to add additional annotations on a rql syntax tree to ease later
code generation.
"""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
@@ -33,7 +34,7 @@
#if server.DEBUG:
# print '-------- sql annotate', repr(rqlst)
getrschema = annotator.schema.rschema
- for var in rqlst.defined_vars.itervalues():
+ for var in rqlst.defined_vars.values():
stinfo = var.stinfo
if stinfo.get('ftirels'):
has_text_query = True
@@ -144,7 +145,7 @@
stinfo['invariant'] = False
# see unittest_rqlannotation. test_has_text_security_cache_bug
# XXX probably more to do, but yet that work without more...
- for col_alias in rqlst.aliases.itervalues():
+ for col_alias in rqlst.aliases.values():
if col_alias.stinfo.get('ftirels'):
has_text_query = True
return has_text_query
@@ -194,7 +195,7 @@
# if DISTINCT query, can use variable from a different scope as principal
# since introduced duplicates will be removed
if scope.stmt.distinct and diffscope_rels:
- return iter(_sort(diffscope_rels)).next()
+ return next(iter(_sort(diffscope_rels)))
# XXX could use a relation from a different scope if it can't generate
# duplicates, so we should have to check cardinality
raise CantSelectPrincipal()
@@ -231,7 +232,7 @@
for select in union.children:
for subquery in select.with_:
set_qdata(getrschema, subquery.query, noinvariant)
- for var in select.defined_vars.itervalues():
+ for var in select.defined_vars.values():
if var.stinfo['invariant']:
if var in noinvariant and not var.stinfo['principal'].r_type == 'has_text':
var._q_invariant = False
@@ -317,7 +318,7 @@
def compute(self, rqlst):
# set domains for each variable
- for varname, var in rqlst.defined_vars.iteritems():
+ for varname, var in rqlst.defined_vars.items():
if var.stinfo['uidrel'] is not None or \
self.eschema(rqlst.solutions[0][varname]).final:
ptypes = var.stinfo['possibletypes']
@@ -354,9 +355,9 @@
continue
def _debug_print(self):
- print 'varsols', dict((x, sorted(str(v) for v in values))
- for x, values in self.varsols.iteritems())
- print 'ambiguous vars', sorted(self.ambiguousvars)
+ print('varsols', dict((x, sorted(str(v) for v in values))
+ for x, values in self.varsols.items()))
+ print('ambiguous vars', sorted(self.ambiguousvars))
def set_rel_constraint(self, term, rel, etypes_func):
if isinstance(term, VariableRef) and self.is_ambiguous(term.variable):
--- a/server/schema2sql.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/schema2sql.py Thu Nov 12 10:52:28 2015 +0100
@@ -162,8 +162,8 @@
def check_constraint(eschema, aschema, attr, constraint, dbhelper, prefix=''):
# XXX should find a better name
- cstrname = 'cstr' + md5(eschema.type + attr + constraint.type() +
- (constraint.serialize() or '')).hexdigest()
+ cstrname = 'cstr' + md5((eschema.type + attr + constraint.type() +
+ (constraint.serialize() or '')).encode('ascii')).hexdigest()
if constraint.type() == 'BoundaryConstraint':
value = as_sql(constraint.boundary, dbhelper, prefix)
return cstrname, '%s%s %s %s' % (prefix, attr, constraint.operator, value)
--- a/server/schemaserial.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/schemaserial.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,6 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""functions for schema / permissions (de)serialization using RQL"""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
@@ -23,6 +24,8 @@
import json
import sys
+from six import PY2, text_type, string_types
+
from logilab.common.shellutils import ProgressBar, DummyProgressBar
from yams import BadSchemaDefinition, schema as schemamod, buildobjs as ybo
@@ -49,11 +52,11 @@
return res
missing = [g for g in ('owners', 'managers', 'users', 'guests') if not g in res]
if missing:
- print 'some native groups are missing but the following groups have been found:'
- print '\n'.join('* %s (%s)' % (n, eid) for n, eid in res.items())
- print
- print 'enter the eid of a to group to map to each missing native group'
- print 'or just type enter to skip permissions granted to a group'
+ print('some native groups are missing but the following groups have been found:')
+ print('\n'.join('* %s (%s)' % (n, eid) for n, eid in res.items()))
+ print()
+ print('enter the eid of a group to map to each missing native group')
+ print('or just type enter to skip permissions granted to a group')
for group in missing:
while True:
value = raw_input('eid for group %s: ' % group).strip()
@@ -62,13 +65,13 @@
try:
eid = int(value)
except ValueError:
- print 'eid should be an integer'
+ print('eid should be an integer')
continue
for eid_ in res.values():
if eid == eid_:
break
else:
- print 'eid is not a group eid'
+ print('eid is not a group eid')
continue
res[name] = eid
break
@@ -146,7 +149,7 @@
{'x': etype, 'n': netype})
cnx.commit(False)
tocleanup = [eid]
- tocleanup += (eid for eid, cached in repo._type_source_cache.iteritems()
+ tocleanup += (eid for eid, cached in repo._type_source_cache.items()
if etype == cached[0])
repo.clear_caches(tocleanup)
cnx.commit(False)
@@ -240,7 +243,7 @@
'order', 'description', 'indexed', 'fulltextindexed',
'internationalizable', 'default', 'formula'), values))
typeparams = extra_props.get(attrs['rdefeid'])
- attrs.update(json.load(typeparams) if typeparams else {})
+ attrs.update(json.loads(typeparams.getvalue().decode('ascii')) if typeparams else {})
default = attrs['default']
if default is not None:
if isinstance(default, Binary):
@@ -281,7 +284,7 @@
else:
rtype = str(rel)
relations[1].append(rtype)
- for eschema, unique_together in unique_togethers.itervalues():
+ for eschema, unique_together in unique_togethers.values():
eschema._unique_together.append(tuple(sorted(unique_together)))
schema.infer_specialization_rules()
cnx.commit()
@@ -331,7 +334,7 @@
thispermsdict = permsidx[erschema.eid]
except KeyError:
return
- for action, somethings in thispermsdict.iteritems():
+ for action, somethings in thispermsdict.items():
erschema.permissions[action] = tuple(
isinstance(p, tuple) and erschema.rql_expression(*p) or p
for p in somethings)
@@ -344,7 +347,7 @@
current schema
"""
_title = '-> storing the schema in the database '
- print _title,
+ print(_title, end=' ')
execute = cnx.execute
eschemas = schema.entities()
pb_size = (len(eschemas + schema.relations())
@@ -366,7 +369,7 @@
cstrtypemap = {}
rql = 'INSERT CWConstraintType X: X name %(ct)s'
for cstrtype in CONSTRAINTS:
- cstrtypemap[cstrtype] = execute(rql, {'ct': unicode(cstrtype)},
+ cstrtypemap[cstrtype] = execute(rql, {'ct': text_type(cstrtype)},
build_descr=False)[0][0]
pb.update()
# serialize relations
@@ -381,10 +384,10 @@
continue
execschemarql(execute, rschema, rschema2rql(rschema, addrdef=False))
if rschema.symmetric:
- rdefs = [rdef for k, rdef in rschema.rdefs.iteritems()
+ rdefs = [rdef for k, rdef in rschema.rdefs.items()
if (rdef.subject, rdef.object) == k]
else:
- rdefs = rschema.rdefs.itervalues()
+ rdefs = rschema.rdefs.values()
for rdef in rdefs:
execschemarql(execute, rdef,
rdef2rql(rdef, cstrtypemap, groupmap))
@@ -397,7 +400,7 @@
for rql, kwargs in specialize2rql(schema):
execute(rql, kwargs, build_descr=False)
pb.update()
- print
+ print()
# high level serialization functions
@@ -455,8 +458,8 @@
columnset = set()
for columns in eschema._unique_together:
if columns in columnset:
- print ('schemaserial: skipping duplicate unique together %r %r' %
- (eschema.type, columns))
+ print('schemaserial: skipping duplicate unique together %r %r' %
+ (eschema.type, columns))
continue
columnset.add(columns)
rql, args = _uniquetogether2rql(eschema, columns)
@@ -471,7 +474,7 @@
for i, name in enumerate(unique_together):
rschema = eschema.schema.rschema(name)
rtype = 'T%d' % i
- substs[rtype] = unicode(rschema.type)
+ substs[rtype] = text_type(rschema.type)
relations.append('C relations %s' % rtype)
restrictions.append('%(rtype)s name %%(%(rtype)s)s' % {'rtype': rtype})
relations = ', '.join(relations)
@@ -483,11 +486,11 @@
def _ervalues(erschema):
try:
- type_ = unicode(erschema.type)
+ type_ = text_type(erschema.type)
except UnicodeDecodeError as e:
raise Exception("can't decode %s [was %s]" % (erschema.type, e))
try:
- desc = unicode(erschema.description) or u''
+ desc = text_type(erschema.description) or u''
except UnicodeDecodeError as e:
raise Exception("can't decode %s [was %s]" % (erschema.description, e))
return {
@@ -509,7 +512,7 @@
if addrdef:
assert cstrtypemap
# sort for testing purpose
- for rdef in sorted(rschema.rdefs.itervalues(),
+ for rdef in sorted(rschema.rdefs.values(),
key=lambda x: (x.subject, x.object)):
for rql, values in rdef2rql(rdef, cstrtypemap, groupmap):
yield rql, values
@@ -519,7 +522,7 @@
values['final'] = rschema.final
values['symmetric'] = rschema.symmetric
values['inlined'] = rschema.inlined
- if isinstance(rschema.fulltext_container, str):
+ if PY2 and isinstance(rschema.fulltext_container, str):
values['fulltext_container'] = unicode(rschema.fulltext_container)
else:
values['fulltext_container'] = rschema.fulltext_container
@@ -535,7 +538,7 @@
def crschema_relations_values(crschema):
values = _ervalues(crschema)
- values['rule'] = unicode(crschema.rule)
+ values['rule'] = text_type(crschema.rule)
# XXX why oh why?
del values['final']
relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)]
@@ -581,20 +584,20 @@
value = bool(value)
elif prop == 'ordernum':
value = int(value)
- elif isinstance(value, str):
+ elif PY2 and isinstance(value, str):
value = unicode(value)
if value is not None and prop == 'default':
value = Binary.zpickle(value)
values[amap.get(prop, prop)] = value
if extra:
- values['extra_props'] = Binary(json.dumps(extra))
+ values['extra_props'] = Binary(json.dumps(extra).encode('ascii'))
relations = ['X %s %%(%s)s' % (attr, attr) for attr in sorted(values)]
return relations, values
def constraints2rql(cstrtypemap, constraints, rdefeid=None):
for constraint in constraints:
values = {'ct': cstrtypemap[constraint.type()],
- 'value': unicode(constraint.serialize()),
+ 'value': text_type(constraint.serialize()),
'x': rdefeid} # when not specified, will have to be set by the caller
yield 'INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE \
CT eid %(ct)s, EDEF eid %(x)s', values
@@ -613,23 +616,23 @@
# may occurs when modifying persistent schema
continue
for group_or_rqlexpr in grantedto:
- if isinstance(group_or_rqlexpr, basestring):
+ if isinstance(group_or_rqlexpr, string_types):
# group
try:
yield ('SET X %s_permission Y WHERE Y eid %%(g)s, X eid %%(x)s' % action,
{'g': groupmap[group_or_rqlexpr]})
except KeyError:
- print ("WARNING: group %s used in permissions for %s was ignored because it doesn't exist."
- " You may want to add it into a precreate.py file" % (group_or_rqlexpr, erschema))
+ print("WARNING: group %s used in permissions for %s was ignored because it doesn't exist."
+ " You may want to add it into a precreate.py file" % (group_or_rqlexpr, erschema))
continue
else:
# rqlexpr
rqlexpr = group_or_rqlexpr
yield ('INSERT RQLExpression E: E expression %%(e)s, E exprtype %%(t)s, '
'E mainvars %%(v)s, X %s_permission E WHERE X eid %%(x)s' % action,
- {'e': unicode(rqlexpr.expression),
- 'v': unicode(','.join(sorted(rqlexpr.mainvars))),
- 't': unicode(rqlexpr.__class__.__name__)})
+ {'e': text_type(rqlexpr.expression),
+ 'v': text_type(','.join(sorted(rqlexpr.mainvars))),
+ 't': text_type(rqlexpr.__class__.__name__)})
# update functions
@@ -641,7 +644,7 @@
def updaterschema2rql(rschema, eid):
if rschema.rule:
yield ('SET X rule %(r)s WHERE X eid %(x)s',
- {'x': eid, 'r': unicode(rschema.rule)})
+ {'x': eid, 'r': text_type(rschema.rule)})
else:
relations, values = rschema_relations_values(rschema)
values['x'] = eid
--- a/server/serverconfig.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/serverconfig.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,12 +16,14 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""server.serverconfig definition"""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
import sys
from os.path import join, exists
-from StringIO import StringIO
+
+from six.moves import StringIO
import logilab.common.configuration as lgconfig
from logilab.common.decorators import cached
@@ -234,18 +236,19 @@
def bootstrap_cubes(self):
from logilab.common.textutils import splitstrip
- for line in file(join(self.apphome, 'bootstrap_cubes')):
- line = line.strip()
- if not line or line.startswith('#'):
- continue
- self.init_cubes(self.expand_cubes(splitstrip(line)))
- break
- else:
- # no cubes
- self.init_cubes(())
+ with open(join(self.apphome, 'bootstrap_cubes')) as f:
+ for line in f:
+ line = line.strip()
+ if not line or line.startswith('#'):
+ continue
+ self.init_cubes(self.expand_cubes(splitstrip(line)))
+ break
+ else:
+ # no cubes
+ self.init_cubes(())
def write_bootstrap_cubes_file(self, cubes):
- stream = file(join(self.apphome, 'bootstrap_cubes'), 'w')
+ stream = open(join(self.apphome, 'bootstrap_cubes'), 'w')
stream.write('# this is a generated file only used for bootstraping\n')
stream.write('# you should not have to edit this\n')
stream.write('%s\n' % ','.join(cubes))
@@ -276,7 +279,7 @@
assert len(self.sources_mode) == 1
if source.connect_for_migration:
return True
- print 'not connecting to source', source.uri, 'during migration'
+ print('not connecting to source', source.uri, 'during migration')
return False
if 'all' in self.sources_mode:
assert len(self.sources_mode) == 1
--- a/server/serverctl.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/serverctl.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,6 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""cubicweb-ctl commands and command handlers specific to the repository"""
+from __future__ import print_function
__docformat__ = 'restructuredtext en'
@@ -28,6 +29,9 @@
import logging
import subprocess
+from six import string_types
+from six.moves import input
+
from logilab.common import nullobject
from logilab.common.configuration import Configuration, merge_options
from logilab.common.shellutils import ASK, generate_password
@@ -55,27 +59,27 @@
driver = source['db-driver']
dbhelper = get_db_helper(driver)
if interactive:
- print '-> connecting to %s database' % driver,
+ print('-> connecting to %s database' % driver, end=' ')
if dbhost:
- print '%s@%s' % (dbname, dbhost),
+ print('%s@%s' % (dbname, dbhost), end=' ')
else:
- print dbname,
+ print(dbname, end=' ')
if dbhelper.users_support:
if not interactive or (not special_privs and source.get('db-user')):
user = source.get('db-user', os.environ.get('USER', ''))
if interactive:
- print 'as', user
+ print('as', user)
password = source.get('db-password')
else:
- print
+ print()
if special_privs:
- print 'WARNING'
+ print('WARNING')
print ('the user will need the following special access rights '
'on the database:')
- print special_privs
- print
+ print(special_privs)
+ print()
default_user = source.get('db-user', os.environ.get('USER', ''))
- user = raw_input('Connect as user ? [%r]: ' % default_user)
+ user = input('Connect as user ? [%r]: ' % default_user)
user = user.strip() or default_user
if user == source.get('db-user'):
password = source.get('db-password')
@@ -146,7 +150,7 @@
cnx = repoapi.connect(repo, login, password=pwd)
return repo, cnx
except AuthenticationError:
- print '-> Error: wrong user/password.'
+ print('-> Error: wrong user/password.')
# reset cubes else we'll have an assertion error on next retry
config._cubes = None
login, pwd = manager_userpasswd()
@@ -164,9 +168,9 @@
"""
config = self.config
if not automatic:
- print underline_title('Configuring the repository')
+ print(underline_title('Configuring the repository'))
config.input_config('email', inputlevel)
- print '\n'+underline_title('Configuring the sources')
+ print('\n'+underline_title('Configuring the sources'))
sourcesfile = config.sources_file()
# hack to make Method('default_instance_id') usable in db option defs
# (in native.py)
@@ -174,12 +178,12 @@
options=SOURCE_TYPES['native'].options)
if not automatic:
sconfig.input_config(inputlevel=inputlevel)
- print
+ print()
sourcescfg = {'system': sconfig}
if automatic:
# XXX modify a copy
password = generate_password()
- print '-> set administrator account to admin / %s' % password
+ print('-> set administrator account to admin / %s' % password)
USER_OPTIONS[1][1]['default'] = password
sconfig = Configuration(options=USER_OPTIONS)
else:
@@ -197,8 +201,8 @@
CWCTL.run(['db-create', '--config-level', str(inputlevel),
self.config.appid])
else:
- print ('-> nevermind, you can do it later with '
- '"cubicweb-ctl db-create %s".' % self.config.appid)
+ print('-> nevermind, you can do it later with '
+ '"cubicweb-ctl db-create %s".' % self.config.appid)
@contextmanager
@@ -242,26 +246,26 @@
with db_transaction(source, privilege='DROP SCHEMA') as cursor:
helper = get_db_helper(source['db-driver'])
helper.drop_schema(cursor, db_namespace)
- print '-> database schema %s dropped' % db_namespace
+ print('-> database schema %s dropped' % db_namespace)
def _drop_database(self, source):
dbname = source['db-name']
if source['db-driver'] == 'sqlite':
- print 'deleting database file %(db-name)s' % source
+ print('deleting database file %(db-name)s' % source)
os.unlink(source['db-name'])
- print '-> database %(db-name)s dropped.' % source
+ print('-> database %(db-name)s dropped.' % source)
else:
helper = get_db_helper(source['db-driver'])
with db_sys_transaction(source, privilege='DROP DATABASE') as cursor:
- print 'dropping database %(db-name)s' % source
+ print('dropping database %(db-name)s' % source)
cursor.execute('DROP DATABASE "%(db-name)s"' % source)
- print '-> database %(db-name)s dropped.' % source
+ print('-> database %(db-name)s dropped.' % source)
def _drop_user(self, source):
user = source['db-user'] or None
if user is not None:
with db_sys_transaction(source, privilege='DROP USER') as cursor:
- print 'dropping user %s' % user
+ print('dropping user %s' % user)
cursor.execute('DROP USER %s' % user)
def _cleanup_steps(self, source):
@@ -288,7 +292,7 @@
try:
step(source)
except Exception as exc:
- print 'ERROR', exc
+ print('ERROR', exc)
if ASK.confirm('An error occurred. Continue anyway?',
default_is_yes=False):
continue
@@ -357,7 +361,7 @@
ASK.confirm('Database %s already exists. Drop it?' % dbname)):
os.unlink(dbname)
elif self.config.create_db:
- print '\n'+underline_title('Creating the system database')
+ print('\n'+underline_title('Creating the system database'))
# connect on the dbms system base to create our base
dbcnx = _db_sys_cnx(source, 'CREATE/DROP DATABASE and / or USER',
interactive=not automatic)
@@ -368,17 +372,17 @@
if not helper.user_exists(cursor, user) and (automatic or \
ASK.confirm('Create db user %s ?' % user, default_is_yes=False)):
helper.create_user(source['db-user'], source.get('db-password'))
- print '-> user %s created.' % user
+ print('-> user %s created.' % user)
if dbname in helper.list_databases(cursor):
if automatic or ASK.confirm('Database %s already exists -- do you want to drop it ?' % dbname):
cursor.execute('DROP DATABASE "%s"' % dbname)
else:
- print ('you may want to run "cubicweb-ctl db-init '
- '--drop %s" manually to continue.' % config.appid)
+ print('you may want to run "cubicweb-ctl db-init '
+ '--drop %s" manually to continue.' % config.appid)
return
createdb(helper, source, dbcnx, cursor)
dbcnx.commit()
- print '-> database %s created.' % dbname
+ print('-> database %s created.' % dbname)
except BaseException:
dbcnx.rollback()
raise
@@ -400,13 +404,13 @@
try:
helper.create_language(cursor, extlang)
except Exception as exc:
- print '-> ERROR:', exc
- print '-> could not create language %s, some stored procedures might be unusable' % extlang
+ print('-> ERROR:', exc)
+ print('-> could not create language %s, some stored procedures might be unusable' % extlang)
cnx.rollback()
else:
cnx.commit()
- print '-> database for instance %s created and necessary extensions installed.' % appid
- print
+ print('-> database for instance %s created and necessary extensions installed.' % appid)
+ print()
if automatic:
CWCTL.run(['db-init', '--automatic', '--config-level', '0',
config.appid])
@@ -414,8 +418,8 @@
CWCTL.run(['db-init', '--config-level',
str(self.config.config_level), config.appid])
else:
- print ('-> nevermind, you can do it later with '
- '"cubicweb-ctl db-init %s".' % config.appid)
+ print('-> nevermind, you can do it later with '
+ '"cubicweb-ctl db-init %s".' % config.appid)
class InitInstanceCommand(Command):
@@ -452,7 +456,7 @@
def run(self, args):
check_options_consistency(self.config)
- print '\n'+underline_title('Initializing the system database')
+ print('\n'+underline_title('Initializing the system database'))
from cubicweb.server import init_repository
appid = args[0]
config = ServerConfiguration.config_for(appid)
@@ -503,10 +507,10 @@
used = set(n for n, in cnx.execute('Any SN WHERE S is CWSource, S name SN'))
cubes = repo.get_cubes()
while True:
- type = raw_input('source type (%s): '
+ type = input('source type (%s): '
% ', '.join(sorted(SOURCE_TYPES)))
if type not in SOURCE_TYPES:
- print '-> unknown source type, use one of the available types.'
+ print('-> unknown source type, use one of the available types.')
continue
sourcemodule = SOURCE_TYPES[type].module
if not sourcemodule.startswith('cubicweb.'):
@@ -520,23 +524,23 @@
continue
break
while True:
- parser = raw_input('parser type (%s): '
+ parser = input('parser type (%s): '
% ', '.join(sorted(repo.vreg['parsers'])))
if parser in repo.vreg['parsers']:
break
- print '-> unknown parser identifier, use one of the available types.'
+ print('-> unknown parser identifier, use one of the available types.')
while True:
- sourceuri = raw_input('source identifier (a unique name used to '
+ sourceuri = input('source identifier (a unique name used to '
'tell sources apart): ').strip()
if not sourceuri:
- print '-> mandatory.'
+ print('-> mandatory.')
else:
sourceuri = unicode(sourceuri, sys.stdin.encoding)
if sourceuri in used:
- print '-> uri already used, choose another one.'
+ print('-> uri already used, choose another one.')
else:
break
- url = raw_input('source URL (leave empty for none): ').strip()
+ url = input('source URL (leave empty for none): ').strip()
url = unicode(url) if url else None
# XXX configurable inputlevel
sconfig = ask_source_config(config, type, inputlevel=self.config.config_level)
@@ -583,10 +587,10 @@
cnx.rollback()
import traceback
traceback.print_exc()
- print '-> an error occurred:', ex
+ print('-> an error occurred:', ex)
else:
cnx.commit()
- print '-> rights granted to %s on instance %s.' % (appid, user)
+ print('-> rights granted to %s on instance %s.' % (appid, user))
class ResetAdminPasswordCommand(Command):
@@ -617,7 +621,7 @@
try:
adminlogin = sourcescfg['admin']['login']
except KeyError:
- print '-> Error: could not get cubicweb administrator login.'
+ print('-> Error: could not get cubicweb administrator login.')
sys.exit(1)
cnx = source_cnx(sourcescfg['system'])
driver = sourcescfg['system']['db-driver']
@@ -627,9 +631,9 @@
cursor.execute("SELECT * FROM cw_CWUser WHERE cw_login=%(l)s",
{'l': adminlogin})
if not cursor.fetchall():
- print ("-> error: admin user %r specified in sources doesn't exist "
- "in the database" % adminlogin)
- print " fix your sources file before running this command"
+ print("-> error: admin user %r specified in sources doesn't exist "
+ "in the database" % adminlogin)
+ print(" fix your sources file before running this command")
cnx.close()
sys.exit(1)
if self.config.password is None:
@@ -650,10 +654,10 @@
cnx.rollback()
import traceback
traceback.print_exc()
- print '-> an error occurred:', ex
+ print('-> an error occurred:', ex)
else:
cnx.commit()
- print '-> password reset, sources file regenerated.'
+ print('-> password reset, sources file regenerated.')
cnx.close()
@@ -666,17 +670,17 @@
if sudo:
dmpcmd = 'sudo %s' % (dmpcmd)
dmpcmd = 'ssh -t %s "%s"' % (host, dmpcmd)
- print dmpcmd
+ print(dmpcmd)
if os.system(dmpcmd):
raise ExecutionError('Error while dumping the database')
if output is None:
output = filename
cmd = 'scp %s:/tmp/%s %s' % (host, filename, output)
- print cmd
+ print(cmd)
if os.system(cmd):
raise ExecutionError('Error while retrieving the dump at /tmp/%s' % filename)
rmcmd = 'ssh -t %s "rm -f /tmp/%s"' % (host, filename)
- print rmcmd
+ print(rmcmd)
if os.system(rmcmd) and not ASK.confirm(
'An error occurred while deleting remote dump at /tmp/%s. '
'Continue anyway?' % filename):
@@ -686,7 +690,7 @@
def _local_dump(appid, output, format='native'):
config = ServerConfiguration.config_for(appid)
config.quick_start = True
- mih = config.migration_handler(connect=False, verbosity=1)
+ mih = config.migration_handler(verbosity=1)
mih.backup_database(output, askconfirm=False, format=format)
mih.shutdown()
@@ -696,28 +700,28 @@
config.quick_start = True
mih = config.migration_handler(connect=False, verbosity=1)
mih.restore_database(backupfile, drop, askconfirm=False, format=format)
- repo = mih.repo_connect()
+ repo = mih.repo
# version of the database
dbversions = repo.get_versions()
mih.shutdown()
if not dbversions:
- print "bad or missing version information in the database, don't upgrade file system"
+ print("bad or missing version information in the database, don't upgrade file system")
return
# version of installed software
eversion = dbversions['cubicweb']
status = instance_status(config, eversion, dbversions)
# * database version > installed software
if status == 'needsoftupgrade':
- print "** The database of %s is more recent than the installed software!" % config.appid
- print "** Upgrade your software, then migrate the database by running the command"
- print "** 'cubicweb-ctl upgrade %s'" % config.appid
+ print("** The database of %s is more recent than the installed software!" % config.appid)
+ print("** Upgrade your software, then migrate the database by running the command")
+ print("** 'cubicweb-ctl upgrade %s'" % config.appid)
return
# * database version < installed software, an upgrade will be necessary
# anyway, just rewrite vc.conf and warn user he has to upgrade
elif status == 'needapplupgrade':
- print "** The database of %s is older than the installed software." % config.appid
- print "** Migrate the database by running the command"
- print "** 'cubicweb-ctl upgrade %s'" % config.appid
+ print("** The database of %s is older than the installed software." % config.appid)
+ print("** Migrate the database by running the command")
+ print("** 'cubicweb-ctl upgrade %s'" % config.appid)
return
# * database version = installed software, database version = instance fs version
# ok!
@@ -732,12 +736,12 @@
try:
softversion = config.cube_version(cube)
except ConfigurationError:
- print '-> Error: no cube version information for %s, please check that the cube is installed.' % cube
+ print('-> Error: no cube version information for %s, please check that the cube is installed.' % cube)
continue
try:
applversion = vcconf[cube]
except KeyError:
- print '-> Error: no cube version information for %s in version configuration.' % cube
+ print('-> Error: no cube version information for %s in version configuration.' % cube)
continue
if softversion == applversion:
continue
@@ -883,7 +887,7 @@
_local_restore(destappid, output, not self.config.no_drop,
self.config.format)
if self.config.keep_dump:
- print '-> you can get the dump file at', output
+ print('-> you can get the dump file at', output)
else:
os.remove(output)
@@ -1001,9 +1005,9 @@
stats = source.pull_data(cnx, force=True, raise_on_error=True)
finally:
repo.shutdown()
- for key, val in stats.iteritems():
+ for key, val in stats.items():
if val:
- print key, ':', val
+ print(key, ':', val)
@@ -1019,7 +1023,7 @@
for p in ('read', 'add', 'update', 'delete'):
rule = perms.get(p)
if rule:
- perms[p] = tuple(str(x) if isinstance(x, basestring) else x
+ perms[p] = tuple(str(x) if isinstance(x, string_types) else x
for x in rule)
return perms, perms in defaultrelperms or perms in defaulteperms
@@ -1079,7 +1083,7 @@
if self.config.db is not None:
appcfg = ServerConfiguration.config_for(appid)
srccfg = appcfg.read_sources_file()
- for key, value in self.config.db.iteritems():
+ for key, value in self.config.db.items():
if '.' in key:
section, key = key.split('.', 1)
else:
--- a/server/session.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/session.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,6 +16,8 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""Repository users' and internal' sessions."""
+from __future__ import print_function
+
__docformat__ = "restructuredtext en"
import sys
@@ -25,6 +27,8 @@
import functools
from contextlib import contextmanager
+from six import text_type
+
from logilab.common.deprecation import deprecated
from logilab.common.textutils import unormalize
from logilab.common.registry import objectify_predicate
@@ -556,7 +560,7 @@
else:
relations_dict[rtype] = eids
self.repo.glob_add_relations(self, relations_dict)
- for edited in edited_entities.itervalues():
+ for edited in edited_entities.values():
self.repo.glob_update_entity(self, edited)
@@ -769,7 +773,7 @@
def transaction_uuid(self, set=True):
uuid = self.transaction_data.get('tx_uuid')
if set and uuid is None:
- self.transaction_data['tx_uuid'] = uuid = unicode(uuid4().hex)
+ self.transaction_data['tx_uuid'] = uuid = text_type(uuid4().hex)
self.repo.system_source.start_undoable_transaction(self, uuid)
return uuid
@@ -874,7 +878,7 @@
processed = []
self.commit_state = 'precommit'
if debug:
- print self.commit_state, '*' * 20
+ print(self.commit_state, '*' * 20)
try:
with self.running_hooks_ops():
while self.pending_operations:
@@ -882,7 +886,7 @@
operation.processed = 'precommit'
processed.append(operation)
if debug:
- print operation
+ print(operation)
operation.handle_event('precommit_event')
self.pending_operations[:] = processed
self.debug('precommit transaction %s done', self.connectionid)
@@ -899,11 +903,11 @@
# and revertcommit, that will be enough in mont case.
operation.failed = True
if debug:
- print self.commit_state, '*' * 20
+ print(self.commit_state, '*' * 20)
with self.running_hooks_ops():
for operation in reversed(processed):
if debug:
- print operation
+ print(operation)
try:
operation.handle_event('revertprecommit_event')
except BaseException:
@@ -917,12 +921,12 @@
self.cnxset.commit()
self.commit_state = 'postcommit'
if debug:
- print self.commit_state, '*' * 20
+ print(self.commit_state, '*' * 20)
with self.running_hooks_ops():
while self.pending_operations:
operation = self.pending_operations.pop(0)
if debug:
- print operation
+ print(operation)
operation.processed = 'postcommit'
try:
operation.handle_event('postcommit_event')
@@ -1004,7 +1008,7 @@
"""
def __init__(self, user, repo, cnxprops=None, _id=None):
- self.sessionid = _id or make_uid(unormalize(user.login).encode('UTF8'))
+ self.sessionid = _id or make_uid(unormalize(user.login))
self.user = user # XXX repoapi: deprecated and store only a login.
self.repo = repo
self.vreg = repo.vreg
--- a/server/sources/__init__.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/sources/__init__.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,13 +16,18 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""cubicweb server sources support"""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
from time import time
from logging import getLogger
+from base64 import b64decode
+
+from six import text_type
from logilab.common import configuration
+from logilab.common.textutils import unormalize
from logilab.common.deprecation import deprecated
from yams.schema import role_name
@@ -35,25 +40,25 @@
def dbg_st_search(uri, union, varmap, args, cachekey=None, prefix='rql for'):
if server.DEBUG & server.DBG_RQL:
global t
- print ' %s %s source: %s' % (prefix, uri, repr(union.as_string()))
+ print(' %s %s source: %s' % (prefix, uri, repr(union.as_string())))
t = time()
if varmap:
- print ' using varmap', varmap
+ print(' using varmap', varmap)
if server.DEBUG & server.DBG_MORE:
- print ' args', repr(args)
- print ' cache key', cachekey
- print ' solutions', ','.join(str(s.solutions)
- for s in union.children)
+ print(' args', repr(args))
+ print(' cache key', cachekey)
+ print(' solutions', ','.join(str(s.solutions)
+ for s in union.children))
# return true so it can be used as assertion (and so be killed by python -O)
return True
def dbg_results(results):
if server.DEBUG & server.DBG_RQL:
if len(results) > 10:
- print ' -->', results[:10], '...', len(results),
+ print(' -->', results[:10], '...', len(results), end=' ')
else:
- print ' -->', results,
- print 'time: ', time() - t
+ print(' -->', results, end=' ')
+ print('time: ', time() - t)
# return true so it can be used as assertion (and so be killed by python -O)
return True
@@ -104,7 +109,9 @@
self.public_config['use-cwuri-as-url'] = self.use_cwuri_as_url
self.remove_sensitive_information(self.public_config)
self.uri = source_config.pop('uri')
- set_log_methods(self, getLogger('cubicweb.sources.'+self.uri))
+ # unormalize the uri to keep non-ascii characters out of the logger's name,
+ # which would otherwise cause decoding errors on logging
+ set_log_methods(self, getLogger('cubicweb.sources.' + unormalize(text_type(self.uri))))
source_config.pop('type')
self.update_config(None, self.check_conf_dict(eid, source_config,
fail_if_unknown=False))
@@ -140,7 +147,7 @@
pass
@classmethod
- def check_conf_dict(cls, eid, confdict, _=unicode, fail_if_unknown=True):
+ def check_conf_dict(cls, eid, confdict, _=text_type, fail_if_unknown=True):
"""check configuration of source entity. Return config dict properly
typed with defaults set.
"""
@@ -157,7 +164,7 @@
try:
value = configuration._validate(value, optdict, optname)
except Exception as ex:
- msg = unicode(ex) # XXX internationalization
+ msg = text_type(ex) # XXX internationalization
raise ValidationError(eid, {role_name('config', 'subject'): msg})
processed[optname] = value
# cw < 3.10 bw compat
@@ -199,6 +206,12 @@
else:
self.urls = []
+ @staticmethod
+ def decode_extid(extid):
+ if extid is None:
+ return extid
+ return b64decode(extid)
+
# source initialization / finalization #####################################
def set_schema(self, schema):
--- a/server/sources/datafeed.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/sources/datafeed.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,13 +19,16 @@
database
"""
-import urllib2
-import StringIO
+from io import BytesIO
from os.path import exists
from datetime import datetime, timedelta
-from base64 import b64decode
-from cookielib import CookieJar
-import urlparse
+
+from six import text_type
+from six.moves.urllib.parse import urlparse
+from six.moves.urllib.request import Request, build_opener, HTTPCookieProcessor
+from six.moves.urllib.error import HTTPError
+from six.moves.http_cookiejar import CookieJar
+
from lxml import etree
from cubicweb import RegistryNotFound, ObjectNotFound, ValidationError, UnknownEid
@@ -282,7 +285,7 @@
sql = ('SELECT extid, eid, type FROM entities, cw_source_relation '
'WHERE entities.eid=cw_source_relation.eid_from '
'AND cw_source_relation.eid_to=%s' % self.eid)
- return dict((b64decode(uri), (eid, type))
+ return dict((self.decode_extid(uri), (eid, type))
for uri, eid, type in cnx.system_sql(sql).fetchall())
def init_import_log(self, cnx, **kwargs):
@@ -328,7 +331,7 @@
For http URLs, it will try to find a cwclientlib config entry
(if available) and use it as requester.
"""
- purl = urlparse.urlparse(url)
+ purl = urlparse(url)
if purl.scheme == 'file':
return URLLibResponseAdapter(open(url[7:]), url)
@@ -344,7 +347,7 @@
self.source.info('Using cwclientlib for %s' % url)
resp = cnx.get(url)
resp.raise_for_status()
- return URLLibResponseAdapter(StringIO.StringIO(resp.text), url)
+ return URLLibResponseAdapter(BytesIO(resp.content), url)
except (ImportError, ValueError, EnvironmentError) as exc:
# ImportError: not available
# ValueError: no config entry found
@@ -354,11 +357,11 @@
# no chance with cwclientlib, fall back to former implementation
if purl.scheme in ('http', 'https'):
self.source.info('GET %s', url)
- req = urllib2.Request(url)
+ req = Request(url)
return _OPENER.open(req, timeout=self.source.http_timeout)
# url is probably plain content
- return URLLibResponseAdapter(StringIO.StringIO(url), url)
+ return URLLibResponseAdapter(BytesIO(url.encode('ascii')), url)
def add_schema_config(self, schemacfg, checkonly=False):
"""added CWSourceSchemaConfig, modify mapping accordingly"""
@@ -388,11 +391,11 @@
else:
source = self.source
sourceparams['parser'] = self
- if isinstance(uri, unicode):
+ if isinstance(uri, text_type):
uri = uri.encode('utf-8')
try:
- eid = cnx.repo.extid2eid(source, str(uri), etype, cnx,
- sourceparams=sourceparams)
+ eid = cnx.repo.extid2eid(source, uri, etype, cnx,
+ sourceparams=sourceparams)
except ValidationError as ex:
if raise_on_error:
raise
@@ -447,10 +450,10 @@
def handle_deletion(self, config, cnx, myuris):
if config['delete-entities'] and myuris:
byetype = {}
- for extid, (eid, etype) in myuris.iteritems():
+ for extid, (eid, etype) in myuris.items():
if self.is_deleted(extid, etype, eid):
byetype.setdefault(etype, []).append(str(eid))
- for etype, eids in byetype.iteritems():
+ for etype, eids in byetype.items():
self.warning('delete %s %s entities', len(eids), etype)
cnx.execute('DELETE %s X WHERE X eid IN (%s)'
% (etype, ','.join(eids)))
@@ -463,7 +466,7 @@
self.notify_checked(entity)
mdate = attrs.get('modification_date')
if not mdate or mdate > entity.modification_date:
- attrs = dict( (k, v) for k, v in attrs.iteritems()
+ attrs = dict( (k, v) for k, v in attrs.items()
if v != getattr(entity, k))
if attrs:
entity.cw_set(**attrs)
@@ -530,10 +533,10 @@
self.source.debug(str(exc))
# no chance with cwclientlib, fall back to former implementation
- if urlparse.urlparse(url).scheme in ('http', 'https'):
+ if urlparse(url).scheme in ('http', 'https'):
try:
_OPENER.open(url, timeout=self.source.http_timeout)
- except urllib2.HTTPError as ex:
+ except HTTPError as ex:
if ex.code == 404:
return True
return False
@@ -555,15 +558,12 @@
def getcode(self):
return self.code
- def info(self):
- from mimetools import Message
- return Message(StringIO.StringIO())
# use a cookie enabled opener to use session cookie if any
-_OPENER = urllib2.build_opener()
+_OPENER = build_opener()
try:
from logilab.common import urllib2ext
_OPENER.add_handler(urllib2ext.HTTPGssapiAuthHandler())
except ImportError: # python-kerberos not available
pass
-_OPENER.add_handler(urllib2.HTTPCookieProcessor(CookieJar()))
+_OPENER.add_handler(HTTPCookieProcessor(CookieJar()))
--- a/server/sources/ldapfeed.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/sources/ldapfeed.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,14 +17,13 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""cubicweb ldap feed source"""
-from __future__ import division # XXX why?
+from __future__ import division # XXX why?
from datetime import datetime
-import ldap
-from ldap.ldapobject import ReconnectLDAPObject
-from ldap.filter import filter_format
-from ldapurl import LDAPUrl
+from six import PY2, string_types
+
+import ldap3
from logilab.common.configuration import merge_options
@@ -32,15 +31,15 @@
from cubicweb.server import utils
from cubicweb.server.sources import datafeed
-_ = unicode
+from cubicweb import _
# search scopes
-BASE = ldap.SCOPE_BASE
-ONELEVEL = ldap.SCOPE_ONELEVEL
-SUBTREE = ldap.SCOPE_SUBTREE
-LDAP_SCOPES = {'BASE': ldap.SCOPE_BASE,
- 'ONELEVEL': ldap.SCOPE_ONELEVEL,
- 'SUBTREE': ldap.SCOPE_SUBTREE}
+BASE = ldap3.SEARCH_SCOPE_BASE_OBJECT
+ONELEVEL = ldap3.SEARCH_SCOPE_SINGLE_LEVEL
+SUBTREE = ldap3.SEARCH_SCOPE_WHOLE_SUBTREE
+LDAP_SCOPES = {'BASE': BASE,
+ 'ONELEVEL': ONELEVEL,
+ 'SUBTREE': SUBTREE}
# map ldap protocol to their standard port
PROTO_PORT = {'ldap': 389,
@@ -49,6 +48,15 @@
}
+def replace_filter(s):
+ s = s.replace('\\', '\\5c')
+ s = s.replace('*', '\\2A')
+ s = s.replace('(', '\\28')
+ s = s.replace(')', '\\29')
+ s = s.replace('\0', '\\00')
+ return s
+
+
class LDAPFeedSource(datafeed.DataFeedSource):
"""LDAP feed source: unlike ldapuser source, this source is copy based and
will import ldap content (beside passwords for authentication) into the
@@ -61,7 +69,7 @@
('auth-mode',
{'type' : 'choice',
'default': 'simple',
- 'choices': ('simple', 'cram_md5', 'digest_md5', 'gssapi'),
+ 'choices': ('simple', 'digest_md5', 'gssapi'),
'help': 'authentication mode used to authenticate user to the ldap.',
'group': 'ldap-source', 'level': 3,
}),
@@ -183,8 +191,8 @@
self.user_default_groups = typedconfig['user-default-group']
self.user_attrs = {'dn': 'eid', 'modifyTimestamp': 'modification_date'}
self.user_attrs.update(typedconfig['user-attrs-map'])
- self.user_rev_attrs = dict((v, k) for k, v in self.user_attrs.iteritems())
- self.base_filters = [filter_format('(%s=%s)', ('objectClass', o))
+ self.user_rev_attrs = dict((v, k) for k, v in self.user_attrs.items())
+ self.base_filters = ['(objectclass=%s)' % replace_filter(o)
for o in typedconfig['user-classes']]
if typedconfig['user-filter']:
self.base_filters.append(typedconfig['user-filter'])
@@ -193,8 +201,8 @@
self.group_attrs = typedconfig['group-attrs-map']
self.group_attrs = {'dn': 'eid', 'modifyTimestamp': 'modification_date'}
self.group_attrs.update(typedconfig['group-attrs-map'])
- self.group_rev_attrs = dict((v, k) for k, v in self.group_attrs.iteritems())
- self.group_base_filters = [filter_format('(%s=%s)', ('objectClass', o))
+ self.group_rev_attrs = dict((v, k) for k, v in self.group_attrs.items())
+ self.group_base_filters = ['(objectClass=%s)' % replace_filter(o)
for o in typedconfig['group-classes']]
if typedconfig['group-filter']:
self.group_base_filters.append(typedconfig['group-filter'])
@@ -215,9 +223,11 @@
def connection_info(self):
assert len(self.urls) == 1, self.urls
protocol, hostport = self.urls[0].split('://')
- if protocol != 'ldapi' and not ':' in hostport:
- hostport = '%s:%s' % (hostport, PROTO_PORT[protocol])
- return protocol, hostport
+ if protocol != 'ldapi' and ':' in hostport:
+ host, port = hostport.rsplit(':', 1)
+ else:
+ host, port = hostport, PROTO_PORT[protocol]
+ return protocol, host, port
def authenticate(self, cnx, login, password=None, **kwargs):
"""return CWUser eid for the given login/password if this account is
@@ -232,59 +242,43 @@
# You get Authenticated as: 'NT AUTHORITY\ANONYMOUS LOGON'.
# we really really don't want that
raise AuthenticationError()
- searchfilter = [filter_format('(%s=%s)', (self.user_login_attr, login))]
+ searchfilter = ['(%s=%s)' % (replace_filter(self.user_login_attr), replace_filter(login))]
searchfilter.extend(self.base_filters)
searchstr = '(&%s)' % ''.join(searchfilter)
# first search the user
try:
user = self._search(cnx, self.user_base_dn,
self.user_base_scope, searchstr)[0]
- except (IndexError, ldap.SERVER_DOWN):
+ except IndexError:
# no such user
raise AuthenticationError()
# check password by establishing a (unused) connection
try:
self._connect(user, password)
- except ldap.LDAPError as ex:
+ except ldap3.LDAPException as ex:
# Something went wrong, most likely bad credentials
self.info('while trying to authenticate %s: %s', user, ex)
raise AuthenticationError()
except Exception:
self.error('while trying to authenticate %s', user, exc_info=True)
raise AuthenticationError()
- eid = self.repo.extid2eid(self, user['dn'], 'CWUser', cnx, insert=False)
+ eid = self.repo.extid2eid(self, user['dn'].encode('ascii'), 'CWUser', cnx, insert=False)
if eid < 0:
# user has been moved away from this source
raise AuthenticationError()
return eid
def _connect(self, user=None, userpwd=None):
- protocol, hostport = self.connection_info()
- self.info('connecting %s://%s as %s', protocol, hostport,
+ protocol, host, port = self.connection_info()
+ self.info('connecting %s://%s:%s as %s', protocol, host, port,
user and user['dn'] or 'anonymous')
- # don't require server certificate when using ldaps (will
- # enable self signed certs)
- ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
- url = LDAPUrl(urlscheme=protocol, hostport=hostport)
- conn = ReconnectLDAPObject(url.initializeUrl())
- # Set the protocol version - version 3 is preferred
- try:
- conn.set_option(ldap.OPT_PROTOCOL_VERSION, ldap.VERSION3)
- except ldap.LDAPError: # Invalid protocol version, fall back safely
- conn.set_option(ldap.OPT_PROTOCOL_VERSION, ldap.VERSION2)
- # Deny auto-chasing of referrals to be safe, we handle them instead
- # Required for AD
- try:
- conn.set_option(ldap.OPT_REFERRALS, 0)
- except ldap.LDAPError: # Cannot set referrals, so do nothing
- pass
- #conn.set_option(ldap.OPT_NETWORK_TIMEOUT, conn_timeout)
- #conn.timeout = op_timeout
+ server = ldap3.Server(host, port=int(port))
+ conn = ldap3.Connection(server, user=user and user['dn'], client_strategy=ldap3.STRATEGY_SYNC_RESTARTABLE, auto_referrals=False)
# Now bind with the credentials given. Let exceptions propagate out.
if user is None:
# XXX always use simple bind for data connection
if not self.cnx_dn:
- conn.simple_bind_s(self.cnx_dn, self.cnx_pwd)
+ conn.bind()
else:
self._authenticate(conn, {'dn': self.cnx_dn}, self.cnx_pwd)
else:
@@ -294,25 +288,22 @@
return conn
def _auth_simple(self, conn, user, userpwd):
- conn.simple_bind_s(user['dn'], userpwd)
-
- def _auth_cram_md5(self, conn, user, userpwd):
- from ldap import sasl
- auth_token = sasl.cram_md5(user['dn'], userpwd)
- conn.sasl_interactive_bind_s('', auth_token)
+ conn.authentication = ldap3.AUTH_SIMPLE
+ conn.user = user['dn']
+ conn.password = userpwd
+ conn.bind()
def _auth_digest_md5(self, conn, user, userpwd):
- from ldap import sasl
- auth_token = sasl.digest_md5(user['dn'], userpwd)
- conn.sasl_interactive_bind_s('', auth_token)
+ conn.authentication = ldap3.AUTH_SASL
+ conn.sasl_mechanism = 'DIGEST-MD5'
+ # realm, user, password, authz-id
+ conn.sasl_credentials = (None, user['dn'], userpwd, None)
+ conn.bind()
def _auth_gssapi(self, conn, user, userpwd):
- # print XXX not proper sasl/gssapi
- import kerberos
- if not kerberos.checkPassword(user[self.user_login_attr], userpwd):
- raise Exception('BAD login / mdp')
- #from ldap import sasl
- #conn.sasl_interactive_bind_s('', sasl.gssapi())
+ conn.authentication = ldap3.AUTH_SASL
+ conn.sasl_mechanism = 'GSSAPI'
+ conn.bind()
def _search(self, cnx, base, scope,
searchstr='(objectClass=*)', attrs=()):
@@ -322,37 +313,15 @@
if self._conn is None:
self._conn = self._connect()
ldapcnx = self._conn
- try:
- res = ldapcnx.search_s(base, scope, searchstr, attrs)
- except ldap.PARTIAL_RESULTS:
- res = ldapcnx.result(all=0)[1]
- except ldap.NO_SUCH_OBJECT:
- self.info('ldap NO SUCH OBJECT %s %s %s', base, scope, searchstr)
- self._process_no_such_object(cnx, base)
+ if not ldapcnx.search(base, searchstr, search_scope=scope, attributes=attrs):
return []
- # except ldap.REFERRAL as e:
- # ldapcnx = self.handle_referral(e)
- # try:
- # res = ldapcnx.search_s(base, scope, searchstr, attrs)
- # except ldap.PARTIAL_RESULTS:
- # res_type, res = ldapcnx.result(all=0)
result = []
- for rec_dn, rec_dict in res:
- # When used against Active Directory, "rec_dict" may not be
- # be a dictionary in some cases (instead, it can be a list)
- #
- # An example of a useless "res" entry that can be ignored
- # from AD is
- # (None, ['ldap://ForestDnsZones.PORTAL.LOCAL/DC=ForestDnsZones,DC=PORTAL,DC=LOCAL'])
- # This appears to be some sort of internal referral, but
- # we can't handle it, so we need to skip over it.
- try:
- items = rec_dict.iteritems()
- except AttributeError:
+ for rec in ldapcnx.response:
+ if rec['type'] != 'searchResEntry':
continue
- else:
- itemdict = self._process_ldap_item(rec_dn, items)
- result.append(itemdict)
+ items = rec['attributes'].items()
+ itemdict = self._process_ldap_item(rec['dn'], items)
+ result.append(itemdict)
self.debug('ldap built results %s', len(result))
return result
@@ -363,20 +332,21 @@
if self.user_attrs.get(key) == 'upassword': # XXx better password detection
value = value[0].encode('utf-8')
# we only support ldap_salted_sha1 for ldap sources, see: server/utils.py
- if not value.startswith('{SSHA}'):
+ if not value.startswith(b'{SSHA}'):
value = utils.crypt_password(value)
itemdict[key] = Binary(value)
elif self.user_attrs.get(key) == 'modification_date':
itemdict[key] = datetime.strptime(value[0], '%Y%m%d%H%M%SZ')
else:
- value = [unicode(val, 'utf-8', 'replace') for val in value]
+ if PY2:
+ value = [unicode(val, 'utf-8', 'replace') for val in value]
if len(value) == 1:
itemdict[key] = value = value[0]
else:
itemdict[key] = value
# we expect memberUid to be a list of user ids, make sure of it
member = self.group_rev_attrs['member']
- if isinstance(itemdict.get(member), basestring):
+ if isinstance(itemdict.get(member), string_types):
itemdict[member] = [itemdict[member]]
return itemdict
--- a/server/sources/native.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/sources/native.py Thu Nov 12 10:52:28 2015 +0100
@@ -23,13 +23,13 @@
string. This is because it should actually be Bytes but we want an index on
it for fast querying.
"""
+from __future__ import print_function
+
__docformat__ = "restructuredtext en"
-from cPickle import loads, dumps
-import cPickle as pickle
from threading import Lock
from datetime import datetime
-from base64 import b64decode, b64encode
+from base64 import b64encode
from contextlib import contextmanager
from os.path import basename
import re
@@ -38,6 +38,9 @@
import logging
import sys
+from six import PY2, text_type, binary_type, string_types
+from six.moves import range, cPickle as pickle
+
from logilab.common.decorators import cached, clear_cache
from logilab.common.configuration import Method
from logilab.common.shellutils import getlogin
@@ -76,12 +79,12 @@
it's a function just so that it shows up in profiling
"""
if server.DEBUG & server.DBG_SQL:
- print 'exec', query, args
+ print('exec', query, args)
try:
self.cu.execute(str(query), args)
except Exception as ex:
- print "sql: %r\n args: %s\ndbms message: %r" % (
- query, args, ex.args[0])
+ print("sql: %r\n args: %s\ndbms message: %r" % (
+ query, args, ex.args[0]))
raise
def fetchall(self):
@@ -134,7 +137,7 @@
Type of _UndoException message must be `unicode` by design in CubicWeb.
"""
- assert isinstance(self.args[0], unicode)
+ assert isinstance(self.args[0], text_type)
return self.args[0]
@@ -556,7 +559,7 @@
sql, qargs, cbs = self._rql_sqlgen.generate(union, args, varmap)
self._cache[cachekey] = sql, qargs, cbs
args = self.merge_args(args, qargs)
- assert isinstance(sql, basestring), repr(sql)
+ assert isinstance(sql, string_types), repr(sql)
cursor = self.doexec(cnx, sql, args)
results = self.process_result(cursor, cnx, cbs)
assert dbg_results(results)
@@ -611,7 +614,7 @@
self.doexec(cnx, sql, attrs)
if cnx.ertype_supports_undo(entity.cw_etype):
self._record_tx_action(cnx, 'tx_entity_actions', u'C',
- etype=unicode(entity.cw_etype), eid=entity.eid)
+ etype=text_type(entity.cw_etype), eid=entity.eid)
def update_entity(self, cnx, entity):
"""replace an entity in the source"""
@@ -620,8 +623,8 @@
if cnx.ertype_supports_undo(entity.cw_etype):
changes = self._save_attrs(cnx, entity, attrs)
self._record_tx_action(cnx, 'tx_entity_actions', u'U',
- etype=unicode(entity.cw_etype), eid=entity.eid,
- changes=self._binary(dumps(changes)))
+ etype=text_type(entity.cw_etype), eid=entity.eid,
+ changes=self._binary(pickle.dumps(changes)))
sql = self.sqlgen.update(SQL_PREFIX + entity.cw_etype, attrs,
['cw_eid'])
self.doexec(cnx, sql, attrs)
@@ -635,8 +638,8 @@
if (r.final or r.inlined) and not r in VIRTUAL_RTYPES]
changes = self._save_attrs(cnx, entity, attrs)
self._record_tx_action(cnx, 'tx_entity_actions', u'D',
- etype=unicode(entity.cw_etype), eid=entity.eid,
- changes=self._binary(dumps(changes)))
+ etype=text_type(entity.cw_etype), eid=entity.eid,
+ changes=self._binary(pickle.dumps(changes)))
attrs = {'cw_eid': entity.eid}
sql = self.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs)
self.doexec(cnx, sql, attrs)
@@ -646,7 +649,7 @@
self._add_relations(cnx, rtype, [(subject, object)], inlined)
if cnx.ertype_supports_undo(rtype):
self._record_tx_action(cnx, 'tx_relation_actions', u'A',
- eid_from=subject, rtype=unicode(rtype), eid_to=object)
+ eid_from=subject, rtype=text_type(rtype), eid_to=object)
def add_relations(self, cnx, rtype, subj_obj_list, inlined=False):
"""add a relations to the source"""
@@ -654,7 +657,7 @@
if cnx.ertype_supports_undo(rtype):
for subject, object in subj_obj_list:
self._record_tx_action(cnx, 'tx_relation_actions', u'A',
- eid_from=subject, rtype=unicode(rtype), eid_to=object)
+ eid_from=subject, rtype=text_type(rtype), eid_to=object)
def _add_relations(self, cnx, rtype, subj_obj_list, inlined=False):
"""add a relation to the source"""
@@ -671,7 +674,7 @@
etypes[etype].append((subject, object))
else:
etypes[etype] = [(subject, object)]
- for subj_etype, subj_obj_list in etypes.iteritems():
+ for subj_etype, subj_obj_list in etypes.items():
attrs = [{'cw_eid': subject, SQL_PREFIX + rtype: object}
for subject, object in subj_obj_list]
sql.append((self.sqlgen.update(SQL_PREFIX + etype, attrs[0],
@@ -686,7 +689,7 @@
self._delete_relation(cnx, subject, rtype, object, rschema.inlined)
if cnx.ertype_supports_undo(rtype):
self._record_tx_action(cnx, 'tx_relation_actions', u'R',
- eid_from=subject, rtype=unicode(rtype), eid_to=object)
+ eid_from=subject, rtype=text_type(rtype), eid_to=object)
def _delete_relation(self, cnx, subject, rtype, object, inlined=False):
"""delete a relation from the source"""
@@ -708,7 +711,7 @@
"""
cursor = cnx.cnxset.cu
if server.DEBUG & server.DBG_SQL:
- print 'exec', query, args, cnx.cnxset.cnx
+ print('exec', query, args, cnx.cnxset.cnx)
try:
# str(query) to avoid error if it's a unicode string
cursor.execute(str(query), args)
@@ -767,7 +770,7 @@
it's a function just so that it shows up in profiling
"""
if server.DEBUG & server.DBG_SQL:
- print 'execmany', query, 'with', len(args), 'arguments', cnx.cnxset.cnx
+ print('execmany', query, 'with', len(args), 'arguments', cnx.cnxset.cnx)
cursor = cnx.cnxset.cu
try:
# str(query) to avoid error if it's a unicode string
@@ -852,10 +855,9 @@
"""return a tuple (type, extid, source) for the entity with id <eid>"""
sql = 'SELECT type, extid, asource FROM entities WHERE eid=%s' % eid
res = self._eid_type_source(cnx, eid, sql)
- if res[-2] is not None:
- if not isinstance(res, list):
- res = list(res)
- res[-2] = b64decode(res[-2])
+ if not isinstance(res, list):
+ res = list(res)
+ res[-2] = self.decode_extid(res[-2])
return res
def eid_type_source_pre_131(self, cnx, eid):
@@ -864,15 +866,14 @@
res = self._eid_type_source(cnx, eid, sql)
if not isinstance(res, list):
res = list(res)
- if res[-1] is not None:
- res[-1] = b64decode(res[-1])
+ res[-1] = self.decode_extid(res[-1])
res.append("system")
return res
def extid2eid(self, cnx, extid):
"""get eid from an external id. Return None if no record found."""
- assert isinstance(extid, str)
- args = {'x': b64encode(extid)}
+ assert isinstance(extid, binary_type)
+ args = {'x': b64encode(extid).decode('ascii')}
cursor = self.doexec(cnx,
'SELECT eid FROM entities WHERE extid=%(x)s',
args)
@@ -911,10 +912,10 @@
assert cnx.cnxset is not None
# begin by inserting eid/type/source/extid into the entities table
if extid is not None:
- assert isinstance(extid, str)
- extid = b64encode(extid)
- attrs = {'type': unicode(entity.cw_etype), 'eid': entity.eid, 'extid': extid and unicode(extid),
- 'asource': unicode(source.uri)}
+ assert isinstance(extid, binary_type)
+ extid = b64encode(extid).decode('ascii')
+ attrs = {'type': text_type(entity.cw_etype), 'eid': entity.eid, 'extid': extid,
+ 'asource': text_type(source.uri)}
self._handle_insert_entity_sql(cnx, self.sqlgen.insert('entities', attrs), attrs)
# insert core relations: is, is_instance_of and cw_source
try:
@@ -975,13 +976,13 @@
if actionfilters.pop('public', True):
genrestr['txa_public'] = True
# put additional filters in trarestr and/or tearestr
- for key, val in actionfilters.iteritems():
+ for key, val in actionfilters.items():
if key == 'etype':
# filtering on etype implies filtering on entity actions
# only, and with no eid specified
assert actionfilters.get('action', 'C') in 'CUD'
assert not 'eid' in actionfilters
- tearestr['etype'] = unicode(val)
+ tearestr['etype'] = text_type(val)
elif key == 'eid':
# eid filter may apply to 'eid' of tx_entity_actions or to
# 'eid_from' OR 'eid_to' of tx_relation_actions
@@ -992,10 +993,10 @@
trarestr['eid_to'] = val
elif key == 'action':
if val in 'CUD':
- tearestr['txa_action'] = unicode(val)
+ tearestr['txa_action'] = text_type(val)
else:
assert val in 'AR'
- trarestr['txa_action'] = unicode(val)
+ trarestr['txa_action'] = text_type(val)
else:
raise AssertionError('unknow filter %s' % key)
assert trarestr or tearestr, "can't only filter on 'public'"
@@ -1029,11 +1030,11 @@
def tx_info(self, cnx, txuuid):
"""See :class:`cubicweb.repoapi.Connection.transaction_info`"""
- return tx.Transaction(cnx, txuuid, *self._tx_info(cnx, unicode(txuuid)))
+ return tx.Transaction(cnx, txuuid, *self._tx_info(cnx, text_type(txuuid)))
def tx_actions(self, cnx, txuuid, public):
"""See :class:`cubicweb.repoapi.Connection.transaction_actions`"""
- txuuid = unicode(txuuid)
+ txuuid = text_type(txuuid)
self._tx_info(cnx, txuuid)
restr = {'tx_uuid': txuuid}
if public:
@@ -1044,7 +1045,7 @@
'etype', 'eid', 'changes'))
with cnx.ensure_cnx_set:
cu = self.doexec(cnx, sql, restr)
- actions = [tx.EntityAction(a,p,o,et,e,c and loads(self.binary_to_str(c)))
+ actions = [tx.EntityAction(a,p,o,et,e,c and pickle.loads(self.binary_to_str(c)))
for a,p,o,et,e,c in cu.fetchall()]
sql = self.sqlgen.select('tx_relation_actions', restr,
('txa_action', 'txa_public', 'txa_order',
@@ -1168,8 +1169,8 @@
elif eschema.destination(rtype) in ('Bytes', 'Password'):
changes[column] = self._binary(value)
edited[rtype] = Binary(value)
- elif isinstance(value, str):
- edited[rtype] = unicode(value, cnx.encoding, 'replace')
+ elif PY2 and isinstance(value, str):
+ edited[rtype] = text_type(value, cnx.encoding, 'replace')
else:
edited[rtype] = value
# This must only be done after init_entitiy_caches : defered in calling functions
@@ -1210,14 +1211,14 @@
try:
sentity, oentity, rdef = _undo_rel_info(cnx, subj, rtype, obj)
except _UndoException as ex:
- errors.append(unicode(ex))
+ errors.append(text_type(ex))
else:
for role, entity in (('subject', sentity),
('object', oentity)):
try:
_undo_check_relation_target(entity, rdef, role)
except _UndoException as ex:
- errors.append(unicode(ex))
+ errors.append(text_type(ex))
continue
if not errors:
self.repo.hm.call_hooks('before_add_relation', cnx,
@@ -1293,7 +1294,7 @@
try:
sentity, oentity, rdef = _undo_rel_info(cnx, subj, rtype, obj)
except _UndoException as ex:
- errors.append(unicode(ex))
+ errors.append(text_type(ex))
else:
rschema = rdef.rtype
if rschema.inlined:
@@ -1544,7 +1545,7 @@
SQL_PREFIX + 'CWUser',
SQL_PREFIX + 'upassword',
SQL_PREFIX + 'login'),
- {'newhash': self.source._binary(newhash),
+ {'newhash': self.source._binary(newhash.encode('ascii')),
'login': login})
cnx.commit()
return user
@@ -1692,7 +1693,7 @@
self.logger.info('number of rows: %d', rowcount)
blocksize = self.blocksize
if rowcount > 0:
- for i, start in enumerate(xrange(0, rowcount, blocksize)):
+ for i, start in enumerate(range(0, rowcount, blocksize)):
rows = list(itertools.islice(rows_iterator, blocksize))
serialized = self._serialize(table, columns, rows)
archive.writestr('tables/%s.%04d' % (table, i), serialized)
@@ -1713,7 +1714,7 @@
return tuple(columns), rows
def _serialize(self, name, columns, rows):
- return dumps((name, columns, rows), pickle.HIGHEST_PROTOCOL)
+ return pickle.dumps((name, columns, rows), pickle.HIGHEST_PROTOCOL)
def restore(self, backupfile):
archive = zipfile.ZipFile(backupfile, 'r', allowZip64=True)
@@ -1761,7 +1762,7 @@
return sequences, numranges, tables, table_chunks
def read_sequence(self, archive, seq):
- seqname, columns, rows = loads(archive.read('sequences/%s' % seq))
+ seqname, columns, rows = pickle.loads(archive.read('sequences/%s' % seq))
assert seqname == seq
assert len(rows) == 1
assert len(rows[0]) == 1
@@ -1771,7 +1772,7 @@
self.cnx.commit()
def read_numrange(self, archive, numrange):
- rangename, columns, rows = loads(archive.read('numrange/%s' % numrange))
+ rangename, columns, rows = pickle.loads(archive.read('numrange/%s' % numrange))
assert rangename == numrange
assert len(rows) == 1
assert len(rows[0]) == 1
@@ -1786,7 +1787,7 @@
self.cnx.commit()
row_count = 0
for filename in filenames:
- tablename, columns, rows = loads(archive.read(filename))
+ tablename, columns, rows = pickle.loads(archive.read(filename))
assert tablename == table
if not rows:
continue
--- a/server/sources/rql2sql.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/sources/rql2sql.py Thu Nov 12 10:52:28 2015 +0100
@@ -51,6 +51,9 @@
import threading
+from six import PY2
+from six.moves import range
+
from logilab.database import FunctionDescr, SQL_FUNCTIONS_REGISTRY
from rql import BadRQLQuery, CoercionError
@@ -172,7 +175,7 @@
existssols = {}
unstable = set()
invariants = {}
- for vname, var in rqlst.defined_vars.iteritems():
+ for vname, var in rqlst.defined_vars.items():
vtype = newsols[0][vname]
if var._q_invariant or vname in varmap:
# remove invariant variable from solutions to remove duplicates
@@ -187,13 +190,13 @@
thisexistssols = [newsols[0]]
thisexistsvars = set()
existssols[var.scope] = thisexistssols, thisexistsvars
- for i in xrange(len(newsols)-1, 0, -1):
+ for i in range(len(newsols)-1, 0, -1):
if vtype != newsols[i][vname]:
thisexistssols.append(newsols.pop(i))
thisexistsvars.add(vname)
else:
# remember unstable variables
- for i in xrange(1, len(newsols)):
+ for i in range(1, len(newsols)):
if vtype != newsols[i][vname]:
unstable.add(vname)
if invariants:
@@ -205,11 +208,11 @@
newsols = newsols_
# reinsert solutions for invariants
for sol in newsols:
- for invvar, vartype in invariants[id(sol)].iteritems():
+ for invvar, vartype in invariants[id(sol)].items():
sol[invvar] = vartype
for sol in existssols:
try:
- for invvar, vartype in invariants[id(sol)].iteritems():
+ for invvar, vartype in invariants[id(sol)].items():
sol[invvar] = vartype
except KeyError:
continue
@@ -257,7 +260,7 @@
append(term)
if groups:
for vref in term.iget_nodes(VariableRef):
- if not vref in groups:
+ if not any(vref.is_equivalent(g) for g in groups):
groups.append(vref)
def fix_selection_and_group(rqlst, needwrap, selectsortterms,
@@ -273,7 +276,7 @@
(isinstance(term, Function) and
get_func_descr(term.name).aggregat)):
for vref in term.iget_nodes(VariableRef):
- if not vref in groupvrefs:
+ if not any(vref.is_equivalent(group) for group in groupvrefs):
groups.append(vref)
groupvrefs.append(vref)
if needwrap and (groups or having):
@@ -364,7 +367,7 @@
self.done = set()
self.tables = self.subtables.copy()
self.actual_tables = [[]]
- for _, tsql in self.tables.itervalues():
+ for _, tsql in self.tables.values():
self.actual_tables[-1].append(tsql)
self.outer_chains = []
self.outer_tables = {}
@@ -398,7 +401,7 @@
notdone_outside_vars = set()
# when iterating other solutions inner to an EXISTS subquery, we should
# reset variables which have this exists node as scope at each iteration
- for var in exists.stmt.defined_vars.itervalues():
+ for var in exists.stmt.defined_vars.values():
if var.scope is exists:
thisexistsvars.add(var.name)
elif var.name not in self.done:
@@ -600,7 +603,7 @@
self.outer_chains.remove(lchain)
rchain += lchain
self.mark_as_used_in_outer_join(leftalias)
- for alias, (aouter, aconditions, achain) in outer_tables.iteritems():
+ for alias, (aouter, aconditions, achain) in outer_tables.items():
if achain is lchain:
outer_tables[alias] = (aouter, aconditions, rchain)
else:
@@ -1475,7 +1478,7 @@
"""generate SQL name for a function"""
if func.name == 'FTIRANK':
try:
- rel = iter(func.children[0].variable.stinfo['ftirels']).next()
+ rel = next(iter(func.children[0].variable.stinfo['ftirels']))
except KeyError:
raise BadRQLQuery("can't use FTIRANK on variable not used in an"
" 'has_text' relation (eg full-text search)")
@@ -1512,7 +1515,7 @@
return self._mapped_term(constant, '%%(%s)s' % value)[0]
except KeyError:
_id = value
- if isinstance(_id, unicode):
+ if PY2 and isinstance(_id, unicode):
_id = _id.encode()
else:
_id = str(id(constant)).replace('-', '', 1)
@@ -1561,7 +1564,7 @@
# add additional restriction on entities.type column
pts = variable.stinfo['possibletypes']
if len(pts) == 1:
- etype = iter(variable.stinfo['possibletypes']).next()
+ etype = next(iter(variable.stinfo['possibletypes']))
restr = "%s.type='%s'" % (vtablename, etype)
else:
etypes = ','.join("'%s'" % et for et in pts)
@@ -1609,7 +1612,7 @@
def _temp_table_scope(self, select, table):
scope = 9999
- for var, sql in self._varmap.iteritems():
+ for var, sql in self._varmap.items():
# skip "attribute variable" in varmap (such 'T.login')
if not '.' in var and table == sql.split('.', 1)[0]:
try:
@@ -1668,7 +1671,7 @@
except KeyError:
pass
rel = (variable.stinfo.get('principal') or
- iter(variable.stinfo['rhsrelations']).next())
+ next(iter(variable.stinfo['rhsrelations'])))
linkedvar = rel.children[0].variable
if rel.r_type == 'eid':
return linkedvar.accept(self)
--- a/server/sources/storages.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/sources/storages.py Thu Nov 12 10:52:28 2015 +0100
@@ -23,6 +23,10 @@
from contextlib import contextmanager
import tempfile
+from six import PY2, PY3, text_type, binary_type
+
+from logilab.common import nullobject
+
from yams.schema import role_name
from cubicweb import Binary, ValidationError
@@ -44,7 +48,7 @@
query result process of fetched attribute's value and should have the
following prototype::
- callback(self, source, session, value)
+ callback(self, source, cnx, value)
where `value` is the value actually stored in the backend. None values
will be skipped (eg callback won't be called).
@@ -92,24 +96,33 @@
return tempfile.mkstemp(prefix=base, suffix=ext, dir=dirpath)
@contextmanager
-def fsimport(session):
- present = 'fs_importing' in session.transaction_data
- old_value = session.transaction_data.get('fs_importing')
- session.transaction_data['fs_importing'] = True
+def fsimport(cnx):
+ present = 'fs_importing' in cnx.transaction_data
+ old_value = cnx.transaction_data.get('fs_importing')
+ cnx.transaction_data['fs_importing'] = True
yield
if present:
- session.transaction_data['fs_importing'] = old_value
+ cnx.transaction_data['fs_importing'] = old_value
else:
- del session.transaction_data['fs_importing']
+ del cnx.transaction_data['fs_importing']
+
+
+_marker = nullobject()
class BytesFileSystemStorage(Storage):
"""store Bytes attribute value on the file system"""
- def __init__(self, defaultdir, fsencoding='utf-8', wmode=0444):
- if type(defaultdir) is unicode:
- defaultdir = defaultdir.encode(fsencoding)
+ def __init__(self, defaultdir, fsencoding=_marker, wmode=0o444):
+ if PY3:
+ if not isinstance(defaultdir, text_type):
+ raise TypeError('defaultdir must be a unicode object in python 3')
+ if fsencoding is not _marker:
+ raise ValueError('fsencoding is no longer supported in python 3')
+ else:
+ self.fsencoding = fsencoding or 'utf-8'
+ if isinstance(defaultdir, text_type):
+ defaultdir = defaultdir.encode(fsencoding)
self.default_directory = defaultdir
- self.fsencoding = fsencoding
# extra umask to use when creating file
# 0444 as in "only allow read bit in permission"
self._wmode = wmode
@@ -126,7 +139,7 @@
fileobj.close()
- def callback(self, source, session, value):
+ def callback(self, source, cnx, value):
"""sql generator callback when some attribute with a custom storage is
accessed
"""
@@ -145,7 +158,8 @@
binary = entity.cw_edited.pop(attr)
fd, fpath = self.new_fs_path(entity, attr)
# bytes storage used to store file's path
- entity.cw_edited.edited_attribute(attr, Binary(fpath))
+ binary_obj = Binary(fpath if PY2 else fpath.encode('utf-8'))
+ entity.cw_edited.edited_attribute(attr, binary_obj)
self._writecontent(fd, binary)
AddFileOp.get_instance(entity._cw).add_data(fpath)
return binary
@@ -187,7 +201,8 @@
entity.cw_edited.edited_attribute(attr, None)
else:
# register the new location for the file.
- entity.cw_edited.edited_attribute(attr, Binary(fpath))
+ binary_obj = Binary(fpath if PY2 else fpath.encode('utf-8'))
+ entity.cw_edited.edited_attribute(attr, binary_obj)
if oldpath is not None and oldpath != fpath:
# Mark the old file as useless so the file will be removed at
# commit.
@@ -206,16 +221,19 @@
# available. Keeping the extension is useful for example in the case of
# PIL processing that use filename extension to detect content-type, as
# well as providing more understandable file names on the fs.
+ if PY2:
+ attr = attr.encode('ascii')
basename = [str(entity.eid), attr]
name = entity.cw_attr_metadata(attr, 'name')
if name is not None:
- basename.append(name.encode(self.fsencoding))
+ basename.append(name.encode(self.fsencoding) if PY2 else name)
fd, fspath = uniquify_path(self.default_directory,
'_'.join(basename))
if fspath is None:
msg = entity._cw._('failed to uniquify path (%s, %s)') % (
self.default_directory, '_'.join(basename))
raise ValidationError(entity.eid, {role_name(attr, 'subject'): msg})
+ assert isinstance(fspath, str) # bytes on py2, unicode on py3
return fd, fspath
def current_fs_path(self, entity, attr):
@@ -229,34 +247,40 @@
rawvalue = cu.fetchone()[0]
if rawvalue is None: # no previous value
return None
- return sysource._process_value(rawvalue, cu.description[0],
- binarywrap=str)
+ fspath = sysource._process_value(rawvalue, cu.description[0],
+ binarywrap=binary_type)
+ if PY3:
+ fspath = fspath.decode('utf-8')
+ assert isinstance(fspath, str) # bytes on py2, unicode on py3
+ return fspath
def migrate_entity(self, entity, attribute):
"""migrate an entity attribute to the storage"""
entity.cw_edited = EditedEntity(entity, **entity.cw_attr_cache)
self.entity_added(entity, attribute)
- session = entity._cw
- source = session.repo.system_source
+ cnx = entity._cw
+ source = cnx.repo.system_source
attrs = source.preprocess_entity(entity)
sql = source.sqlgen.update('cw_' + entity.cw_etype, attrs,
['cw_eid'])
- source.doexec(session, sql, attrs)
+ source.doexec(cnx, sql, attrs)
entity.cw_edited = None
class AddFileOp(hook.DataOperationMixIn, hook.Operation):
def rollback_event(self):
for filepath in self.get_data():
+ assert isinstance(filepath, str) # bytes on py2, unicode on py3
try:
unlink(filepath)
except Exception as ex:
- self.error('cant remove %s: %s' % (filepath, ex))
+ self.error("can't remove %s: %s" % (filepath, ex))
class DeleteFileOp(hook.DataOperationMixIn, hook.Operation):
def postcommit_event(self):
for filepath in self.get_data():
+ assert isinstance(filepath, str) # bytes on py2, unicode on py3
try:
unlink(filepath)
except Exception as ex:
- self.error('cant remove %s: %s' % (filepath, ex))
+ self.error("can't remove %s: %s" % (filepath, ex))
--- a/server/sqlutils.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/sqlutils.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,6 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""SQL utilities functions and classes."""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
@@ -23,10 +24,12 @@
import re
import subprocess
from os.path import abspath
-from itertools import ifilter
from logging import getLogger
from datetime import time, datetime
+from six import string_types, text_type
+from six.moves import filter
+
from logilab import database as db, common as lgc
from logilab.common.shellutils import ProgressBar, DummyProgressBar
from logilab.common.deprecation import deprecated
@@ -44,8 +47,12 @@
SQL_PREFIX = 'cw_'
def _run_command(cmd):
- print ' '.join(cmd)
- return subprocess.call(cmd)
+ if isinstance(cmd, string_types):
+ print(cmd)
+ return subprocess.call(cmd, shell=True)
+ else:
+ print(' '.join(cmd))
+ return subprocess.call(cmd)
def sqlexec(sqlstmts, cursor_or_execute, withpb=True,
@@ -69,7 +76,7 @@
else:
execute = cursor_or_execute
sqlstmts_as_string = False
- if isinstance(sqlstmts, basestring):
+ if isinstance(sqlstmts, string_types):
sqlstmts_as_string = True
sqlstmts = sqlstmts.split(delimiter)
if withpb:
@@ -87,7 +94,7 @@
try:
# some dbapi modules doesn't accept unicode for sql string
execute(str(sql))
- except Exception, err:
+ except Exception as err:
if cnx:
cnx.rollback()
failed.append(sql)
@@ -95,7 +102,7 @@
if cnx:
cnx.commit()
if withpb:
- print
+ print()
if sqlstmts_as_string:
failed = delimiter.join(failed)
return failed
@@ -178,9 +185,9 @@
# for mssql, we need to drop views before tables
if hasattr(dbhelper, 'list_views'):
cmds += ['DROP VIEW %s;' % name
- for name in ifilter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_views(sqlcursor))]
+ for name in filter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_views(sqlcursor))]
cmds += ['DROP TABLE %s;' % name
- for name in ifilter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_tables(sqlcursor))]
+ for name in filter(_SQL_DROP_ALL_USER_TABLES_FILTER_FUNCTION, dbhelper.list_tables(sqlcursor))]
return '\n'.join(cmds)
@@ -370,7 +377,7 @@
def merge_args(self, args, query_args):
if args is not None:
newargs = {}
- for key, val in args.iteritems():
+ for key, val in args.items():
# convert cubicweb binary into db binary
if isinstance(val, Binary):
val = self._binary(val.getvalue())
@@ -441,7 +448,7 @@
attrs = {}
eschema = entity.e_schema
converters = getattr(self.dbhelper, 'TYPE_CONVERTERS', {})
- for attr, value in entity.cw_edited.iteritems():
+ for attr, value in entity.cw_edited.items():
if value is not None and eschema.subjrels[attr].final:
atype = str(entity.e_schema.destination(attr))
if atype in converters:
@@ -481,7 +488,7 @@
if value is not None:
self.values.add(value)
def finalize(self):
- return ', '.join(unicode(v) for v in self.values)
+ return ', '.join(text_type(v) for v in self.values)
cnx.create_aggregate("GROUP_CONCAT", 1, group_concat)
--- a/server/ssplanner.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/ssplanner.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,6 +19,8 @@
__docformat__ = "restructuredtext en"
+from six import text_type
+
from rql.stmts import Union, Select
from rql.nodes import Constant, Relation
@@ -54,7 +56,7 @@
value = rhs.eval(plan.args)
eschema = edef.entity.e_schema
attrtype = eschema.subjrels[rtype].objects(eschema)[0]
- if attrtype == 'Password' and isinstance(value, unicode):
+ if attrtype == 'Password' and isinstance(value, text_type):
value = value.encode('UTF8')
edef.edited_attribute(rtype, value)
elif str(rhs) in to_build:
@@ -306,7 +308,7 @@
if varmap is None:
return varmap
maprepr = {}
- for var, sql in varmap.iteritems():
+ for var, sql in varmap.items():
table, col = sql.split('.')
maprepr[var] = '%s.%s' % (tablesinorder[table], col)
return maprepr
@@ -527,7 +529,7 @@
result[i] = newrow
# update entities
repo.glob_add_relations(cnx, relations)
- for eid, edited in edefs.iteritems():
+ for eid, edited in edefs.items():
repo.glob_update_entity(cnx, edited)
return result
--- a/server/test/data-migractions/cubes/fakeemail/schema.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/data-migractions/cubes/fakeemail/schema.py Thu Nov 12 10:52:28 2015 +0100
@@ -5,7 +5,7 @@
:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
# pylint: disable-msg=E0611,F0401
from yams.buildobjs import (SubjectRelation, RelationType, EntityType,
--- a/server/test/data-migractions/migratedapp/schema.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/data-migractions/migratedapp/schema.py Thu Nov 12 10:52:28 2015 +0100
@@ -21,6 +21,7 @@
SubjectRelation, Bytes,
RichString, String, Int, Boolean, Datetime, Date, Float)
from yams.constraints import SizeConstraint, UniqueConstraint
+from cubicweb import _
from cubicweb.schema import (WorkflowableEntityType, RQLConstraint,
RQLVocabularyConstraint,
ERQLExpression, RRQLExpression)
--- a/server/test/data-migractions/schema.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/data-migractions/schema.py Thu Nov 12 10:52:28 2015 +0100
@@ -24,6 +24,8 @@
RQLConstraint, RQLUniqueConstraint,
RQLVocabularyConstraint,
ERQLExpression, RRQLExpression)
+from cubicweb import _
+
class Affaire(WorkflowableEntityType):
__permissions__ = {
@@ -85,7 +87,7 @@
object = 'SubDivision'
from cubicweb.schemas.base import CWUser
-CWUser.get_relations('login').next().fulltextindexed = True
+next(CWUser.get_relations('login')).fulltextindexed = True
class Note(WorkflowableEntityType):
date = String(maxsize=10)
--- a/server/test/data-schema2sql/schema/State.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/data-schema2sql/schema/State.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,7 +19,7 @@
SubjectRelation, Int, String, Boolean)
from yams.constraints import SizeConstraint, UniqueConstraint
-from __init__ import RESTRICTED_RTYPE_PERMS
+from . import RESTRICTED_RTYPE_PERMS
class State(EntityType):
"""used to associate simple states to an entity
--- a/server/test/data/schema.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/data/schema.py Thu Nov 12 10:52:28 2015 +0100
@@ -24,6 +24,7 @@
RQLConstraint, RQLUniqueConstraint,
RQLVocabularyConstraint,
ERQLExpression, RRQLExpression)
+from cubicweb import _
class Affaire(WorkflowableEntityType):
__permissions__ = {
@@ -85,7 +86,7 @@
object = 'SubDivision'
from cubicweb.schemas.base import CWUser
-CWUser.get_relations('login').next().fulltextindexed = True
+next(CWUser.get_relations('login')).fulltextindexed = True
class Note(WorkflowableEntityType):
date = String(maxsize=10)
--- a/server/test/datacomputed/migratedapp/schema.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/datacomputed/migratedapp/schema.py Thu Nov 12 10:52:28 2015 +0100
@@ -59,3 +59,8 @@
class renamed(ComputedRelation):
rule = 'S employees E, O concerns E'
+
+
+class perm_changes(ComputedRelation):
+ __permissions__ = {'read': ('managers',)}
+ rule = 'S employees E, O concerns E'
--- a/server/test/datacomputed/schema.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/datacomputed/schema.py Thu Nov 12 10:52:28 2015 +0100
@@ -58,3 +58,8 @@
class to_be_renamed(ComputedRelation):
rule = 'S employees E, O concerns E'
+
+
+class perm_changes(ComputedRelation):
+ __permissions__ = {'read': ('managers', 'users')}
+ rule = 'S employees E, O concerns E'
--- a/server/test/requirements.txt Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/requirements.txt Thu Nov 12 10:52:28 2015 +0100
@@ -1,4 +1,5 @@
psycopg2
+ldap3
cubicweb-basket
cubicweb-card
cubicweb-comment
--- a/server/test/unittest_checkintegrity.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/unittest_checkintegrity.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,7 +17,13 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
import sys
-from StringIO import StringIO
+
+from six import PY2
+if PY2:
+ from StringIO import StringIO
+else:
+ from io import StringIO
+
from logilab.common.testlib import TestCase, unittest_main
from cubicweb.devtools import get_test_db_handler, TestServerConfiguration
--- a/server/test/unittest_datafeed.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/unittest_datafeed.py Thu Nov 12 10:52:28 2015 +0100
@@ -1,3 +1,4 @@
+# coding: utf-8
# copyright 2011-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
@@ -16,7 +17,6 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-import mimetools
from datetime import timedelta
from contextlib import contextmanager
@@ -28,7 +28,7 @@
def setup_database(self):
with self.admin_access.repo_cnx() as cnx:
with self.base_parser(cnx):
- cnx.create_entity('CWSource', name=u'myfeed', type=u'datafeed',
+ cnx.create_entity('CWSource', name=u'ô myfeed', type=u'datafeed',
parser=u'testparser', url=u'ignored',
config=u'synchronization-interval=1min')
cnx.commit()
@@ -48,21 +48,23 @@
entity.cw_edited.update(sourceparams['item'])
with self.temporary_appobjects(AParser):
- if 'myfeed' in self.repo.sources_by_uri:
- yield self.repo.sources_by_uri['myfeed']._get_parser(session)
+ if u'ô myfeed' in self.repo.sources_by_uri:
+ yield self.repo.sources_by_uri[u'ô myfeed']._get_parser(session)
else:
yield
def test(self):
- self.assertIn('myfeed', self.repo.sources_by_uri)
- dfsource = self.repo.sources_by_uri['myfeed']
+ self.assertIn(u'ô myfeed', self.repo.sources_by_uri)
+ dfsource = self.repo.sources_by_uri[u'ô myfeed']
self.assertNotIn('use_cwuri_as_url', dfsource.__dict__)
- self.assertEqual({'type': u'datafeed', 'uri': u'myfeed', 'use-cwuri-as-url': True},
+ self.assertEqual({'type': u'datafeed', 'uri': u'ô myfeed', 'use-cwuri-as-url': True},
dfsource.public_config)
self.assertEqual(dfsource.use_cwuri_as_url, True)
self.assertEqual(dfsource.latest_retrieval, None)
self.assertEqual(dfsource.synchro_interval, timedelta(seconds=60))
self.assertFalse(dfsource.fresh())
+ # ensure source's logger name has been normalized (accents stripped to ascii)
+ self.assertEqual(dfsource.info.__self__.name, 'cubicweb.sources.o myfeed')
with self.repo.internal_cnx() as cnx:
with self.base_parser(cnx):
@@ -78,17 +80,17 @@
self.assertEqual(entity.title, 'cubicweb.org')
self.assertEqual(entity.content, 'the cw web site')
self.assertEqual(entity.cwuri, 'http://www.cubicweb.org/')
- self.assertEqual(entity.cw_source[0].name, 'myfeed')
+ self.assertEqual(entity.cw_source[0].name, u'ô myfeed')
self.assertEqual(entity.cw_metainformation(),
{'type': 'Card',
- 'source': {'uri': 'myfeed', 'type': 'datafeed', 'use-cwuri-as-url': True},
- 'extid': 'http://www.cubicweb.org/'}
+ 'source': {'uri': u'ô myfeed', 'type': 'datafeed', 'use-cwuri-as-url': True},
+ 'extid': b'http://www.cubicweb.org/'}
)
self.assertEqual(entity.absolute_url(), 'http://www.cubicweb.org/')
# test repo cache keys
self.assertEqual(self.repo._type_source_cache[entity.eid],
- ('Card', 'http://www.cubicweb.org/', 'myfeed'))
- self.assertEqual(self.repo._extid_cache['http://www.cubicweb.org/'],
+ ('Card', b'http://www.cubicweb.org/', u'ô myfeed'))
+ self.assertEqual(self.repo._extid_cache[b'http://www.cubicweb.org/'],
entity.eid)
# test repull
stats = dfsource.pull_data(cnx, force=True)
@@ -101,19 +103,18 @@
self.assertEqual(stats['created'], set())
self.assertEqual(stats['updated'], set((entity.eid,)))
self.assertEqual(self.repo._type_source_cache[entity.eid],
- ('Card', 'http://www.cubicweb.org/', 'myfeed'))
- self.assertEqual(self.repo._extid_cache['http://www.cubicweb.org/'],
+ ('Card', b'http://www.cubicweb.org/', u'ô myfeed'))
+ self.assertEqual(self.repo._extid_cache[b'http://www.cubicweb.org/'],
entity.eid)
self.assertEqual(dfsource.source_cwuris(cnx),
- {'http://www.cubicweb.org/': (entity.eid, 'Card')}
- )
+ {b'http://www.cubicweb.org/': (entity.eid, 'Card')})
self.assertTrue(dfsource.latest_retrieval)
self.assertTrue(dfsource.fresh())
# test_rename_source
with self.admin_access.repo_cnx() as cnx:
- cnx.execute('SET S name "myrenamedfeed" WHERE S is CWSource, S name "myfeed"')
+ cnx.entity_from_eid(dfsource.eid).cw_set(name=u"myrenamedfeed")
cnx.commit()
entity = cnx.execute('Card X').get_entity(0, 0)
self.assertEqual(entity.cwuri, 'http://www.cubicweb.org/')
@@ -121,11 +122,11 @@
self.assertEqual(entity.cw_metainformation(),
{'type': 'Card',
'source': {'uri': 'myrenamedfeed', 'type': 'datafeed', 'use-cwuri-as-url': True},
- 'extid': 'http://www.cubicweb.org/'}
+ 'extid': b'http://www.cubicweb.org/'}
)
self.assertEqual(self.repo._type_source_cache[entity.eid],
- ('Card', 'http://www.cubicweb.org/', 'myrenamedfeed'))
- self.assertEqual(self.repo._extid_cache['http://www.cubicweb.org/'],
+ ('Card', b'http://www.cubicweb.org/', 'myrenamedfeed'))
+ self.assertEqual(self.repo._extid_cache[b'http://www.cubicweb.org/'],
entity.eid)
# test_delete_source
@@ -140,7 +141,6 @@
value = parser.retrieve_url('a string')
self.assertEqual(200, value.getcode())
self.assertEqual('a string', value.geturl())
- self.assertIsInstance(value.info(), mimetools.Message)
class DataFeedConfigTC(CubicWebTC):
--- a/server/test/unittest_ldapsource.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/unittest_ldapsource.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,6 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""cubicweb.server.sources.ldapusers unit and functional tests"""
+from __future__ import print_function
import os
import sys
@@ -25,6 +26,9 @@
import subprocess
import tempfile
+from six import string_types
+from six.moves import range
+
from cubicweb import AuthenticationError
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.devtools.repotest import RQLGeneratorTC
@@ -49,8 +53,8 @@
slapddir = tempfile.mkdtemp('cw-unittest-ldap')
config = cls.config
slapdconf = join(config.apphome, "slapd.conf")
- confin = file(join(config.apphome, "slapd.conf.in")).read()
- confstream = file(slapdconf, 'w')
+ confin = open(join(config.apphome, "slapd.conf.in")).read()
+ confstream = open(slapdconf, 'w')
confstream.write(confin % {'apphome': config.apphome, 'testdir': slapddir})
confstream.close()
# fill ldap server with some data
@@ -61,13 +65,13 @@
slapproc = subprocess.Popen(cmdline, stdout=PIPE, stderr=PIPE)
stdout, stderr = slapproc.communicate()
if slapproc.returncode:
- print >> sys.stderr, ('slapadd returned with status: %s'
- % slapproc.returncode)
+ print('slapadd returned with status: %s'
+ % slapproc.returncode, file=sys.stderr)
sys.stdout.write(stdout)
sys.stderr.write(stderr)
#ldapuri = 'ldapi://' + join(basedir, "ldapi").replace('/', '%2f')
- port = get_available_port(xrange(9000, 9100))
+ port = get_available_port(range(9000, 9100))
host = 'localhost:%s' % port
ldapuri = 'ldap://%s' % host
cmdline = ["/usr/sbin/slapd", "-f", slapdconf, "-h", ldapuri, "-d", "0"]
@@ -94,8 +98,8 @@
os.kill(cls.slapd_process.pid, signal.SIGTERM)
stdout, stderr = cls.slapd_process.communicate()
if cls.slapd_process.returncode:
- print >> sys.stderr, ('slapd returned with status: %s'
- % cls.slapd_process.returncode)
+ print('slapd returned with status: %s'
+ % cls.slapd_process.returncode, file=sys.stderr)
sys.stdout.write(stdout)
sys.stderr.write(stderr)
config.info('DONE')
@@ -149,8 +153,8 @@
add an LDAP entity
"""
modcmd = ['dn: %s'%dn, 'changetype: add']
- for key, values in mods.iteritems():
- if isinstance(values, basestring):
+ for key, values in mods.items():
+ if isinstance(values, string_types):
values = [values]
for value in values:
modcmd.append('%s: %s'%(key, value))
@@ -168,9 +172,9 @@
modify one or more attributes of an LDAP entity
"""
modcmd = ['dn: %s'%dn, 'changetype: modify']
- for (kind, key), values in mods.iteritems():
+ for (kind, key), values in mods.items():
modcmd.append('%s: %s' % (kind, key))
- if isinstance(values, basestring):
+ if isinstance(values, string_types):
values = [values]
for value in values:
modcmd.append('%s: %s'%(key, value))
@@ -183,7 +187,7 @@
'cn=admin,dc=cubicweb,dc=test', '-w', 'cw']
PIPE = subprocess.PIPE
p = subprocess.Popen(updatecmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
- p.stdin.write('\n'.join(modcmd))
+ p.stdin.write('\n'.join(modcmd).encode('ascii'))
p.stdin.close()
if p.wait():
raise RuntimeError("ldap update failed: %s"%('\n'.join(p.stderr.readlines())))
--- a/server/test/unittest_migractions.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/unittest_migractions.py Thu Nov 12 10:52:28 2015 +0100
@@ -54,7 +54,8 @@
class MigrationConfig(cubicweb.devtools.TestServerConfiguration):
default_sources = cubicweb.devtools.DEFAULT_PSQL_SOURCES
- CUBES_PATH = [osp.join(HERE, 'data-migractions', 'cubes')]
+ CUBES_PATH = cubicweb.devtools.TestServerConfiguration.CUBES_PATH + [
+ osp.join(HERE, 'data-migractions', 'cubes')]
class MigrationTC(CubicWebTC):
@@ -151,7 +152,7 @@
orderdict2 = dict(mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, '
'RDEF relation_type RT, RDEF ordernum O, RT name RTN'))
whateverorder = migrschema['whatever'].rdef('Note', 'Int').order
- for k, v in orderdict.iteritems():
+ for k, v in orderdict.items():
if v >= whateverorder:
orderdict[k] = v+1
orderdict['whatever'] = whateverorder
@@ -579,7 +580,7 @@
def test_add_drop_cube_and_deps(self):
with self.mh() as (cnx, mh):
schema = self.repo.schema
- self.assertEqual(sorted((str(s), str(o)) for s, o in schema['see_also'].rdefs.iterkeys()),
+ self.assertEqual(sorted((str(s), str(o)) for s, o in schema['see_also'].rdefs),
sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'),
('Bookmark', 'Bookmark'), ('Bookmark', 'Note'),
('Note', 'Note'), ('Note', 'Bookmark')]))
@@ -593,7 +594,7 @@
for ertype in ('Email', 'EmailThread', 'EmailPart', 'File',
'sender', 'in_thread', 'reply_to', 'data_format'):
self.assertNotIn(ertype, schema)
- self.assertEqual(sorted(schema['see_also'].rdefs.iterkeys()),
+ self.assertEqual(sorted(schema['see_also'].rdefs),
sorted([('Folder', 'Folder'),
('Bookmark', 'Bookmark'),
('Bookmark', 'Note'),
@@ -612,12 +613,12 @@
for ertype in ('Email', 'EmailThread', 'EmailPart', 'File',
'sender', 'in_thread', 'reply_to', 'data_format'):
self.assertIn(ertype, schema)
- self.assertEqual(sorted(schema['see_also'].rdefs.iterkeys()),
- sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'),
- ('Bookmark', 'Bookmark'),
- ('Bookmark', 'Note'),
- ('Note', 'Note'),
- ('Note', 'Bookmark')]))
+ self.assertEqual(sorted(schema['see_also'].rdefs),
+ sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'),
+ ('Bookmark', 'Bookmark'),
+ ('Bookmark', 'Note'),
+ ('Note', 'Note'),
+ ('Note', 'Bookmark')]))
self.assertEqual(sorted(schema['see_also'].subjects()), ['Bookmark', 'EmailThread', 'Folder', 'Note'])
self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'EmailThread', 'Folder', 'Note'])
from cubes.fakeemail.__pkginfo__ import version as email_version
@@ -784,6 +785,20 @@
self.assertEqual(self.schema['whatever'].subjects(), ('Company',))
self.assertFalse(self.table_sql(mh, 'whatever_relation'))
+ def test_computed_relation_sync_schema_props_perms_security(self):
+ with self.mh() as (cnx, mh):
+ rdef = next(iter(self.schema['perm_changes'].rdefs.values()))
+ self.assertEqual(rdef.permissions,
+ {'add': (), 'delete': (),
+ 'read': ('managers', 'users')})
+ mh.cmd_sync_schema_props_perms('perm_changes')
+ self.assertEqual(self.schema['perm_changes'].permissions,
+ {'read': ('managers',)})
+ rdef = next(iter(self.schema['perm_changes'].rdefs.values()))
+ self.assertEqual(rdef.permissions,
+ {'add': (), 'delete': (),
+ 'read': ('managers',)})
+
def test_computed_relation_sync_schema_props_perms_on_rdef(self):
self.assertIn('whatever', self.schema)
with self.mh() as (cnx, mh):
--- a/server/test/unittest_postgres.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/unittest_postgres.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,8 +19,11 @@
from datetime import datetime
from threading import Thread
+from six.moves import range
+
from logilab.common.testlib import SkipTest
+import logilab.database as lgdb
from cubicweb import ValidationError
from cubicweb.devtools import PostgresApptestConfiguration, startpgcluster, stoppgcluster
from cubicweb.devtools.testlib import CubicWebTC
@@ -49,13 +52,21 @@
class PostgresFTITC(CubicWebTC):
configcls = PostgresTimeoutConfiguration
+ @classmethod
+ def setUpClass(cls):
+ cls.orig_connect_hooks = lgdb.SQL_CONNECT_HOOKS['postgres'][:]
+
+ @classmethod
+ def tearDownClass(cls):
+ lgdb.SQL_CONNECT_HOOKS['postgres'] = cls.orig_connect_hooks
+
def test_eid_range(self):
# concurrent allocation of eid ranges
source = self.session.repo.sources_by_uri['system']
range1 = []
range2 = []
def allocate_eid_ranges(session, target):
- for x in xrange(1, 10):
+ for x in range(1, 10):
eid = source.create_eid(session, count=x)
target.extend(range(eid-x, eid))
--- a/server/test/unittest_querier.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/unittest_querier.py Thu Nov 12 10:52:28 2015 +0100
@@ -21,6 +21,7 @@
from datetime import date, datetime, timedelta, tzinfo
+from six import PY2, integer_types, binary_type, text_type
from logilab.common.testlib import TestCase, unittest_main
from rql import BadRQLQuery, RQLSyntaxError
@@ -129,8 +130,8 @@
def assertRQLEqual(self, expected, got):
from rql import parse
- self.assertMultiLineEqual(unicode(parse(expected)),
- unicode(parse(got)))
+ self.assertMultiLineEqual(text_type(parse(expected)),
+ text_type(parse(got)))
def test_preprocess_security(self):
s = self.user_groups_session('users')
@@ -178,46 +179,46 @@
' Comment, Division, Email, EmailPart, EmailThread, ExternalUri, File, Folder, '
' Frozable, Note, Old, Personne, RQLExpression, Societe, State, SubDivision, '
' SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)')
- self.assertListEqual(sorted(solutions),
- sorted([{'X': 'BaseTransition', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'Bookmark', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'Card', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'Comment', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'Division', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'CWCache', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'CWComputedRType', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'CWConstraint', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'CWConstraintType', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'CWEType', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'CWAttribute', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'CWGroup', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'CWRelation', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'CWPermission', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'CWProperty', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'CWRType', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'CWSource', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'CWUniqueTogetherConstraint', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'CWUser', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'Email', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'EmailPart', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'EmailThread', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'ExternalUri', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'File', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'Folder', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'Frozable', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'Note', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'Old', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'Personne', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'RQLExpression', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'Societe', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'State', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'SubDivision', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'SubWorkflowExitPoint', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'Tag', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'Transition', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'TrInfo', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'Workflow', 'ETN': 'String', 'ET': 'CWEType'},
- {'X': 'WorkflowTransition', 'ETN': 'String', 'ET': 'CWEType'}]))
+ self.assertCountEqual(solutions,
+ [{'X': 'BaseTransition', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'Bookmark', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'Card', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'Comment', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'Division', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'CWCache', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'CWComputedRType', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'CWConstraint', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'CWConstraintType', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'CWEType', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'CWAttribute', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'CWGroup', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'CWRelation', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'CWPermission', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'CWProperty', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'CWRType', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'CWSource', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'CWUniqueTogetherConstraint', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'CWUser', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'Email', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'EmailPart', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'EmailThread', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'ExternalUri', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'File', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'Folder', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'Frozable', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'Note', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'Old', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'Personne', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'RQLExpression', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'Societe', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'State', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'SubDivision', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'SubWorkflowExitPoint', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'Tag', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'Transition', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'TrInfo', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'Workflow', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'WorkflowTransition', 'ETN': 'String', 'ET': 'CWEType'}])
rql, solutions = partrqls[2]
self.assertEqual(rql,
'Any ETN,X WHERE X is ET, ET name ETN, EXISTS(%(D)s use_email X), '
@@ -263,8 +264,9 @@
self.assertEqual(rset.description[0][0], 'Datetime')
rset = self.qexecute('Any %(x)s', {'x': 1})
self.assertEqual(rset.description[0][0], 'Int')
- rset = self.qexecute('Any %(x)s', {'x': 1L})
- self.assertEqual(rset.description[0][0], 'Int')
+ if PY2:
+ rset = self.qexecute('Any %(x)s', {'x': long(1)})
+ self.assertEqual(rset.description[0][0], 'Int')
rset = self.qexecute('Any %(x)s', {'x': True})
self.assertEqual(rset.description[0][0], 'Boolean')
rset = self.qexecute('Any %(x)s', {'x': 1.0})
@@ -307,10 +309,6 @@
setUpClass = classmethod(setUpClass)
tearDownClass = classmethod(tearDownClass)
- def test_encoding_pb(self):
- self.assertRaises(RQLSyntaxError, self.qexecute,
- 'Any X WHERE X is CWRType, X name "öwned_by"')
-
def test_unknown_eid(self):
# should return an empty result set
self.assertFalse(self.qexecute('Any X WHERE X eid 99999999'))
@@ -318,15 +316,15 @@
def test_typed_eid(self):
# should return an empty result set
rset = self.qexecute('Any X WHERE X eid %(x)s', {'x': '1'})
- self.assertIsInstance(rset[0][0], (int, long))
+ self.assertIsInstance(rset[0][0], integer_types)
def test_bytes_storage(self):
feid = self.qexecute('INSERT File X: X data_name "foo.pdf", '
'X data_format "text/plain", X data %(data)s',
- {'data': Binary("xxx")})[0][0]
+ {'data': Binary(b"xxx")})[0][0]
fdata = self.qexecute('Any D WHERE X data D, X eid %(x)s', {'x': feid})[0][0]
self.assertIsInstance(fdata, Binary)
- self.assertEqual(fdata.getvalue(), 'xxx')
+ self.assertEqual(fdata.getvalue(), b'xxx')
# selection queries tests #################################################
@@ -886,18 +884,18 @@
def test_select_constant(self):
rset = self.qexecute('Any X, "toto" ORDERBY X WHERE X is CWGroup')
self.assertEqual(rset.rows,
- map(list, zip((2,3,4,5), ('toto','toto','toto','toto',))))
- self.assertIsInstance(rset[0][1], unicode)
+ [list(x) for x in zip((2,3,4,5), ('toto','toto','toto','toto',))])
+ self.assertIsInstance(rset[0][1], text_type)
self.assertEqual(rset.description,
- zip(('CWGroup', 'CWGroup', 'CWGroup', 'CWGroup'),
- ('String', 'String', 'String', 'String',)))
+ list(zip(('CWGroup', 'CWGroup', 'CWGroup', 'CWGroup'),
+ ('String', 'String', 'String', 'String',))))
rset = self.qexecute('Any X, %(value)s ORDERBY X WHERE X is CWGroup', {'value': 'toto'})
self.assertEqual(rset.rows,
- map(list, zip((2,3,4,5), ('toto','toto','toto','toto',))))
- self.assertIsInstance(rset[0][1], unicode)
+ list(map(list, zip((2,3,4,5), ('toto','toto','toto','toto',)))))
+ self.assertIsInstance(rset[0][1], text_type)
self.assertEqual(rset.description,
- zip(('CWGroup', 'CWGroup', 'CWGroup', 'CWGroup'),
- ('String', 'String', 'String', 'String',)))
+ list(zip(('CWGroup', 'CWGroup', 'CWGroup', 'CWGroup'),
+ ('String', 'String', 'String', 'String',))))
rset = self.qexecute('Any X,GN WHERE X is CWUser, G is CWGroup, X login "syt", '
'X in_group G, G name GN')
@@ -1015,7 +1013,7 @@
self.assertEqual(len(rset.rows), 1)
self.assertEqual(rset.description, [('Personne',)])
rset = self.qexecute('Personne X WHERE X nom "bidule"')
- self.assert_(rset.rows)
+ self.assertTrue(rset.rows)
self.assertEqual(rset.description, [('Personne',)])
def test_insert_1_multiple(self):
@@ -1029,20 +1027,20 @@
rset = self.qexecute("INSERT Personne X, Personne Y: X nom 'bidule', Y nom 'tutu'")
self.assertEqual(rset.description, [('Personne', 'Personne')])
rset = self.qexecute('Personne X WHERE X nom "bidule" or X nom "tutu"')
- self.assert_(rset.rows)
+ self.assertTrue(rset.rows)
self.assertEqual(rset.description, [('Personne',), ('Personne',)])
def test_insert_3(self):
self.qexecute("INSERT Personne X: X nom Y WHERE U login 'admin', U login Y")
rset = self.qexecute('Personne X WHERE X nom "admin"')
- self.assert_(rset.rows)
+ self.assertTrue(rset.rows)
self.assertEqual(rset.description, [('Personne',)])
def test_insert_4(self):
self.qexecute("INSERT Societe Y: Y nom 'toto'")
self.qexecute("INSERT Personne X: X nom 'bidule', X travaille Y WHERE Y nom 'toto'")
rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y')
- self.assert_(rset.rows)
+ self.assertTrue(rset.rows)
self.assertEqual(rset.description, [('Personne', 'Societe',)])
def test_insert_4bis(self):
@@ -1057,17 +1055,17 @@
def test_insert_4ter(self):
peid = self.qexecute("INSERT Personne X: X nom 'bidule'")[0][0]
seid = self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s",
- {'x': unicode(peid)})[0][0]
+ {'x': text_type(peid)})[0][0]
self.assertEqual(len(self.qexecute('Any X, Y WHERE X travaille Y')), 1)
self.qexecute("INSERT Personne X: X nom 'chouette', X travaille Y WHERE Y eid %(x)s",
- {'x': unicode(seid)})
+ {'x': text_type(seid)})
self.assertEqual(len(self.qexecute('Any X, Y WHERE X travaille Y')), 2)
def test_insert_5(self):
self.qexecute("INSERT Personne X: X nom 'bidule'")
self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X nom 'bidule'")
rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y')
- self.assert_(rset.rows)
+ self.assertTrue(rset.rows)
self.assertEqual(rset.description, [('Personne', 'Societe',)])
def test_insert_5bis(self):
@@ -1075,20 +1073,20 @@
self.qexecute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X eid %(x)s",
{'x': peid})
rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y')
- self.assert_(rset.rows)
+ self.assertTrue(rset.rows)
self.assertEqual(rset.description, [('Personne', 'Societe',)])
def test_insert_6(self):
self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto', X travaille Y")
rset = self.qexecute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y')
- self.assert_(rset.rows)
+ self.assertTrue(rset.rows)
self.assertEqual(rset.description, [('Personne', 'Societe',)])
def test_insert_7(self):
self.qexecute("INSERT Personne X, Societe Y: X nom N, Y nom 'toto', "
"X travaille Y WHERE U login 'admin', U login N")
rset = self.qexecute('Any X, Y WHERE X nom "admin", Y nom "toto", X travaille Y')
- self.assert_(rset.rows)
+ self.assertTrue(rset.rows)
self.assertEqual(rset.description, [('Personne', 'Societe',)])
def test_insert_7_2(self):
@@ -1103,7 +1101,7 @@
self.qexecute("INSERT Societe Y, Personne X: Y nom N, X nom 'toto', X travaille Y "
"WHERE U login 'admin', U login N")
rset = self.qexecute('Any X, Y WHERE X nom "toto", Y nom "admin", X travaille Y')
- self.assert_(rset.rows)
+ self.assertTrue(rset.rows)
self.assertEqual(rset.description, [('Personne', 'Societe',)])
def test_insert_9(self):
@@ -1267,7 +1265,7 @@
rset = self.qexecute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'")
eid1, eid2 = rset[0][0], rset[0][1]
self.qexecute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s",
- {'x': unicode(eid1), 'y': unicode(eid2)})
+ {'x': text_type(eid1), 'y': text_type(eid2)})
rset = self.qexecute('Any X, Y WHERE X travaille Y')
self.assertEqual(len(rset.rows), 1)
@@ -1317,7 +1315,7 @@
eid1, eid2 = rset[0][0], rset[0][1]
rset = self.qexecute("SET X travaille Y WHERE X eid %(x)s, Y eid %(y)s, "
"NOT EXISTS(Z ecrit_par X)",
- {'x': unicode(eid1), 'y': unicode(eid2)})
+ {'x': text_type(eid1), 'y': text_type(eid2)})
self.assertEqual(tuplify(rset.rows), [(eid1, eid2)])
def test_update_query_error(self):
@@ -1364,7 +1362,7 @@
cursor = cnx.cnxset.cu
cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'"
% (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX))
- passwd = str(cursor.fetchone()[0])
+ passwd = binary_type(cursor.fetchone()[0])
self.assertEqual(passwd, crypt_password('toto', passwd))
rset = self.qexecute("Any X WHERE X is CWUser, X login 'bob', X upassword %(pwd)s",
{'pwd': Binary(passwd)})
@@ -1377,11 +1375,11 @@
{'pwd': 'toto'})
self.assertEqual(rset.description[0][0], 'CWUser')
rset = cnx.execute("SET X upassword %(pwd)s WHERE X is CWUser, X login 'bob'",
- {'pwd': 'tutu'})
+ {'pwd': b'tutu'})
cursor = cnx.cnxset.cu
cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'"
% (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX))
- passwd = str(cursor.fetchone()[0])
+ passwd = binary_type(cursor.fetchone()[0])
self.assertEqual(passwd, crypt_password('tutu', passwd))
rset = cnx.execute("Any X WHERE X is CWUser, X login 'bob', X upassword %(pwd)s",
{'pwd': Binary(passwd)})
--- a/server/test/unittest_repository.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/unittest_repository.py Thu Nov 12 10:52:28 2015 +0100
@@ -22,6 +22,8 @@
import time
import logging
+from six.moves import range
+
from yams.constraints import UniqueConstraint
from yams import register_base_type, unregister_base_type
@@ -77,7 +79,7 @@
def test_connect(self):
cnxid = self.repo.connect(self.admlogin, password=self.admpassword)
- self.assert_(cnxid)
+ self.assertTrue(cnxid)
self.repo.close(cnxid)
self.assertRaises(AuthenticationError,
self.repo.connect, self.admlogin, password='nimportnawak')
@@ -100,7 +102,7 @@
cnx.commit()
repo = self.repo
cnxid = repo.connect(u"barnabé", password=u"héhéhé".encode('UTF8'))
- self.assert_(cnxid)
+ self.assertTrue(cnxid)
repo.close(cnxid)
def test_rollback_on_execute_validation_error(self):
@@ -145,7 +147,7 @@
def test_close(self):
repo = self.repo
cnxid = repo.connect(self.admlogin, password=self.admpassword)
- self.assert_(cnxid)
+ self.assertTrue(cnxid)
repo.close(cnxid)
def test_check_session(self):
@@ -192,7 +194,7 @@
constraints = schema.rschema('relation_type').rdef('CWAttribute', 'CWRType').constraints
self.assertEqual(len(constraints), 1)
cstr = constraints[0]
- self.assert_(isinstance(cstr, RQLConstraint))
+ self.assertIsInstance(cstr, RQLConstraint)
self.assertEqual(cstr.expression, 'O final TRUE')
ownedby = schema.rschema('owned_by')
@@ -589,11 +591,11 @@
with self.admin_access.repo_cnx() as cnx:
personnes = []
t0 = time.time()
- for i in xrange(2000):
+ for i in range(2000):
p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
personnes.append(p)
abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
- for j in xrange(0, 2000, 100):
+ for j in range(0, 2000, 100):
abraham.cw_set(personne_composite=personnes[j:j+100])
t1 = time.time()
self.info('creation: %.2gs', (t1 - t0))
@@ -610,7 +612,7 @@
def test_add_relation_non_inlined(self):
with self.admin_access.repo_cnx() as cnx:
personnes = []
- for i in xrange(2000):
+ for i in range(2000):
p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
personnes.append(p)
cnx.commit()
@@ -619,7 +621,7 @@
personne_composite=personnes[:100])
t1 = time.time()
self.info('creation: %.2gs', (t1 - t0))
- for j in xrange(100, 2000, 100):
+ for j in range(100, 2000, 100):
abraham.cw_set(personne_composite=personnes[j:j+100])
t2 = time.time()
self.info('more relations: %.2gs', (t2-t1))
@@ -630,7 +632,7 @@
def test_add_relation_inlined(self):
with self.admin_access.repo_cnx() as cnx:
personnes = []
- for i in xrange(2000):
+ for i in range(2000):
p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
personnes.append(p)
cnx.commit()
@@ -639,7 +641,7 @@
personne_inlined=personnes[:100])
t1 = time.time()
self.info('creation: %.2gs', (t1 - t0))
- for j in xrange(100, 2000, 100):
+ for j in range(100, 2000, 100):
abraham.cw_set(personne_inlined=personnes[j:j+100])
t2 = time.time()
self.info('more relations: %.2gs', (t2-t1))
@@ -652,7 +654,7 @@
""" to be compared with test_session_add_relations"""
with self.admin_access.repo_cnx() as cnx:
personnes = []
- for i in xrange(2000):
+ for i in range(2000):
p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
personnes.append(p)
abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
@@ -669,7 +671,7 @@
""" to be compared with test_session_add_relation"""
with self.admin_access.repo_cnx() as cnx:
personnes = []
- for i in xrange(2000):
+ for i in range(2000):
p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
personnes.append(p)
abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
@@ -686,7 +688,7 @@
""" to be compared with test_session_add_relations"""
with self.admin_access.repo_cnx() as cnx:
personnes = []
- for i in xrange(2000):
+ for i in range(2000):
p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
personnes.append(p)
abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
@@ -703,7 +705,7 @@
""" to be compared with test_session_add_relation"""
with self.admin_access.repo_cnx() as cnx:
personnes = []
- for i in xrange(2000):
+ for i in range(2000):
p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
personnes.append(p)
abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
--- a/server/test/unittest_rql2sql.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/unittest_rql2sql.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,6 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""unit tests for module cubicweb.server.sources.rql2sql"""
+from __future__ import print_function
import sys
import os
@@ -1246,13 +1247,13 @@
except Exception as ex:
if 'r' in locals():
try:
- print (r%args).strip()
+ print((r%args).strip())
except KeyError:
- print 'strange, missing substitution'
- print r, nargs
- print '!='
- print sql.strip()
- print 'RQL:', rql
+ print('strange, missing substitution')
+ print(r, nargs)
+ print('!=')
+ print(sql.strip())
+ print('RQL:', rql)
raise
def _parse(self, rqls):
@@ -1269,11 +1270,11 @@
r, args, cbs = self.o.generate(rqlst, args)
self.assertEqual((r.strip(), args), sql)
except Exception as ex:
- print rql
+ print(rql)
if 'r' in locals():
- print r.strip()
- print '!='
- print sql[0].strip()
+ print(r.strip())
+ print('!=')
+ print(sql[0].strip())
raise
return
--- a/server/test/unittest_rqlannotation.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/unittest_rqlannotation.py Thu Nov 12 10:52:28 2015 +0100
@@ -64,7 +64,7 @@
rqlst = self._prepare(cnx, 'Any A,B,C WHERE A eid 12,A comment B, '
'A ?wf_info_for C')
self.assertEqual(rqlst.defined_vars['A']._q_invariant, False)
- self.assert_(rqlst.defined_vars['B'].stinfo['attrvar'])
+ self.assertTrue(rqlst.defined_vars['B'].stinfo['attrvar'])
self.assertEqual(rqlst.defined_vars['C']._q_invariant, False)
self.assertEqual(rqlst.solutions, [{'A': 'TrInfo', 'B': 'String', 'C': 'Affaire'},
{'A': 'TrInfo', 'B': 'String', 'C': 'CWUser'},
@@ -87,7 +87,7 @@
'Y nom NX, X eid XE, not Y eid XE')
self.assertEqual(rqlst.defined_vars['X']._q_invariant, False)
self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False)
- self.assert_(rqlst.defined_vars['XE'].stinfo['attrvar'])
+ self.assertTrue(rqlst.defined_vars['XE'].stinfo['attrvar'])
def test_0_8(self):
with self.session.new_cnx() as cnx:
--- a/server/test/unittest_schemaserial.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/unittest_schemaserial.py Thu Nov 12 10:52:28 2015 +0100
@@ -292,7 +292,7 @@
{'cardinality': u'?1',
'defaultval': None,
'description': u'',
- 'extra_props': '{"jungle_speed": 42}',
+ 'extra_props': b'{"jungle_speed": 42}',
'formula': None,
'indexed': False,
'oe': None,
--- a/server/test/unittest_security.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/unittest_security.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,6 +17,8 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""functional tests for server'security"""
+from six.moves import range
+
from logilab.common.testlib import unittest_main
from cubicweb.devtools.testlib import CubicWebTC
@@ -33,7 +35,7 @@
with self.admin_access.client_cnx() as cnx:
self.create_user(cnx, u'iaminusersgrouponly')
hash = _CRYPTO_CTX.encrypt('oldpassword', scheme='des_crypt')
- self.create_user(cnx, u'oldpassword', password=Binary(hash))
+ self.create_user(cnx, u'oldpassword', password=Binary(hash.encode('ascii')))
class LowLevelSecurityFunctionTC(BaseSecurityTC):
@@ -79,17 +81,20 @@
it will be updated on next login
"""
with self.repo.internal_cnx() as cnx:
- oldhash = str(cnx.system_sql("SELECT cw_upassword FROM cw_CWUser "
- "WHERE cw_login = 'oldpassword'").fetchone()[0])
+ oldhash = cnx.system_sql("SELECT cw_upassword FROM cw_CWUser "
+ "WHERE cw_login = 'oldpassword'").fetchone()[0]
+ oldhash = self.repo.system_source.binary_to_str(oldhash)
self.repo.close(self.repo.connect('oldpassword', password='oldpassword'))
- newhash = str(cnx.system_sql("SELECT cw_upassword FROM cw_CWUser "
- "WHERE cw_login = 'oldpassword'").fetchone()[0])
+ newhash = cnx.system_sql("SELECT cw_upassword FROM cw_CWUser "
+ "WHERE cw_login = 'oldpassword'").fetchone()[0]
+ newhash = self.repo.system_source.binary_to_str(newhash)
self.assertNotEqual(oldhash, newhash)
- self.assertTrue(newhash.startswith('$6$'))
+ self.assertTrue(newhash.startswith(b'$6$'))
self.repo.close(self.repo.connect('oldpassword', password='oldpassword'))
- self.assertEqual(newhash,
- str(cnx.system_sql("SELECT cw_upassword FROM cw_CWUser WHERE "
- "cw_login = 'oldpassword'").fetchone()[0]))
+ newnewhash = cnx.system_sql("SELECT cw_upassword FROM cw_CWUser WHERE "
+ "cw_login = 'oldpassword'").fetchone()[0]
+ newnewhash = self.repo.system_source.binary_to_str(newnewhash)
+ self.assertEqual(newhash, newnewhash)
class SecurityRewritingTC(BaseSecurityTC):
@@ -293,7 +298,7 @@
ueid = self.create_user(cnx, u'user').eid
with self.new_access(u'user').repo_cnx() as cnx:
cnx.execute('SET X upassword %(passwd)s WHERE X eid %(x)s',
- {'x': ueid, 'passwd': 'newpwd'})
+ {'x': ueid, 'passwd': b'newpwd'})
cnx.commit()
self.repo.close(self.repo.connect('user', password='newpwd'))
@@ -302,7 +307,7 @@
ueid = self.create_user(cnx, u'otheruser').eid
with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx:
cnx.execute('SET X upassword %(passwd)s WHERE X eid %(x)s',
- {'x': ueid, 'passwd': 'newpwd'})
+ {'x': ueid, 'passwd': b'newpwd'})
self.assertRaises(Unauthorized, cnx.commit)
# read security test
@@ -559,7 +564,7 @@
rset = cnx.execute('CWUser X')
self.assertEqual([[anon.eid]], rset.rows)
# anonymous user can read groups (necessary to check allowed transitions for instance)
- self.assert_(cnx.execute('CWGroup X'))
+ self.assertTrue(cnx.execute('CWGroup X'))
# should only be able to read the anonymous user, not another one
self.assertRaises(Unauthorized,
cnx.execute, 'CWUser X WHERE X eid %(x)s', {'x': admineid})
@@ -666,7 +671,7 @@
rset = cnx.execute('Any X, U WHERE X is EmailAddress, U use_email X')
msg = ['Preexisting email readable by anon found!']
tmpl = ' - "%s" used by user "%s"'
- for i in xrange(len(rset)):
+ for i in range(len(rset)):
email, user = rset.get_entity(i, 0), rset.get_entity(i, 1)
msg.append(tmpl % (email.dc_title(), user.dc_title()))
raise RuntimeError('\n'.join(msg))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/server/test/unittest_serverctl.py Thu Nov 12 10:52:28 2015 +0100
@@ -0,0 +1,25 @@
+import os.path as osp
+import shutil
+
+from cubicweb.devtools import testlib
+from cubicweb.server.serverctl import _local_dump, DBDumpCommand
+from cubicweb.server.serverconfig import ServerConfiguration
+
+class ServerCTLTC(testlib.CubicWebTC):
+ def setUp(self):
+ super(ServerCTLTC, self).setUp()
+ self.orig_config_for = ServerConfiguration.config_for
+ ServerConfiguration.config_for = staticmethod(lambda appid: self.config)
+
+ def tearDown(self):
+ ServerConfiguration.config_for = self.orig_config_for
+ super(ServerCTLTC, self).tearDown()
+
+ def test_dump(self):
+ DBDumpCommand(None).run([self.appid])
+ shutil.rmtree(osp.join(self.config.apphome, 'backup'))
+
+
+if __name__ == '__main__':
+ from unittest import main
+ main()
--- a/server/test/unittest_storage.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/unittest_storage.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,12 +17,15 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""unit tests for module cubicweb.server.sources.storages"""
+from six import PY2
+
from logilab.common.testlib import unittest_main, tag, Tags
from cubicweb.devtools.testlib import CubicWebTC
from glob import glob
import os
import os.path as osp
+import sys
import shutil
import tempfile
@@ -57,24 +60,26 @@
def setup_database(self):
self.tempdir = tempfile.mkdtemp()
bfs_storage = storages.BytesFileSystemStorage(self.tempdir)
+ self.bfs_storage = bfs_storage
storages.set_attribute_storage(self.repo, 'File', 'data', bfs_storage)
storages.set_attribute_storage(self.repo, 'BFSSTestable', 'opt_attr', bfs_storage)
def tearDown(self):
super(StorageTC, self).tearDown()
storages.unset_attribute_storage(self.repo, 'File', 'data')
+ del self.bfs_storage
shutil.rmtree(self.tempdir)
- def create_file(self, cnx, content='the-data'):
+ def create_file(self, cnx, content=b'the-data'):
return cnx.create_entity('File', data=Binary(content),
data_format=u'text/plain',
data_name=u'foo.pdf')
def fspath(self, cnx, entity):
fspath = cnx.execute('Any fspath(D) WHERE F eid %(f)s, F data D',
- {'f': entity.eid})[0][0]
- return fspath.getvalue()
+ {'f': entity.eid})[0][0].getvalue()
+ return fspath if PY2 else fspath.decode('utf-8')
def test_bfss_wrong_fspath_usage(self):
with self.admin_access.repo_cnx() as cnx:
@@ -109,7 +114,7 @@
# add f1 back to the entity cache with req as _cw
f1 = req.entity_from_eid(f1.eid)
- f1.cw_set(data=Binary('the new data'))
+ f1.cw_set(data=Binary(b'the new data'))
cnx.rollback()
self.assertEqual(open(expected_filepath).read(), 'the-data')
f1.cw_delete()
@@ -132,7 +137,7 @@
with self.admin_access.repo_cnx() as cnx:
cnx.transaction_data['fs_importing'] = True
filepath = osp.abspath(__file__)
- f1 = cnx.create_entity('File', data=Binary(filepath),
+ f1 = cnx.create_entity('File', data=Binary(filepath.encode(sys.getfilesystemencoding())),
data_format=u'text/plain', data_name=u'foo')
self.assertEqual(self.fspath(cnx, f1), filepath)
@@ -185,8 +190,8 @@
self.assertEqual(len(rset), 2)
self.assertEqual(rset[0][0], f1.eid)
self.assertEqual(rset[1][0], f1.eid)
- self.assertEqual(rset[0][1].getvalue(), 'the-data')
- self.assertEqual(rset[1][1].getvalue(), 'the-data')
+ self.assertEqual(rset[0][1].getvalue(), b'the-data')
+ self.assertEqual(rset[1][1].getvalue(), b'the-data')
rset = cnx.execute('Any X,LENGTH(D) WHERE X eid %(x)s, X data D',
{'x': f1.eid})
self.assertEqual(len(rset), 1)
@@ -212,31 +217,31 @@
with self.admin_access.repo_cnx() as cnx:
cnx.transaction_data['fs_importing'] = True
filepath = osp.abspath(__file__)
- f1 = cnx.create_entity('File', data=Binary(filepath),
+ f1 = cnx.create_entity('File', data=Binary(filepath.encode(sys.getfilesystemencoding())),
data_format=u'text/plain', data_name=u'foo')
cw_value = f1.data.getvalue()
- fs_value = file(filepath).read()
+ fs_value = open(filepath, 'rb').read()
if cw_value != fs_value:
self.fail('cw value %r is different from file content' % cw_value)
@tag('update')
def test_bfss_update_with_existing_data(self):
with self.admin_access.repo_cnx() as cnx:
- f1 = cnx.create_entity('File', data=Binary('some data'),
+ f1 = cnx.create_entity('File', data=Binary(b'some data'),
data_format=u'text/plain', data_name=u'foo')
# NOTE: do not use cw_set() which would automatically
# update f1's local dict. We want the pure rql version to work
cnx.execute('SET F data %(d)s WHERE F eid %(f)s',
- {'d': Binary('some other data'), 'f': f1.eid})
- self.assertEqual(f1.data.getvalue(), 'some other data')
+ {'d': Binary(b'some other data'), 'f': f1.eid})
+ self.assertEqual(f1.data.getvalue(), b'some other data')
cnx.commit()
f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0)
- self.assertEqual(f2.data.getvalue(), 'some other data')
+ self.assertEqual(f2.data.getvalue(), b'some other data')
@tag('update', 'extension', 'commit')
def test_bfss_update_with_different_extension_commited(self):
with self.admin_access.repo_cnx() as cnx:
- f1 = cnx.create_entity('File', data=Binary('some data'),
+ f1 = cnx.create_entity('File', data=Binary(b'some data'),
data_format=u'text/plain', data_name=u'foo.txt')
# NOTE: do not use cw_set() which would automatically
# update f1's local dict. We want the pure rql version to work
@@ -246,7 +251,7 @@
self.assertEqual(osp.splitext(old_path)[1], '.txt')
cnx.execute('SET F data %(d)s, F data_name %(dn)s, '
'F data_format %(df)s WHERE F eid %(f)s',
- {'d': Binary('some other data'), 'f': f1.eid,
+ {'d': Binary(b'some other data'), 'f': f1.eid,
'dn': u'bar.jpg', 'df': u'image/jpeg'})
cnx.commit()
# the new file exists with correct extension
@@ -260,7 +265,7 @@
@tag('update', 'extension', 'rollback')
def test_bfss_update_with_different_extension_rolled_back(self):
with self.admin_access.repo_cnx() as cnx:
- f1 = cnx.create_entity('File', data=Binary('some data'),
+ f1 = cnx.create_entity('File', data=Binary(b'some data'),
data_format=u'text/plain', data_name=u'foo.txt')
# NOTE: do not use cw_set() which would automatically
# update f1's local dict. We want the pure rql version to work
@@ -271,7 +276,7 @@
self.assertEqual(osp.splitext(old_path)[1], '.txt')
cnx.execute('SET F data %(d)s, F data_name %(dn)s, '
'F data_format %(df)s WHERE F eid %(f)s',
- {'d': Binary('some other data'),
+ {'d': Binary(b'some other data'),
'f': f1.eid,
'dn': u'bar.jpg',
'df': u'image/jpeg'})
@@ -290,7 +295,7 @@
@tag('update', 'NULL')
def test_bfss_update_to_None(self):
with self.admin_access.repo_cnx() as cnx:
- f = cnx.create_entity('Affaire', opt_attr=Binary('toto'))
+ f = cnx.create_entity('Affaire', opt_attr=Binary(b'toto'))
cnx.commit()
f.cw_set(opt_attr=None)
cnx.commit()
@@ -298,17 +303,17 @@
@tag('fs_importing', 'update')
def test_bfss_update_with_fs_importing(self):
with self.admin_access.repo_cnx() as cnx:
- f1 = cnx.create_entity('File', data=Binary('some data'),
+ f1 = cnx.create_entity('File', data=Binary(b'some data'),
data_format=u'text/plain',
data_name=u'foo')
old_fspath = self.fspath(cnx, f1)
cnx.transaction_data['fs_importing'] = True
new_fspath = osp.join(self.tempdir, 'newfile.txt')
- file(new_fspath, 'w').write('the new data')
+ with open(new_fspath, 'w') as fobj: fobj.write('the new data')
cnx.execute('SET F data %(d)s WHERE F eid %(f)s',
- {'d': Binary(new_fspath), 'f': f1.eid})
+ {'d': Binary(new_fspath.encode(sys.getfilesystemencoding())), 'f': f1.eid})
cnx.commit()
- self.assertEqual(f1.data.getvalue(), 'the new data')
+ self.assertEqual(f1.data.getvalue(), b'the new data')
self.assertEqual(self.fspath(cnx, f1), new_fspath)
self.assertFalse(osp.isfile(old_fspath))
--- a/server/test/unittest_undo.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/unittest_undo.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,6 +17,8 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+from six import text_type
+
from cubicweb import ValidationError
from cubicweb.devtools.testlib import CubicWebTC
import cubicweb.server.session
@@ -255,7 +257,7 @@
"%s doesn't exist anymore." % g.eid])
with self.assertRaises(ValidationError) as cm:
cnx.commit()
- cm.exception.translate(unicode)
+ cm.exception.translate(text_type)
self.assertEqual(cm.exception.entity, self.totoeid)
self.assertEqual(cm.exception.errors,
{'in_group-subject': u'at least one relation in_group is '
@@ -461,7 +463,7 @@
# problem occurs in string manipulation for python < 2.6
def test___unicode__method(self):
u = _UndoException(u"voilà ")
- self.assertIsInstance(unicode(u), unicode)
+ self.assertIsInstance(text_type(u), text_type)
if __name__ == '__main__':
--- a/server/test/unittest_utils.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/test/unittest_utils.py Thu Nov 12 10:52:28 2015 +0100
@@ -26,13 +26,13 @@
def test_crypt(self):
for hash in (
utils.crypt_password('xxx'), # default sha512
- 'ab$5UsKFxRKKN.d8iBIFBnQ80', # custom md5
- 'ab4Vlm81ZUHlg', # DES
+ b'ab$5UsKFxRKKN.d8iBIFBnQ80', # custom md5
+ b'ab4Vlm81ZUHlg', # DES
):
self.assertEqual(utils.crypt_password('xxx', hash), hash)
self.assertEqual(utils.crypt_password(u'xxx', hash), hash)
- self.assertEqual(utils.crypt_password(u'xxx', unicode(hash)), hash)
- self.assertEqual(utils.crypt_password('yyy', hash), '')
+ self.assertEqual(utils.crypt_password(u'xxx', hash.decode('ascii')), hash.decode('ascii'))
+ self.assertEqual(utils.crypt_password('yyy', hash), b'')
# accept any password for empty hashes (is it a good idea?)
self.assertEqual(utils.crypt_password('xxx', ''), '')
--- a/server/utils.py Thu Mar 06 15:55:33 2014 +0100
+++ b/server/utils.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,6 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""Some utilities for the CubicWeb server."""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
@@ -24,6 +25,9 @@
from threading import Timer, Thread
from getpass import getpass
+from six import PY2, text_type
+from six.moves import input
+
from passlib.utils import handlers as uh, to_hash_str
from passlib.context import CryptContext
@@ -60,7 +64,7 @@
"""return the encrypted password using the given salt or a generated one
"""
if salt is None:
- return _CRYPTO_CTX.encrypt(passwd)
+ return _CRYPTO_CTX.encrypt(passwd).encode('ascii')
# empty hash, accept any password for backwards compat
if salt == '':
return salt
@@ -70,7 +74,7 @@
except ValueError: # e.g. couldn't identify hash
pass
# wrong password
- return ''
+ return b''
def eschema_eid(cnx, eschema):
@@ -81,7 +85,7 @@
if eschema.eid is None:
eschema.eid = cnx.execute(
'Any X WHERE X is CWEType, X name %(name)s',
- {'name': unicode(eschema)})[0][0]
+ {'name': text_type(eschema)})[0][0]
return eschema.eid
@@ -92,17 +96,18 @@
passwdmsg='password'):
if not user:
if msg:
- print msg
+ print(msg)
while not user:
- user = raw_input('login: ')
- user = unicode(user, sys.stdin.encoding)
+ user = input('login: ')
+ if PY2:
+ user = unicode(user, sys.stdin.encoding)
passwd = getpass('%s: ' % passwdmsg)
if confirm:
while True:
passwd2 = getpass('confirm password: ')
if passwd == passwd2:
break
- print 'password doesn\'t match'
+ print('password doesn\'t match')
passwd = getpass('password: ')
# XXX decode password using stdin encoding then encode it using appl'encoding
return user, passwd
--- a/setup.py Thu Mar 06 15:55:33 2014 +0100
+++ b/setup.py Thu Nov 12 10:52:28 2015 +0100
@@ -42,7 +42,7 @@
from __pkginfo__ import modname, version, license, description, web, \
author, author_email
-long_description = file('README').read()
+long_description = open('README').read()
# import optional features
import __pkginfo__
@@ -51,7 +51,7 @@
for entry in ("__depends__",): # "__recommends__"):
requires.update(getattr(__pkginfo__, entry, {}))
install_requires = [("%s %s" % (d, v and v or "")).strip()
- for d, v in requires.iteritems()]
+ for d, v in requires.items()]
else:
install_requires = []
--- a/skeleton/DISTNAME.spec.tmpl Thu Mar 06 15:55:33 2014 +0100
+++ b/skeleton/DISTNAME.spec.tmpl Thu Nov 12 10:52:28 2015 +0100
@@ -21,6 +21,7 @@
BuildRequires: %%{python} %%{python}-setuptools
Requires: cubicweb >= %(version)s
+Requires: %%{python}-six >= 1.4.0
%%description
%(longdesc)s
--- a/skeleton/debian/control.tmpl Thu Mar 06 15:55:33 2014 +0100
+++ b/skeleton/debian/control.tmpl Thu Nov 12 10:52:28 2015 +0100
@@ -12,6 +12,7 @@
Architecture: all
Depends:
cubicweb-common (>= %(version)s),
+ python-six (>= 1.4.0),
${python:Depends},
${misc:Depends},
Description: %(shortdesc)s
--- a/skeleton/debian/rules Thu Mar 06 15:55:33 2014 +0100
+++ b/skeleton/debian/rules Thu Nov 12 10:52:28 2015 +0100
@@ -5,10 +5,5 @@
%:
dh $@ --with python2
-override_dh_auto_install:
- dh_auto_install
- # remove generated .egg-info file
- rm -rf debian/*/usr/lib/python*
-
override_dh_python2:
dh_python2 -i /usr/share/cubicweb
--- a/skeleton/setup.py Thu Mar 06 15:55:33 2014 +0100
+++ b/skeleton/setup.py Thu Nov 12 10:52:28 2015 +0100
@@ -44,7 +44,7 @@
author, author_email, classifiers
if exists('README'):
- long_description = file('README').read()
+ long_description = open('README').read()
else:
long_description = ''
@@ -55,7 +55,7 @@
for entry in ("__depends__",): # "__recommends__"):
requires.update(getattr(__pkginfo__, entry, {}))
install_requires = [("%s %s" % (d, v and v or "")).strip()
- for d, v in requires.iteritems()]
+ for d, v in requires.items()]
else:
install_requires = []
--- a/sobjects/__init__.py Thu Mar 06 15:55:33 2014 +0100
+++ b/sobjects/__init__.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,11 +20,11 @@
import os.path as osp
def registration_callback(vreg):
- vreg.register_all(globals().itervalues(), __name__)
+ vreg.register_all(globals().values(), __name__)
global URL_MAPPING
URL_MAPPING = {}
if vreg.config.apphome:
url_mapping_file = osp.join(vreg.config.apphome, 'urlmapping.py')
if osp.exists(url_mapping_file):
- URL_MAPPING = eval(file(url_mapping_file).read())
+ URL_MAPPING = eval(open(url_mapping_file).read())
vreg.info('using url mapping %s from %s', URL_MAPPING, url_mapping_file)
--- a/sobjects/cwxmlparser.py Thu Mar 06 15:55:33 2014 +0100
+++ b/sobjects/cwxmlparser.py Thu Nov 12 10:52:28 2015 +0100
@@ -32,9 +32,11 @@
"""
from datetime import datetime, time
-import urlparse
import urllib
+from six import text_type
+from six.moves.urllib.parse import urlparse, urlunparse, parse_qs, urlencode
+
from logilab.common.date import todate, totime
from logilab.common.textutils import splitstrip, text_to_dict
from logilab.common.decorators import classproperty
@@ -50,7 +52,7 @@
# XXX see cubicweb.cwvreg.YAMS_TO_PY
# XXX see cubicweb.web.views.xmlrss.SERIALIZERS
DEFAULT_CONVERTERS = BASE_CONVERTERS.copy()
-DEFAULT_CONVERTERS['String'] = unicode
+DEFAULT_CONVERTERS['String'] = text_type
DEFAULT_CONVERTERS['Password'] = lambda x: x.encode('utf8')
def convert_date(ustr):
return todate(datetime.strptime(ustr, '%Y-%m-%d'))
@@ -124,7 +126,7 @@
def list_actions(self):
reg = self._cw.vreg['components']
- return sorted(clss[0].action for rid, clss in reg.iteritems()
+ return sorted(clss[0].action for rid, clss in reg.items()
if rid.startswith('cw.entityxml.action.'))
# mapping handling #########################################################
@@ -204,7 +206,7 @@
* `rels` is for relations and structured as
{role: {relation: [(related item, related rels)...]}
"""
- entity = self.extid2entity(str(item['cwuri']), item['cwtype'],
+ entity = self.extid2entity(item['cwuri'].encode('ascii'), item['cwtype'],
cwsource=item['cwsource'], item=item,
raise_on_error=raise_on_error)
if entity is None:
@@ -220,7 +222,7 @@
def process_relations(self, entity, rels):
etype = entity.cw_etype
- for (rtype, role, action), rules in self.source.mapping.get(etype, {}).iteritems():
+ for (rtype, role, action), rules in self.source.mapping.get(etype, {}).items():
try:
related_items = rels[role][rtype]
except KeyError:
@@ -242,14 +244,14 @@
def normalize_url(self, url):
"""overridden to add vid=xml if vid is not set in the qs"""
url = super(CWEntityXMLParser, self).normalize_url(url)
- purl = urlparse.urlparse(url)
+ purl = urlparse(url)
if purl.scheme in ('http', 'https'):
- params = urlparse.parse_qs(purl.query)
+ params = parse_qs(purl.query)
if 'vid' not in params:
params['vid'] = ['xml']
purl = list(purl)
- purl[4] = urllib.urlencode(params, doseq=True)
- return urlparse.urlunparse(purl)
+ purl[4] = urlencode(params, doseq=True)
+ return urlunparse(purl)
return url
def complete_url(self, url, etype=None, known_relations=None):
@@ -263,8 +265,8 @@
If `known_relations` is given, it should be a dictionary of already
known relations, so they don't get queried again.
"""
- purl = urlparse.urlparse(url)
- params = urlparse.parse_qs(purl.query)
+ purl = urlparse(url)
+ params = parse_qs(purl.query)
if etype is None:
etype = purl.path.split('/')[-1]
try:
@@ -277,8 +279,8 @@
continue
relations.add('%s-%s' % (rtype, role))
purl = list(purl)
- purl[4] = urllib.urlencode(params, doseq=True)
- return urlparse.urlunparse(purl)
+ purl[4] = urlencode(params, doseq=True)
+ return urlunparse(purl)
def complete_item(self, item, rels):
try:
@@ -314,7 +316,7 @@
"""
node = self.node
item = dict(node.attrib.items())
- item['cwtype'] = unicode(node.tag)
+ item['cwtype'] = text_type(node.tag)
item.setdefault('cwsource', None)
try:
item['eid'] = int(item['eid'])
@@ -331,7 +333,7 @@
related += self.parser.parse_etree(child)
elif child.text:
# attribute
- item[child.tag] = unicode(child.text)
+ item[child.tag] = text_type(child.text)
else:
# None attribute (empty tag)
item[child.tag] = None
--- a/sobjects/ldapparser.py Thu Mar 06 15:55:33 2014 +0100
+++ b/sobjects/ldapparser.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,6 +20,8 @@
unlike ldapuser source, this source is copy based and will import ldap content
(beside passwords for authentication) into the system source.
"""
+from six.moves import map, filter
+
from logilab.common.decorators import cached, cachedproperty
from logilab.common.shellutils import generate_password
@@ -48,8 +50,8 @@
def user_source_entities_by_extid(self):
source = self.source
if source.user_base_dn.strip():
- attrs = map(str, source.user_attrs.keys())
- return dict((userdict['dn'], userdict)
+ attrs = list(map(str, source.user_attrs.keys()))
+ return dict((userdict['dn'].encode('ascii'), userdict)
for userdict in source._search(self._cw,
source.user_base_dn,
source.user_base_scope,
@@ -61,7 +63,7 @@
def group_source_entities_by_extid(self):
source = self.source
if source.group_base_dn.strip():
- attrs = map(str, ['modifyTimestamp'] + source.group_attrs.keys())
+ attrs = list(map(str, ['modifyTimestamp'] + list(source.group_attrs.keys())))
return dict((groupdict['dn'], groupdict)
for groupdict in source._search(self._cw,
source.group_base_dn,
@@ -87,10 +89,10 @@
def process(self, url, raise_on_error=False):
"""IDataFeedParser main entry point"""
self.debug('processing ldapfeed source %s %s', self.source, self.searchfilterstr)
- for userdict in self.user_source_entities_by_extid.itervalues():
+ for userdict in self.user_source_entities_by_extid.values():
self._process('CWUser', userdict)
self.debug('processing ldapfeed source %s %s', self.source, self.searchgroupfilterstr)
- for groupdict in self.group_source_entities_by_extid.itervalues():
+ for groupdict in self.group_source_entities_by_extid.values():
self._process('CWGroup', groupdict, raise_on_error=raise_on_error)
def handle_deletion(self, config, cnx, myuris):
@@ -99,11 +101,11 @@
return
if myuris:
byetype = {}
- for extid, (eid, etype) in myuris.iteritems():
+ for extid, (eid, etype) in myuris.items():
if self.is_deleted(extid, etype, eid):
byetype.setdefault(etype, []).append(str(eid))
- for etype, eids in byetype.iteritems():
+ for etype, eids in byetype.items():
if etype != 'CWUser':
continue
self.info('deactivate %s %s entities', len(eids), etype)
@@ -123,7 +125,7 @@
self.info('user %s reactivated', entity.login)
mdate = attrs.get('modification_date')
if not mdate or mdate > entity.modification_date:
- attrs = dict( (k, v) for k, v in attrs.iteritems()
+ attrs = dict( (k, v) for k, v in attrs.items()
if v != getattr(entity, k))
if attrs:
entity.cw_set(**attrs)
@@ -135,9 +137,9 @@
if tdict is None:
tdict = {}
if etype == 'CWUser':
- items = self.source.user_attrs.iteritems()
+ items = self.source.user_attrs.items()
elif etype == 'CWGroup':
- items = self.source.group_attrs.iteritems()
+ items = self.source.group_attrs.items()
for sattr, tattr in items:
if tattr not in self.non_attribute_keys:
try:
@@ -174,8 +176,8 @@
# all CWUsers must be treated before CWGroups to have the in_group relation
# set correctly in _associate_ldapusers
elif etype == 'CWUser':
- groups = filter(None, [self._get_group(name)
- for name in self.source.user_default_groups])
+ groups = list(filter(None, [self._get_group(name)
+ for name in self.source.user_default_groups]))
if groups:
entity.cw_set(in_group=groups)
self._process_email(entity, sourceparams)
@@ -184,7 +186,7 @@
def is_deleted(self, extidplus, etype, eid):
try:
- extid, _ = extidplus.rsplit('@@', 1)
+ extid, _ = extidplus.rsplit(b'@@', 1)
except ValueError:
# for some reason extids here tend to come in both forms, e.g:
# dn, dn@@Babar
@@ -204,14 +206,14 @@
{'addr': emailaddr})
if not rset:
# not found, create it. first forge an external id
- emailextid = userdict['dn'] + '@@' + emailaddr.encode('utf-8')
+ emailextid = userdict['dn'] + '@@' + emailaddr
email = self.extid2entity(emailextid, 'EmailAddress',
address=emailaddr)
entity.cw_set(use_email=email)
elif self.sourceuris:
# pop from sourceuris anyway, else email may be removed by the
# source once import is finished
- uri = userdict['dn'] + '@@' + emailaddr.encode('utf-8')
+ uri = userdict['dn'] + '@@' + emailaddr
self.sourceuris.pop(uri, None)
# XXX else check use_email relation?
--- a/sobjects/notification.py Thu Mar 06 15:55:33 2014 +0100
+++ b/sobjects/notification.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,10 +18,12 @@
"""some views to handle notification on data changes"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from itertools import repeat
+from six import text_type
+
from logilab.common.textutils import normalize_text
from logilab.common.deprecation import class_renamed, class_moved, deprecated
from logilab.common.registry import yes
@@ -181,8 +183,8 @@
def context(self, **kwargs):
entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0)
- for key, val in kwargs.iteritems():
- if val and isinstance(val, unicode) and val.strip():
+ for key, val in kwargs.items():
+ if val and isinstance(val, text_type) and val.strip():
kwargs[key] = self._cw._(val)
kwargs.update({'user': self.user_data['login'],
'eid': entity.eid,
@@ -255,7 +257,7 @@
def format_value(value):
- if isinstance(value, unicode):
+ if isinstance(value, text_type):
return u'"%s"' % value
return value
--- a/sobjects/services.py Thu Mar 06 15:55:33 2014 +0100
+++ b/sobjects/services.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,6 +19,8 @@
import threading
+from six import text_type
+
from yams.schema import role_name
from cubicweb import ValidationError
from cubicweb.server import Service
@@ -94,9 +96,9 @@
results = {}
counters, ocounters, garbage = gc_info(lookupclasses,
viewreferrersclasses=())
- values = sorted(counters.iteritems(), key=lambda x: x[1], reverse=True)
+ values = sorted(counters.items(), key=lambda x: x[1], reverse=True)
results['lookupclasses'] = values
- values = sorted(ocounters.iteritems(), key=lambda x: x[1], reverse=True)[:nmax]
+ values = sorted(ocounters.items(), key=lambda x: x[1], reverse=True)[:nmax]
results['referenced'] = values
results['unreachable'] = garbage
return results
@@ -129,7 +131,7 @@
qname = role_name('login', 'subject')
raise ValidationError(None, {qname: errmsg % login})
- if isinstance(password, unicode):
+ if isinstance(password, text_type):
# password should *always* be utf8 encoded
password = password.encode('UTF8')
cwuserkwargs['login'] = login
--- a/sobjects/supervising.py Thu Mar 06 15:55:33 2014 +0100
+++ b/sobjects/supervising.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,7 @@
"""some hooks and views to handle supervising of any data changes"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from cubicweb import UnknownEid
from cubicweb.predicates import none_rset
@@ -128,13 +128,15 @@
# XXX print changes
self.w(u' %s' % changedescr.entity.absolute_url())
- def delete_entity(self, (eid, etype, title)):
+ def delete_entity(self, args):
+ eid, etype, title = args
msg = self._cw._('deleted %(etype)s #%(eid)s (%(title)s)')
etype = display_name(self._cw, etype).lower()
self.w(msg % locals())
- def change_state(self, (entity, fromstate, tostate)):
+ def change_state(self, args):
_ = self._cw._
+ entity, fromstate, tostate = args
msg = _('changed state of %(etype)s #%(eid)s (%(title)s)')
self.w(u'%s\n' % (msg % self._entity_context(entity)))
self.w(_(' from state %(fromstate)s to state %(tostate)s\n' %
--- a/sobjects/test/unittest_cwxmlparser.py Thu Mar 06 15:55:33 2014 +0100
+++ b/sobjects/test/unittest_cwxmlparser.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,7 +17,8 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
from datetime import datetime
-from urlparse import urlsplit, parse_qsl
+
+from six.moves.urllib.parse import urlsplit, parse_qsl
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.sobjects.cwxmlparser import CWEntityXMLParser
@@ -214,8 +215,8 @@
with self.admin_access.web_request() as req:
user = req.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0)
- self.assertEqual(user.creation_date, datetime(2010, 01, 22, 10, 27, 59))
- self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06))
+ self.assertEqual(user.creation_date, datetime(2010, 1, 22, 10, 27, 59))
+ self.assertEqual(user.modification_date, datetime(2011, 1, 25, 14, 14, 6))
self.assertEqual(user.cwuri, 'http://pouet.org/5')
self.assertEqual(user.cw_source[0].name, 'myfeed')
self.assertEqual(user.absolute_url(), 'http://pouet.org/5')
@@ -299,8 +300,8 @@
with self.repo.internal_cnx() as cnx:
stats = dfsource.pull_data(cnx, force=True, raise_on_error=True)
user = cnx.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0)
- self.assertEqual(user.creation_date, datetime(2010, 01, 22, 10, 27, 59))
- self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06))
+ self.assertEqual(user.creation_date, datetime(2010, 1, 22, 10, 27, 59))
+ self.assertEqual(user.modification_date, datetime(2011, 1, 25, 14, 14, 6))
self.assertEqual(user.cwuri, 'http://pouet.org/5')
self.assertEqual(user.cw_source[0].name, 'myfeed')
--- a/sobjects/test/unittest_supervising.py Thu Mar 06 15:55:33 2014 +0100
+++ b/sobjects/test/unittest_supervising.py Thu Nov 12 10:52:28 2015 +0100
@@ -77,7 +77,7 @@
# check prepared email
op._prepare_email()
self.assertEqual(len(op.to_send), 1)
- self.assert_(op.to_send[0][0])
+ self.assertTrue(op.to_send[0][0])
self.assertEqual(op.to_send[0][1], ['test@logilab.fr'])
cnx.commit()
# some other changes #######
--- a/spa2rql.py Thu Mar 06 15:55:33 2014 +0100
+++ b/spa2rql.py Thu Nov 12 10:52:28 2015 +0100
@@ -146,9 +146,9 @@
def finalize(self):
"""return corresponding rql query (string) / args (dict)"""
- for varname, ptypes in self.possible_types.iteritems():
+ for varname, ptypes in self.possible_types.items():
if len(ptypes) == 1:
- self.restrictions.append('%s is %s' % (varname, iter(ptypes).next()))
+ self.restrictions.append('%s is %s' % (varname, next(iter(ptypes))))
unions = []
for releq, subjvar, obj in self.union_params:
thisunions = []
--- a/test/data/schema.py Thu Mar 06 15:55:33 2014 +0100
+++ b/test/data/schema.py Thu Nov 12 10:52:28 2015 +0100
@@ -23,7 +23,7 @@
RQLConstraint, RQLVocabularyConstraint)
-_ = unicode
+from cubicweb import _
class Personne(EntityType):
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/test/unittest_binary.py Thu Nov 12 10:52:28 2015 +0100
@@ -0,0 +1,54 @@
+from six import PY2
+
+from unittest import TestCase
+from tempfile import NamedTemporaryFile
+import os.path as osp
+
+from logilab.common.shellutils import tempdir
+from cubicweb import Binary
+
+
+class BinaryTC(TestCase):
+ def test_init(self):
+ Binary()
+ Binary(b'toto')
+ Binary(bytearray(b'toto'))
+ if PY2:
+ Binary(buffer('toto'))
+ else:
+ Binary(memoryview(b'toto'))
+ with self.assertRaises((AssertionError, TypeError)):
+ # TypeError is raised by BytesIO if python runs with -O
+ Binary(u'toto')
+
+ def test_write(self):
+ b = Binary()
+ b.write(b'toto')
+ b.write(bytearray(b'toto'))
+ if PY2:
+ b.write(buffer('toto'))
+ else:
+ b.write(memoryview(b'toto'))
+ with self.assertRaises((AssertionError, TypeError)):
+ # TypeError is raised by BytesIO if python runs with -O
+ b.write(u'toto')
+
+ def test_gzpickle_roundtrip(self):
+ old = (u'foo', b'bar', 42, {})
+ new = Binary.zpickle(old).unzpickle()
+ self.assertEqual(old, new)
+ self.assertIsNot(old, new)
+
+ def test_from_file_to_file(self):
+ with tempdir() as dpath:
+ fpath = osp.join(dpath, 'binary.bin')
+ with open(fpath, 'wb') as fobj:
+ Binary(b'binaryblob').to_file(fobj)
+
+ bobj = Binary.from_file(fpath)
+ self.assertEqual(bobj.getvalue(), b'binaryblob')
+
+
+if __name__ == '__main__':
+ from unittest import main
+ main()
--- a/test/unittest_cwctl.py Thu Mar 06 15:55:33 2014 +0100
+++ b/test/unittest_cwctl.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,10 @@
import sys
import os
from os.path import join
-from cStringIO import StringIO
+from io import StringIO, BytesIO
+
+from six import PY2
+
from logilab.common.testlib import TestCase, unittest_main
from cubicweb.cwconfig import CubicWebConfiguration
@@ -30,7 +33,7 @@
class CubicWebCtlTC(TestCase):
def setUp(self):
- self.stream = StringIO()
+ self.stream = BytesIO() if PY2 else StringIO()
sys.stdout = self.stream
def tearDown(self):
sys.stdout = sys.__stdout__
@@ -57,7 +60,7 @@
funcname=None)
for script, args in scripts.items():
scriptname = os.path.join(self.datadir, 'scripts', script)
- self.assert_(os.path.exists(scriptname))
+ self.assertTrue(os.path.exists(scriptname))
mih.cmd_process_script(scriptname, None, scriptargs=args)
--- a/test/unittest_entity.py Thu Mar 06 15:55:33 2014 +0100
+++ b/test/unittest_entity.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,6 +20,8 @@
from datetime import datetime
+from six import text_type
+
from logilab.common import tempattr
from logilab.common.decorators import clear_cache
@@ -644,7 +646,7 @@
def test_printable_value_bytes(self):
with self.admin_access.web_request() as req:
- e = req.create_entity('FakeFile', data=Binary('lambda x: 1'), data_format=u'text/x-python',
+ e = req.create_entity('FakeFile', data=Binary(b'lambda x: 1'), data_format=u'text/x-python',
data_encoding=u'ascii', data_name=u'toto.py')
from cubicweb import mttransforms
if mttransforms.HAS_PYGMENTS_TRANSFORMS:
@@ -663,8 +665,10 @@
<span style="color: #C00000;">lambda</span> <span style="color: #000000;">x</span><span style="color: #0000C0;">:</span> <span style="color: #0080C0;">1</span>
</pre>''')
- e = req.create_entity('FakeFile', data=Binary('*héhéhé*'), data_format=u'text/rest',
- data_encoding=u'utf-8', data_name=u'toto.txt')
+ e = req.create_entity('FakeFile',
+ data=Binary(u'*héhéhé*'.encode('utf-8')),
+ data_format=u'text/rest',
+ data_encoding=u'utf-8', data_name=u'toto.txt')
self.assertEqual(e.printable_value('data'),
u'<p><em>héhéhé</em></p>')
@@ -717,7 +721,7 @@
e = self.vreg['etypes'].etype_class('FakeFile')(req)
e.cw_attr_cache['description'] = 'du <em>html</em>'
e.cw_attr_cache['description_format'] = 'text/html'
- e.cw_attr_cache['data'] = Binary('some <em>data</em>')
+ e.cw_attr_cache['data'] = Binary(b'some <em>data</em>')
e.cw_attr_cache['data_name'] = 'an html file'
e.cw_attr_cache['data_format'] = 'text/html'
e.cw_attr_cache['data_encoding'] = 'ascii'
@@ -769,11 +773,11 @@
# ambiguity test
person2 = req.create_entity('Personne', prenom=u'remi', nom=u'doe')
person.cw_clear_all_caches()
- self.assertEqual(person.rest_path(), unicode(person.eid))
- self.assertEqual(person2.rest_path(), unicode(person2.eid))
+ self.assertEqual(person.rest_path(), text_type(person.eid))
+ self.assertEqual(person2.rest_path(), text_type(person2.eid))
# unique attr with None value (nom in this case)
friend = req.create_entity('Ami', prenom=u'bob')
- self.assertEqual(friend.rest_path(), unicode(friend.eid))
+ self.assertEqual(friend.rest_path(), text_type(friend.eid))
# 'ref' below is created without the unique but not required
# attribute, make sur that the unique _and_ required 'ean' is used
# as the rest attribute
--- a/test/unittest_migration.py Thu Mar 06 15:55:33 2014 +0100
+++ b/test/unittest_migration.py Thu Nov 12 10:52:28 2015 +0100
@@ -1,4 +1,4 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -22,7 +22,7 @@
from cubicweb.devtools import TestServerConfiguration
from cubicweb.cwconfig import CubicWebConfiguration
-from cubicweb.migration import MigrationHelper, filter_scripts
+from cubicweb.migration import MigrationHelper, filter_scripts, version_strictly_lower
from cubicweb.server.migractions import ServerMigrationHelper
@@ -76,8 +76,6 @@
def test_filter_scripts_for_mode(self):
config = CubicWebConfiguration('data')
config.verbosity = 0
- self.assertNotIsInstance(config.migration_handler(), ServerMigrationHelper)
- self.assertIsInstance(config.migration_handler(), MigrationHelper)
config = self.config
config.__class__.name = 'repository'
self.assertListEqual(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)),
@@ -91,6 +89,10 @@
((0, 1 ,0), TMIGRDIR+'0.1.0_repository.py')])
config.__class__.name = 'repository'
+ def test_version_strictly_lower(self):
+ self.assertTrue(version_strictly_lower(None, '1.0.0'))
+ self.assertFalse(version_strictly_lower('1.0.0', None))
+
from cubicweb.devtools import ApptestConfiguration, get_test_db_handler
--- a/test/unittest_predicates.py Thu Mar 06 15:55:33 2014 +0100
+++ b/test/unittest_predicates.py Thu Nov 12 10:52:28 2015 +0100
@@ -26,8 +26,9 @@
from cubicweb import Binary
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.predicates import (is_instance, adaptable, match_kwargs, match_user_groups,
- multi_lines_rset, score_entity, is_in_state,
- rql_condition, relation_possible, match_form_params)
+ multi_lines_rset, score_entity, is_in_state,
+ rql_condition, relation_possible, match_form_params,
+ paginated_rset)
from cubicweb.selectors import on_transition # XXX on_transition is deprecated
from cubicweb.view import EntityAdapter
from cubicweb.web import action
@@ -37,7 +38,7 @@
class ImplementsTC(CubicWebTC):
def test_etype_priority(self):
with self.admin_access.web_request() as req:
- f = req.create_entity('FakeFile', data_name=u'hop.txt', data=Binary('hop'),
+ f = req.create_entity('FakeFile', data_name=u'hop.txt', data=Binary(b'hop'),
data_format=u'text/plain')
rset = f.as_rset()
anyscore = is_instance('Any')(f.__class__, req, rset=rset)
@@ -487,6 +488,33 @@
self.assertEqual(str(cm.exception),
"match_form_params() positional arguments must be strings")
+class PaginatedTC(CubicWebTC):
+ """tests for paginated_rset predicate"""
+
+ def setup_database(self):
+ with self.admin_access.repo_cnx() as cnx:
+ for i in range(30):
+ cnx.execute('INSERT CWGroup G: G name "group{}"'.format(i))
+ cnx.commit()
+
+ def test_paginated_rset(self):
+ default_nb_pages = 1
+ web_request = self.admin_access.web_request
+ with web_request() as req:
+ rset = req.execute('Any G WHERE G is CWGroup')
+ self.assertEqual(len(rset), 34)
+ with web_request(vid='list', page_size='10') as req:
+ self.assertEqual(paginated_rset()(None, req, rset), default_nb_pages)
+ with web_request(vid='list', page_size='20') as req:
+ self.assertEqual(paginated_rset()(None, req, rset), default_nb_pages)
+ with web_request(vid='list', page_size='50') as req:
+ self.assertEqual(paginated_rset()(None, req, rset), 0)
+ with web_request(vid='list', page_size='10/') as req:
+ self.assertEqual(paginated_rset()(None, req, rset), 0)
+ with web_request(vid='list', page_size='.1') as req:
+ self.assertEqual(paginated_rset()(None, req, rset), 0)
+ with web_request(vid='list', page_size='not_an_int') as req:
+ self.assertEqual(paginated_rset()(None, req, rset), 0)
if __name__ == '__main__':
unittest_main()
--- a/test/unittest_rqlrewrite.py Thu Mar 06 15:55:33 2014 +0100
+++ b/test/unittest_rqlrewrite.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,6 +16,8 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+from six import string_types
+
from logilab.common.testlib import unittest_main, TestCase
from logilab.common.testlib import mock_object
from yams import BadSchemaDefinition
@@ -67,9 +69,9 @@
rewriter = _prepare_rewriter(rqlrewrite.RQLRewriter, kwargs)
snippets = []
for v, exprs in sorted(snippets_map.items()):
- rqlexprs = [isinstance(snippet, basestring)
- and mock_object(snippet_rqlst=parse('Any X WHERE '+snippet).children[0],
- expression='Any X WHERE '+snippet)
+ rqlexprs = [isinstance(snippet, string_types)
+ and mock_object(snippet_rqlst=parse(u'Any X WHERE '+snippet).children[0],
+ expression=u'Any X WHERE '+snippet)
or snippet
for snippet in exprs]
snippets.append((dict([v]), rqlexprs))
@@ -90,7 +92,7 @@
selects.append(stmt)
assert node in selects, (node, selects)
for stmt in selects:
- for var in stmt.defined_vars.itervalues():
+ for var in stmt.defined_vars.values():
assert var.stinfo['references']
vrefmap = vrefmaps[stmt]
assert not (var.stinfo['references'] ^ vrefmap[var.name]), (node.as_string(), var, var.stinfo['references'], vrefmap[var.name])
@@ -108,90 +110,90 @@
def test_base_var(self):
constraint = ('X in_state S, U in_group G, P require_state S,'
'P name "read", P require_group G')
- rqlst = parse('Card C')
+ rqlst = parse(u'Card C')
rewrite(rqlst, {('C', 'X'): (constraint,)}, {})
self.assertEqual(rqlst.as_string(),
- u"Any C WHERE C is Card, B eid %(D)s, "
- "EXISTS(C in_state A, B in_group E, F require_state A, "
- "F name 'read', F require_group E, A is State, E is CWGroup, F is CWPermission)")
+ u'Any C WHERE C is Card, B eid %(D)s, '
+ 'EXISTS(C in_state A, B in_group E, F require_state A, '
+ 'F name "read", F require_group E, A is State, E is CWGroup, F is CWPermission)')
def test_multiple_var(self):
card_constraint = ('X in_state S, U in_group G, P require_state S,'
'P name "read", P require_group G')
affaire_constraints = ('X ref LIKE "PUBLIC%"', 'U in_group G, G name "public"')
kwargs = {'u':2}
- rqlst = parse('Any S WHERE S documented_by C, C eid %(u)s')
+ rqlst = parse(u'Any S WHERE S documented_by C, C eid %(u)s')
rewrite(rqlst, {('C', 'X'): (card_constraint,), ('S', 'X'): affaire_constraints},
kwargs)
self.assertMultiLineEqual(
rqlst.as_string(),
- "Any S WHERE S documented_by C, C eid %(u)s, B eid %(D)s, "
- "EXISTS(C in_state A, B in_group E, F require_state A, "
- "F name 'read', F require_group E, A is State, E is CWGroup, F is CWPermission), "
- "(EXISTS(S ref LIKE 'PUBLIC%')) OR (EXISTS(B in_group G, G name 'public', G is CWGroup)), "
- "S is Affaire")
+ u'Any S WHERE S documented_by C, C eid %(u)s, B eid %(D)s, '
+ 'EXISTS(C in_state A, B in_group E, F require_state A, '
+ 'F name "read", F require_group E, A is State, E is CWGroup, F is CWPermission), '
+ '(EXISTS(S ref LIKE "PUBLIC%")) OR (EXISTS(B in_group G, G name "public", G is CWGroup)), '
+ 'S is Affaire')
self.assertIn('D', kwargs)
def test_or(self):
constraint = '(X identity U) OR (X in_state ST, CL identity U, CL in_state ST, ST name "subscribed")'
- rqlst = parse('Any S WHERE S owned_by C, C eid %(u)s, S is in (CWUser, CWGroup)')
+ rqlst = parse(u'Any S WHERE S owned_by C, C eid %(u)s, S is in (CWUser, CWGroup)')
rewrite(rqlst, {('C', 'X'): (constraint,)}, {'u':1})
self.assertEqual(rqlst.as_string(),
- "Any S WHERE S owned_by C, C eid %(u)s, S is IN(CWUser, CWGroup), A eid %(B)s, "
- "EXISTS((C identity A) OR (C in_state D, E identity A, "
- "E in_state D, D name 'subscribed'), D is State, E is CWUser)")
+ 'Any S WHERE S owned_by C, C eid %(u)s, S is IN(CWUser, CWGroup), A eid %(B)s, '
+ 'EXISTS((C identity A) OR (C in_state D, E identity A, '
+ 'E in_state D, D name "subscribed"), D is State, E is CWUser)')
def test_simplified_rqlst(self):
constraint = ('X in_state S, U in_group G, P require_state S,'
'P name "read", P require_group G')
- rqlst = parse('Any 2') # this is the simplified rql st for Any X WHERE X eid 12
+ rqlst = parse(u'Any 2') # this is the simplified rql st for Any X WHERE X eid 12
rewrite(rqlst, {('2', 'X'): (constraint,)}, {})
self.assertEqual(rqlst.as_string(),
- u"Any 2 WHERE B eid %(C)s, "
- "EXISTS(2 in_state A, B in_group D, E require_state A, "
- "E name 'read', E require_group D, A is State, D is CWGroup, E is CWPermission)")
+ u'Any 2 WHERE B eid %(C)s, '
+ 'EXISTS(2 in_state A, B in_group D, E require_state A, '
+ 'E name "read", E require_group D, A is State, D is CWGroup, E is CWPermission)')
def test_optional_var_1(self):
constraint = ('X in_state S, U in_group G, P require_state S,'
'P name "read", P require_group G')
- rqlst = parse('Any A,C WHERE A documented_by C?')
+ rqlst = parse(u'Any A,C WHERE A documented_by C?')
rewrite(rqlst, {('C', 'X'): (constraint,)}, {})
self.assertEqual(rqlst.as_string(),
- "Any A,C WHERE A documented_by C?, A is Affaire "
- "WITH C BEING "
- "(Any C WHERE EXISTS(C in_state B, D in_group F, G require_state B, G name 'read', "
- "G require_group F), D eid %(A)s, C is Card)")
+ u'Any A,C WHERE A documented_by C?, A is Affaire '
+ 'WITH C BEING '
+ '(Any C WHERE EXISTS(C in_state B, D in_group F, G require_state B, G name "read", '
+ 'G require_group F), D eid %(A)s, C is Card)')
def test_optional_var_2(self):
constraint = ('X in_state S, U in_group G, P require_state S,'
'P name "read", P require_group G')
- rqlst = parse('Any A,C,T WHERE A documented_by C?, C title T')
+ rqlst = parse(u'Any A,C,T WHERE A documented_by C?, C title T')
rewrite(rqlst, {('C', 'X'): (constraint,)}, {})
self.assertEqual(rqlst.as_string(),
- "Any A,C,T WHERE A documented_by C?, A is Affaire "
- "WITH C,T BEING "
- "(Any C,T WHERE C title T, EXISTS(C in_state B, D in_group F, "
- "G require_state B, G name 'read', G require_group F), "
- "D eid %(A)s, C is Card)")
+ u'Any A,C,T WHERE A documented_by C?, A is Affaire '
+ 'WITH C,T BEING '
+ '(Any C,T WHERE C title T, EXISTS(C in_state B, D in_group F, '
+ 'G require_state B, G name "read", G require_group F), '
+ 'D eid %(A)s, C is Card)')
def test_optional_var_3(self):
constraint1 = ('X in_state S, U in_group G, P require_state S,'
'P name "read", P require_group G')
constraint2 = 'X in_state S, S name "public"'
- rqlst = parse('Any A,C,T WHERE A documented_by C?, C title T')
+ rqlst = parse(u'Any A,C,T WHERE A documented_by C?, C title T')
rewrite(rqlst, {('C', 'X'): (constraint1, constraint2)}, {})
self.assertEqual(rqlst.as_string(),
- "Any A,C,T WHERE A documented_by C?, A is Affaire "
- "WITH C,T BEING (Any C,T WHERE C title T, "
- "(EXISTS(C in_state B, D in_group F, G require_state B, G name 'read', G require_group F)) "
- "OR (EXISTS(C in_state E, E name 'public')), "
- "D eid %(A)s, C is Card)")
+ u'Any A,C,T WHERE A documented_by C?, A is Affaire '
+ 'WITH C,T BEING (Any C,T WHERE C title T, '
+ '(EXISTS(C in_state B, D in_group F, G require_state B, G name "read", G require_group F)) '
+ 'OR (EXISTS(C in_state E, E name "public")), '
+ 'D eid %(A)s, C is Card)')
def test_optional_var_4(self):
constraint1 = 'A created_by U, X documented_by A'
constraint2 = 'A created_by U, X concerne A'
constraint3 = 'X created_by U'
- rqlst = parse('Any X,LA,Y WHERE LA? documented_by X, LA concerne Y')
+ rqlst = parse(u'Any X,LA,Y WHERE LA? documented_by X, LA concerne Y')
rewrite(rqlst, {('LA', 'X'): (constraint1, constraint2),
('X', 'X'): (constraint3,),
('Y', 'X'): (constraint3,)}, {})
@@ -208,7 +210,7 @@
# see test of the same name in RewriteFullTC: original problem is
# unreproducible here because it actually lies in
# RQLRewriter.insert_local_checks
- rqlst = parse('Any A,AR,X,CD WHERE A concerne X?, A ref AR, A eid %(a)s, X creation_date CD')
+ rqlst = parse(u'Any A,AR,X,CD WHERE A concerne X?, A ref AR, A eid %(a)s, X creation_date CD')
rewrite(rqlst, {('X', 'X'): ('X created_by U',),}, {'a': 3})
self.assertEqual(rqlst.as_string(),
u'Any A,AR,X,CD WHERE A concerne X?, A ref AR, A eid %(a)s WITH X,CD BEING (Any X,CD WHERE X creation_date CD, EXISTS(X created_by B), B eid %(A)s, X is IN(Division, Note, Societe))')
@@ -216,7 +218,7 @@
def test_optional_var_inlined(self):
c1 = ('X require_permission P')
c2 = ('X inlined_card O, O require_permission P')
- rqlst = parse('Any C,A,R WHERE A? inlined_card C, A ref R')
+ rqlst = parse(u'Any C,A,R WHERE A? inlined_card C, A ref R')
rewrite(rqlst, {('C', 'X'): (c1,),
('A', 'X'): (c2,),
}, {})
@@ -231,7 +233,7 @@
# def test_optional_var_inlined_has_perm(self):
# c1 = ('X require_permission P')
# c2 = ('X inlined_card O, U has_read_permission O')
- # rqlst = parse('Any C,A,R WHERE A? inlined_card C, A ref R')
+ # rqlst = parse(u'Any C,A,R WHERE A? inlined_card C, A ref R')
# rewrite(rqlst, {('C', 'X'): (c1,),
# ('A', 'X'): (c2,),
# }, {})
@@ -241,7 +243,7 @@
def test_optional_var_inlined_imbricated_error(self):
c1 = ('X require_permission P')
c2 = ('X inlined_card O, O require_permission P')
- rqlst = parse('Any C,A,R,A2,R2 WHERE A? inlined_card C, A ref R,A2? inlined_card C, A2 ref R2')
+ rqlst = parse(u'Any C,A,R,A2,R2 WHERE A? inlined_card C, A ref R,A2? inlined_card C, A2 ref R2')
self.assertRaises(BadSchemaDefinition,
rewrite, rqlst, {('C', 'X'): (c1,),
('A', 'X'): (c2,),
@@ -251,7 +253,7 @@
def test_optional_var_inlined_linked(self):
c1 = ('X require_permission P')
c2 = ('X inlined_card O, O require_permission P')
- rqlst = parse('Any A,W WHERE A inlined_card C?, C inlined_note N, '
+ rqlst = parse(u'Any A,W WHERE A inlined_card C?, C inlined_note N, '
'N inlined_affaire W')
rewrite(rqlst, {('C', 'X'): (c1,)}, {})
self.assertEqual(rqlst.as_string(),
@@ -265,70 +267,70 @@
# relation used in the rql expression can be ignored and S replaced by
# the variable from the incoming query
snippet = ('X in_state S, S name "hop"')
- rqlst = parse('Card C WHERE C in_state STATE')
+ rqlst = parse(u'Card C WHERE C in_state STATE')
rewrite(rqlst, {('C', 'X'): (snippet,)}, {})
self.assertEqual(rqlst.as_string(),
- "Any C WHERE C in_state STATE, C is Card, "
- "EXISTS(STATE name 'hop'), STATE is State")
+ 'Any C WHERE C in_state STATE, C is Card, '
+ 'EXISTS(STATE name "hop"), STATE is State')
def test_relation_optimization_1_rhs(self):
snippet = ('TW subworkflow_exit X, TW name "hop"')
- rqlst = parse('WorkflowTransition C WHERE C subworkflow_exit EXIT')
+ rqlst = parse(u'WorkflowTransition C WHERE C subworkflow_exit EXIT')
rewrite(rqlst, {('EXIT', 'X'): (snippet,)}, {})
self.assertEqual(rqlst.as_string(),
- "Any C WHERE C subworkflow_exit EXIT, C is WorkflowTransition, "
- "EXISTS(C name 'hop'), EXIT is SubWorkflowExitPoint")
+ 'Any C WHERE C subworkflow_exit EXIT, C is WorkflowTransition, '
+ 'EXISTS(C name "hop"), EXIT is SubWorkflowExitPoint')
def test_relation_optimization_2_lhs(self):
# optional relation can be shared if also optional in the snippet
snippet = ('X in_state S?, S name "hop"')
- rqlst = parse('Card C WHERE C in_state STATE?')
+ rqlst = parse(u'Card C WHERE C in_state STATE?')
rewrite(rqlst, {('C', 'X'): (snippet,)}, {})
self.assertEqual(rqlst.as_string(),
- "Any C WHERE C in_state STATE?, C is Card, "
- "EXISTS(STATE name 'hop'), STATE is State")
+ 'Any C WHERE C in_state STATE?, C is Card, '
+ 'EXISTS(STATE name "hop"), STATE is State')
def test_relation_optimization_2_rhs(self):
snippet = ('TW? subworkflow_exit X, TW name "hop"')
- rqlst = parse('SubWorkflowExitPoint EXIT WHERE C? subworkflow_exit EXIT')
+ rqlst = parse(u'SubWorkflowExitPoint EXIT WHERE C? subworkflow_exit EXIT')
rewrite(rqlst, {('EXIT', 'X'): (snippet,)}, {})
self.assertEqual(rqlst.as_string(),
- "Any EXIT WHERE C? subworkflow_exit EXIT, EXIT is SubWorkflowExitPoint, "
- "EXISTS(C name 'hop'), C is WorkflowTransition")
+ 'Any EXIT WHERE C? subworkflow_exit EXIT, EXIT is SubWorkflowExitPoint, '
+ 'EXISTS(C name "hop"), C is WorkflowTransition')
def test_relation_optimization_3_lhs(self):
# optional relation in the snippet but not in the orig tree can be shared
snippet = ('X in_state S?, S name "hop"')
- rqlst = parse('Card C WHERE C in_state STATE')
+ rqlst = parse(u'Card C WHERE C in_state STATE')
rewrite(rqlst, {('C', 'X'): (snippet,)}, {})
self.assertEqual(rqlst.as_string(),
- "Any C WHERE C in_state STATE, C is Card, "
- "EXISTS(STATE name 'hop'), STATE is State")
+ 'Any C WHERE C in_state STATE, C is Card, '
+ 'EXISTS(STATE name "hop"), STATE is State')
def test_relation_optimization_3_rhs(self):
snippet = ('TW? subworkflow_exit X, TW name "hop"')
- rqlst = parse('WorkflowTransition C WHERE C subworkflow_exit EXIT')
+ rqlst = parse(u'WorkflowTransition C WHERE C subworkflow_exit EXIT')
rewrite(rqlst, {('EXIT', 'X'): (snippet,)}, {})
self.assertEqual(rqlst.as_string(),
- "Any C WHERE C subworkflow_exit EXIT, C is WorkflowTransition, "
- "EXISTS(C name 'hop'), EXIT is SubWorkflowExitPoint")
+ 'Any C WHERE C subworkflow_exit EXIT, C is WorkflowTransition, '
+ 'EXISTS(C name "hop"), EXIT is SubWorkflowExitPoint')
def test_relation_non_optimization_1_lhs(self):
# but optional relation in the orig tree but not in the snippet can't be shared
snippet = ('X in_state S, S name "hop"')
- rqlst = parse('Card C WHERE C in_state STATE?')
+ rqlst = parse(u'Card C WHERE C in_state STATE?')
rewrite(rqlst, {('C', 'X'): (snippet,)}, {})
self.assertEqual(rqlst.as_string(),
- "Any C WHERE C in_state STATE?, C is Card, "
- "EXISTS(C in_state A, A name 'hop', A is State), STATE is State")
+ 'Any C WHERE C in_state STATE?, C is Card, '
+ 'EXISTS(C in_state A, A name "hop", A is State), STATE is State')
def test_relation_non_optimization_1_rhs(self):
snippet = ('TW subworkflow_exit X, TW name "hop"')
- rqlst = parse('SubWorkflowExitPoint EXIT WHERE C? subworkflow_exit EXIT')
+ rqlst = parse(u'SubWorkflowExitPoint EXIT WHERE C? subworkflow_exit EXIT')
rewrite(rqlst, {('EXIT', 'X'): (snippet,)}, {})
self.assertEqual(rqlst.as_string(),
- "Any EXIT WHERE C? subworkflow_exit EXIT, EXIT is SubWorkflowExitPoint, "
- "EXISTS(A subworkflow_exit EXIT, A name 'hop', A is WorkflowTransition), "
- "C is WorkflowTransition")
+ 'Any EXIT WHERE C? subworkflow_exit EXIT, EXIT is SubWorkflowExitPoint, '
+ 'EXISTS(A subworkflow_exit EXIT, A name "hop", A is WorkflowTransition), '
+ 'C is WorkflowTransition')
def test_relation_non_optimization_2(self):
"""See #3024730"""
@@ -336,7 +338,7 @@
# previously inserted, else this may introduce duplicated results, as N
# will then be shared by multiple EXISTS and so at SQL generation time,
# the table will be in the FROM clause of the outermost query
- rqlst = parse('Any A,C WHERE A inlined_card C')
+ rqlst = parse(u'Any A,C WHERE A inlined_card C')
rewrite(rqlst, {('A', 'X'): ('X inlined_card C, C inlined_note N, N owned_by U',),
('C', 'X'): ('X inlined_note N, N owned_by U',)}, {})
self.assertEqual(rqlst.as_string(),
@@ -348,35 +350,35 @@
def test_unsupported_constraint_1(self):
# CWUser doesn't have require_permission
trinfo_constraint = ('X wf_info_for Y, Y require_permission P, P name "read"')
- rqlst = parse('Any U,T WHERE U is CWUser, T wf_info_for U')
+ rqlst = parse(u'Any U,T WHERE U is CWUser, T wf_info_for U')
self.assertRaises(Unauthorized, rewrite, rqlst, {('T', 'X'): (trinfo_constraint,)}, {})
def test_unsupported_constraint_2(self):
trinfo_constraint = ('X wf_info_for Y, Y require_permission P, P name "read"')
- rqlst = parse('Any U,T WHERE U is CWUser, T wf_info_for U')
+ rqlst = parse(u'Any U,T WHERE U is CWUser, T wf_info_for U')
rewrite(rqlst, {('T', 'X'): (trinfo_constraint, 'X wf_info_for Y, Y in_group G, G name "managers"')}, {})
self.assertEqual(rqlst.as_string(),
- u"Any U,T WHERE U is CWUser, T wf_info_for U, "
- "EXISTS(U in_group B, B name 'managers', B is CWGroup), T is TrInfo")
+ u'Any U,T WHERE U is CWUser, T wf_info_for U, '
+ 'EXISTS(U in_group B, B name "managers", B is CWGroup), T is TrInfo')
def test_unsupported_constraint_3(self):
self.skipTest('raise unauthorized for now')
trinfo_constraint = ('X wf_info_for Y, Y require_permission P, P name "read"')
- rqlst = parse('Any T WHERE T wf_info_for X')
+ rqlst = parse(u'Any T WHERE T wf_info_for X')
rewrite(rqlst, {('T', 'X'): (trinfo_constraint, 'X in_group G, G name "managers"')}, {})
self.assertEqual(rqlst.as_string(),
u'XXX dunno what should be generated')
def test_add_ambiguity_exists(self):
constraint = ('X concerne Y')
- rqlst = parse('Affaire X')
+ rqlst = parse(u'Affaire X')
rewrite(rqlst, {('X', 'X'): (constraint,)}, {})
self.assertEqual(rqlst.as_string(),
u"Any X WHERE X is Affaire, ((EXISTS(X concerne A, A is Division)) OR (EXISTS(X concerne C, C is Societe))) OR (EXISTS(X concerne B, B is Note))")
def test_add_ambiguity_outerjoin(self):
constraint = ('X concerne Y')
- rqlst = parse('Any X,C WHERE X? documented_by C')
+ rqlst = parse(u'Any X,C WHERE X? documented_by C')
rewrite(rqlst, {('X', 'X'): (constraint,)}, {})
# ambiguity are kept in the sub-query, no need to be resolved using OR
self.assertEqual(rqlst.as_string(),
@@ -385,76 +387,76 @@
def test_rrqlexpr_nonexistant_subject_1(self):
constraint = RRQLExpression('S owned_by U')
- rqlst = parse('Card C')
+ rqlst = parse(u'Card C')
rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU')
self.assertEqual(rqlst.as_string(),
u"Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A)")
- rqlst = parse('Card C')
+ rqlst = parse(u'Card C')
rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'OU')
self.assertEqual(rqlst.as_string(),
u"Any C WHERE C is Card")
- rqlst = parse('Card C')
+ rqlst = parse(u'Card C')
rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SOU')
self.assertEqual(rqlst.as_string(),
u"Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A)")
def test_rrqlexpr_nonexistant_subject_2(self):
constraint = RRQLExpression('S owned_by U, O owned_by U, O is Card')
- rqlst = parse('Card C')
+ rqlst = parse(u'Card C')
rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU')
self.assertEqual(rqlst.as_string(),
'Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A)')
- rqlst = parse('Card C')
+ rqlst = parse(u'Card C')
rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'OU')
self.assertEqual(rqlst.as_string(),
'Any C WHERE C is Card, B eid %(D)s, EXISTS(A owned_by B, A is Card)')
- rqlst = parse('Card C')
+ rqlst = parse(u'Card C')
rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SOU')
self.assertEqual(rqlst.as_string(),
'Any C WHERE C is Card, A eid %(B)s, EXISTS(C owned_by A, D owned_by A, D is Card)')
def test_rrqlexpr_nonexistant_subject_3(self):
constraint = RRQLExpression('U in_group G, G name "users"')
- rqlst = parse('Card C')
+ rqlst = parse(u'Card C')
rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU')
self.assertEqual(rqlst.as_string(),
u'Any C WHERE C is Card, A eid %(B)s, EXISTS(A in_group D, D name "users", D is CWGroup)')
def test_rrqlexpr_nonexistant_subject_4(self):
constraint = RRQLExpression('U in_group G, G name "users", S owned_by U')
- rqlst = parse('Card C')
+ rqlst = parse(u'Card C')
rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'SU')
self.assertEqual(rqlst.as_string(),
u'Any C WHERE C is Card, A eid %(B)s, EXISTS(A in_group D, D name "users", C owned_by A, D is CWGroup)')
- rqlst = parse('Card C')
+ rqlst = parse(u'Card C')
rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'OU')
self.assertEqual(rqlst.as_string(),
u'Any C WHERE C is Card, A eid %(B)s, EXISTS(A in_group D, D name "users", D is CWGroup)')
def test_rrqlexpr_nonexistant_subject_5(self):
constraint = RRQLExpression('S owned_by Z, O owned_by Z, O is Card')
- rqlst = parse('Card C')
+ rqlst = parse(u'Card C')
rewrite(rqlst, {('C', 'S'): (constraint,)}, {}, 'S')
self.assertEqual(rqlst.as_string(),
u"Any C WHERE C is Card, EXISTS(C owned_by A, A is CWUser)")
def test_rqlexpr_not_relation_1_1(self):
constraint = ERQLExpression('X owned_by Z, Z login "hop"', 'X')
- rqlst = parse('Affaire A WHERE NOT EXISTS(A documented_by C)')
+ rqlst = parse(u'Affaire A WHERE NOT EXISTS(A documented_by C)')
rewrite(rqlst, {('C', 'X'): (constraint,)}, {}, 'X')
self.assertEqual(rqlst.as_string(),
u'Any A WHERE NOT EXISTS(A documented_by C, EXISTS(C owned_by B, B login "hop", B is CWUser), C is Card), A is Affaire')
def test_rqlexpr_not_relation_1_2(self):
constraint = ERQLExpression('X owned_by Z, Z login "hop"', 'X')
- rqlst = parse('Affaire A WHERE NOT EXISTS(A documented_by C)')
+ rqlst = parse(u'Affaire A WHERE NOT EXISTS(A documented_by C)')
rewrite(rqlst, {('A', 'X'): (constraint,)}, {}, 'X')
self.assertEqual(rqlst.as_string(),
u'Any A WHERE NOT EXISTS(A documented_by C, C is Card), A is Affaire, EXISTS(A owned_by B, B login "hop", B is CWUser)')
def test_rqlexpr_not_relation_2(self):
constraint = ERQLExpression('X owned_by Z, Z login "hop"', 'X')
- rqlst = rqlhelper.parse('Affaire A WHERE NOT A documented_by C', annotate=False)
+ rqlst = rqlhelper.parse(u'Affaire A WHERE NOT A documented_by C', annotate=False)
rewrite(rqlst, {('C', 'X'): (constraint,)}, {}, 'X')
self.assertEqual(rqlst.as_string(),
u'Any A WHERE NOT EXISTS(A documented_by C, EXISTS(C owned_by B, B login "hop", B is CWUser), C is Card), A is Affaire')
@@ -463,7 +465,7 @@
c1 = ERQLExpression('X owned_by Z, Z login "hop"', 'X')
c2 = ERQLExpression('X owned_by Z, Z login "hip"', 'X')
c3 = ERQLExpression('X owned_by Z, Z login "momo"', 'X')
- rqlst = rqlhelper.parse('Any A WHERE A documented_by C?', annotate=False)
+ rqlst = rqlhelper.parse(u'Any A WHERE A documented_by C?', annotate=False)
rewrite(rqlst, {('C', 'X'): (c1, c2, c3)}, {}, 'X')
self.assertEqual(rqlst.as_string(),
u'Any A WHERE A documented_by C?, A is Affaire '
@@ -484,12 +486,12 @@
# 4. this variable require a rewrite
c_bad = ERQLExpression('X documented_by R, A in_state R')
- rqlst = parse('Any A, R WHERE A ref R, S is Affaire')
+ rqlst = parse(u'Any A, R WHERE A ref R, S is Affaire')
rewrite(rqlst, {('A', 'X'): (c_ok, c_bad)}, {})
def test_nonregr_is_instance_of(self):
user_expr = ERQLExpression('NOT X in_group AF, AF name "guests"')
- rqlst = parse('Any O WHERE S use_email O, S is CWUser, O is_instance_of EmailAddress')
+ rqlst = parse(u'Any O WHERE S use_email O, S is CWUser, O is_instance_of EmailAddress')
rewrite(rqlst, {('S', 'X'): (user_expr,)}, {})
self.assertEqual(rqlst.as_string(),
'Any O WHERE S use_email O, S is CWUser, O is EmailAddress, '
@@ -600,7 +602,7 @@
# Basic tests
def test_base_rule(self):
rules = {'participated_in': 'S contributor O'}
- rqlst = rqlhelper.parse('Any X WHERE X participated_in S')
+ rqlst = rqlhelper.parse(u'Any X WHERE X participated_in S')
rule_rewrite(rqlst, rules)
self.assertEqual('Any X WHERE X contributor S',
rqlst.as_string())
@@ -609,7 +611,7 @@
rules = {'illustrator_of': ('C is Contribution, C contributor S, '
'C manifestation O, C role R, '
'R name "illustrator"')}
- rqlst = rqlhelper.parse('Any A,B WHERE A illustrator_of B')
+ rqlst = rqlhelper.parse(u'Any A,B WHERE A illustrator_of B')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WHERE C is Contribution, '
'C contributor A, C manifestation B, '
@@ -620,7 +622,7 @@
rules = {'illustrator_of': ('C is Contribution, C contributor S, '
'C manifestation O, C role R, '
'R name "illustrator"')}
- rqlst = rqlhelper.parse('Any A WHERE EXISTS(A illustrator_of B)')
+ rqlst = rqlhelper.parse(u'Any A WHERE EXISTS(A illustrator_of B)')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A WHERE EXISTS(C is Contribution, '
'C contributor A, C manifestation B, '
@@ -631,7 +633,7 @@
def test_rewrite2(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
'C manifestation O, C role R, R name "illustrator"'}
- rqlst = rqlhelper.parse('Any A,B WHERE A illustrator_of B, C require_permission R, S'
+ rqlst = rqlhelper.parse(u'Any A,B WHERE A illustrator_of B, C require_permission R, S'
'require_state O')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WHERE C require_permission R, S require_state O, '
@@ -642,7 +644,7 @@
def test_rewrite3(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
'C manifestation O, C role R, R name "illustrator"'}
- rqlst = rqlhelper.parse('Any A,B WHERE E require_permission T, A illustrator_of B')
+ rqlst = rqlhelper.parse(u'Any A,B WHERE E require_permission T, A illustrator_of B')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WHERE E require_permission T, '
'C is Contribution, C contributor A, C manifestation B, '
@@ -652,7 +654,7 @@
def test_rewrite4(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
'C manifestation O, C role R, R name "illustrator"'}
- rqlst = rqlhelper.parse('Any A,B WHERE C require_permission R, A illustrator_of B')
+ rqlst = rqlhelper.parse(u'Any A,B WHERE C require_permission R, A illustrator_of B')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WHERE C require_permission R, '
'D is Contribution, D contributor A, D manifestation B, '
@@ -662,7 +664,7 @@
def test_rewrite5(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
'C manifestation O, C role R, R name "illustrator"'}
- rqlst = rqlhelper.parse('Any A,B WHERE C require_permission R, A illustrator_of B, '
+ rqlst = rqlhelper.parse(u'Any A,B WHERE C require_permission R, A illustrator_of B, '
'S require_state O')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WHERE C require_permission R, S require_state O, '
@@ -674,7 +676,7 @@
def test_rewrite_with(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
'C manifestation O, C role R, R name "illustrator"'}
- rqlst = rqlhelper.parse('Any A,B WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)')
+ rqlst = rqlhelper.parse(u'Any A,B WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WITH A,B BEING '
'(Any X,Y WHERE A is Contribution, A contributor X, '
@@ -684,7 +686,7 @@
def test_rewrite_with2(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
'C manifestation O, C role R, R name "illustrator"'}
- rqlst = rqlhelper.parse('Any A,B WHERE T require_permission C WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)')
+ rqlst = rqlhelper.parse(u'Any A,B WHERE T require_permission C WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WHERE T require_permission C '
'WITH A,B BEING (Any X,Y WHERE A is Contribution, '
@@ -693,7 +695,7 @@
def test_rewrite_with3(self):
rules = {'participated_in': 'S contributor O'}
- rqlst = rqlhelper.parse('Any A,B WHERE A participated_in B '
+ rqlst = rqlhelper.parse(u'Any A,B WHERE A participated_in B '
'WITH A, B BEING(Any X,Y WHERE X contributor Y)')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WHERE A contributor B WITH A,B BEING '
@@ -703,7 +705,7 @@
def test_rewrite_with4(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
'C manifestation O, C role R, R name "illustrator"'}
- rqlst = rqlhelper.parse('Any A,B WHERE A illustrator_of B '
+ rqlst = rqlhelper.parse(u'Any A,B WHERE A illustrator_of B '
'WITH A, B BEING(Any X, Y WHERE X illustrator_of Y)')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WHERE C is Contribution, '
@@ -717,7 +719,7 @@
def test_rewrite_union(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
'C manifestation O, C role R, R name "illustrator"'}
- rqlst = rqlhelper.parse('(Any A,B WHERE A illustrator_of B) UNION'
+ rqlst = rqlhelper.parse(u'(Any A,B WHERE A illustrator_of B) UNION'
'(Any X,Y WHERE X is CWUser, Z manifestation Y)')
rule_rewrite(rqlst, rules)
self.assertEqual('(Any A,B WHERE C is Contribution, '
@@ -728,7 +730,7 @@
def test_rewrite_union2(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
'C manifestation O, C role R, R name "illustrator"'}
- rqlst = rqlhelper.parse('(Any Y WHERE Y match W) UNION '
+ rqlst = rqlhelper.parse(u'(Any Y WHERE Y match W) UNION '
'(Any A WHERE A illustrator_of B) UNION '
'(Any Y WHERE Y is ArtWork)')
rule_rewrite(rqlst, rules)
@@ -742,7 +744,7 @@
def test_rewrite_exists(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
'C manifestation O, C role R, R name "illustrator"'}
- rqlst = rqlhelper.parse('(Any A,B WHERE A illustrator_of B, '
+ rqlst = rqlhelper.parse(u'(Any A,B WHERE A illustrator_of B, '
'EXISTS(B is ArtWork))')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WHERE EXISTS(B is ArtWork), '
@@ -753,7 +755,7 @@
def test_rewrite_exists2(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
'C manifestation O, C role R, R name "illustrator"'}
- rqlst = rqlhelper.parse('(Any A,B WHERE B contributor A, EXISTS(A illustrator_of W))')
+ rqlst = rqlhelper.parse(u'(Any A,B WHERE B contributor A, EXISTS(A illustrator_of W))')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WHERE B contributor A, '
'EXISTS(C is Contribution, C contributor A, C manifestation W, '
@@ -763,7 +765,7 @@
def test_rewrite_exists3(self):
rules = {'illustrator_of': 'C is Contribution, C contributor S, '
'C manifestation O, C role R, R name "illustrator"'}
- rqlst = rqlhelper.parse('(Any A,B WHERE A illustrator_of B, EXISTS(A illustrator_of W))')
+ rqlst = rqlhelper.parse(u'(Any A,B WHERE A illustrator_of B, EXISTS(A illustrator_of W))')
rule_rewrite(rqlst, rules)
self.assertEqual('Any A,B WHERE EXISTS(C is Contribution, C contributor A, '
'C manifestation W, C role D, D name "illustrator"), '
@@ -774,7 +776,7 @@
# Test for GROUPBY
def test_rewrite_groupby(self):
rules = {'participated_in': 'S contributor O'}
- rqlst = rqlhelper.parse('Any SUM(SA) GROUPBY S WHERE P participated_in S, P manifestation SA')
+ rqlst = rqlhelper.parse(u'Any SUM(SA) GROUPBY S WHERE P participated_in S, P manifestation SA')
rule_rewrite(rqlst, rules)
self.assertEqual('Any SUM(SA) GROUPBY S WHERE P manifestation SA, P contributor S',
rqlst.as_string())
--- a/test/unittest_rset.py Thu Mar 06 15:55:33 2014 +0100
+++ b/test/unittest_rset.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,8 +18,9 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""unit tests for module cubicweb.utils"""
-from urlparse import urlsplit
-import pickle
+from six import string_types
+from six.moves import cPickle as pickle
+from six.moves.urllib.parse import urlsplit
from rql import parse
@@ -100,7 +101,11 @@
def test_pickle(self):
del self.rset.req
- self.assertEqual(len(pickle.dumps(self.rset)), 376)
+ rs2 = pickle.loads(pickle.dumps(self.rset))
+ self.assertEqual(self.rset.rows, rs2.rows)
+ self.assertEqual(self.rset.rowcount, rs2.rowcount)
+ self.assertEqual(self.rset.rql, rs2.rql)
+ self.assertEqual(self.rset.description, rs2.description)
def test_build_url(self):
with self.admin_access.web_request() as req:
@@ -274,7 +279,7 @@
"""make sure syntax tree is cached"""
rqlst1 = self.rset.syntax_tree()
rqlst2 = self.rset.syntax_tree()
- self.assert_(rqlst1 is rqlst2)
+ self.assertIs(rqlst1, rqlst2)
def test_get_entity_simple(self):
with self.admin_access.web_request() as req:
@@ -550,19 +555,32 @@
def test_str(self):
with self.admin_access.web_request() as req:
rset = req.execute('(Any X,N WHERE X is CWGroup, X name N)')
- self.assertIsInstance(str(rset), basestring)
+ self.assertIsInstance(str(rset), string_types)
self.assertEqual(len(str(rset).splitlines()), 1)
def test_repr(self):
with self.admin_access.web_request() as req:
rset = req.execute('(Any X,N WHERE X is CWGroup, X name N)')
- self.assertIsInstance(repr(rset), basestring)
+ self.assertIsInstance(repr(rset), string_types)
self.assertTrue(len(repr(rset).splitlines()) > 1)
rset = req.execute('(Any X WHERE X is CWGroup, X name "managers")')
- self.assertIsInstance(str(rset), basestring)
+ self.assertIsInstance(str(rset), string_types)
self.assertEqual(len(str(rset).splitlines()), 1)
+ def test_slice(self):
+ rs = ResultSet([[12000, 'adim', u'Adim chez les pinguins'],
+ [12000, 'adim', u'Jardiner facile'],
+ [13000, 'syt', u'Le carrelage en 42 leçons'],
+ [14000, 'nico', u'La tarte tatin en 15 minutes'],
+ [14000, 'nico', u"L'épluchage du castor commun"]],
+ 'Any U, L, T WHERE U is CWUser, U login L,'\
+ 'D created_by U, D title T',
+ description=[['CWUser', 'String', 'String']] * 5)
+ self.assertEqual(rs[1::2],
+ [[12000, 'adim', u'Jardiner facile'],
+ [14000, 'nico', u'La tarte tatin en 15 minutes']])
+
def test_nonregr_symmetric_relation(self):
# see https://www.cubicweb.org/ticket/4739253
with self.admin_access.client_cnx() as cnx:
--- a/test/unittest_schema.py Thu Mar 06 15:55:33 2014 +0100
+++ b/test/unittest_schema.py Thu Nov 12 10:52:28 2015 +0100
@@ -105,10 +105,9 @@
#
# isinstance(cstr, RQLConstraint)
# -> expected to return RQLConstraint instances but not
- # RRQLVocabularyConstraint and QLUniqueConstraint
+ # RQLVocabularyConstraint and RQLUniqueConstraint
self.assertFalse(issubclass(RQLUniqueConstraint, RQLVocabularyConstraint))
self.assertFalse(issubclass(RQLUniqueConstraint, RQLConstraint))
- self.assertTrue(issubclass(RQLConstraint, RQLVocabularyConstraint))
def test_entity_perms(self):
self.assertEqual(eperson.get_groups('read'), set(('managers', 'users', 'guests')))
@@ -158,7 +157,7 @@
def test_knownValues_load_schema(self):
schema = loader.load(config)
- self.assert_(isinstance(schema, CubicWebSchema))
+ self.assertIsInstance(schema, CubicWebSchema)
self.assertEqual(schema.name, 'data')
entities = sorted([str(e) for e in schema.entities()])
expected_entities = ['Ami', 'BaseTransition', 'BigInt', 'Bookmark', 'Boolean', 'Bytes', 'Card',
@@ -273,11 +272,13 @@
config = TestConfiguration('data', apphome=join(dirname(__file__), 'data_schemareader'))
config.bootstrap_cubes()
schema = loader.load(config)
- self.assertEqual(schema['in_group'].rdefs.values()[0].permissions,
+ rdef = next(iter(schema['in_group'].rdefs.values()))
+ self.assertEqual(rdef.permissions,
{'read': ('managers',),
'add': ('managers',),
'delete': ('managers',)})
- self.assertEqual(schema['cw_for_source'].rdefs.values()[0].permissions,
+ rdef = next(iter(schema['cw_for_source'].rdefs.values()))
+ self.assertEqual(rdef.permissions,
{'read': ('managers', 'users'),
'add': ('managers',),
'delete': ('managers',)})
@@ -355,11 +356,11 @@
# check object/subject type
self.assertEqual([('Person','Service')],
- schema['produces_and_buys'].rdefs.keys())
+ list(schema['produces_and_buys'].rdefs.keys()))
self.assertEqual([('Person','Service')],
- schema['produces_and_buys2'].rdefs.keys())
+ list(schema['produces_and_buys2'].rdefs.keys()))
self.assertCountEqual([('Company', 'Service'), ('Person', 'Service')],
- schema['reproduce'].rdefs.keys())
+ list(schema['reproduce'].rdefs.keys()))
# check relation definitions are marked infered
rdef = schema['produces_and_buys'].rdefs[('Person','Service')]
self.assertTrue(rdef.infered)
@@ -426,7 +427,9 @@
def test(self):
self.assertEqual(normalize_expression('X bla Y,Y blur Z , Z zigoulou X '),
- 'X bla Y, Y blur Z, Z zigoulou X')
+ 'X bla Y, Y blur Z, Z zigoulou X')
+ self.assertEqual(normalize_expression('X bla Y, Y name "x,y"'),
+ 'X bla Y, Y name "x,y"')
class RQLExpressionTC(TestCase):
@@ -553,7 +556,7 @@
self.set_description('composite rdefs for %s' % etype)
yield self.assertEqual, self.composites[etype], \
sorted([(r.rtype.type, r.subject.type, r.object.type, role)
- for r, role in sorted(schema[etype].composite_rdef_roles)])
+ for r, role in schema[etype].composite_rdef_roles])
if __name__ == '__main__':
--- a/test/unittest_utils.py Thu Mar 06 15:55:33 2014 +0100
+++ b/test/unittest_utils.py Thu Nov 12 10:52:28 2015 +0100
@@ -21,6 +21,7 @@
import decimal
import datetime
+from six.moves import range
from logilab.common.testlib import TestCase, DocTest, unittest_main
@@ -67,7 +68,7 @@
def test_querycache(self):
c = QueryCache(ceiling=20)
# write only
- for x in xrange(10):
+ for x in range(10):
c[x] = x
self.assertEqual(c._usage_report(),
{'transientcount': 0,
@@ -75,7 +76,7 @@
'permanentcount': 0})
c = QueryCache(ceiling=10)
# we should also get a warning
- for x in xrange(20):
+ for x in range(20):
c[x] = x
self.assertEqual(c._usage_report(),
{'transientcount': 0,
@@ -83,8 +84,8 @@
'permanentcount': 0})
# write + reads
c = QueryCache(ceiling=20)
- for n in xrange(4):
- for x in xrange(10):
+ for n in range(4):
+ for x in range(10):
c[x] = x
c[x]
self.assertEqual(c._usage_report(),
@@ -92,8 +93,8 @@
'itemcount': 10,
'permanentcount': 0})
c = QueryCache(ceiling=20)
- for n in xrange(17):
- for x in xrange(10):
+ for n in range(17):
+ for x in range(10):
c[x] = x
c[x]
self.assertEqual(c._usage_report(),
@@ -101,8 +102,8 @@
'itemcount': 10,
'permanentcount': 10})
c = QueryCache(ceiling=20)
- for n in xrange(17):
- for x in xrange(10):
+ for n in range(17):
+ for x in range(10):
c[x] = x
if n % 2:
c[x]
@@ -115,7 +116,7 @@
class UStringIOTC(TestCase):
def test_boolean_value(self):
- self.assert_(UStringIO())
+ self.assertTrue(UStringIO())
class RepeatListTC(TestCase):
@@ -169,14 +170,14 @@
def test_append(self):
l = SizeConstrainedList(10)
- for i in xrange(12):
+ for i in range(12):
l.append(i)
- self.assertEqual(l, range(2, 12))
+ self.assertEqual(l, list(range(2, 12)))
def test_extend(self):
- testdata = [(range(5), range(5)),
- (range(10), range(10)),
- (range(12), range(2, 12)),
+ testdata = [(list(range(5)), list(range(5))),
+ (list(range(10)), list(range(10))),
+ (list(range(12)), list(range(2, 12))),
]
for extension, expected in testdata:
l = SizeConstrainedList(10)
--- a/toolsutils.py Thu Mar 06 15:55:33 2014 +0100
+++ b/toolsutils.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,6 +16,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""some utilities for cubicweb command line tools"""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
@@ -37,6 +38,8 @@
def symlink(*args):
raise NotImplementedError
+from six import add_metaclass
+
from logilab.common.clcommands import Command as BaseCommand
from logilab.common.shellutils import ASK
@@ -62,29 +65,29 @@
"""create a directory if it doesn't exist yet"""
try:
makedirs(directory)
- print '-> created directory %s' % directory
+ print('-> created directory %s' % directory)
except OSError as ex:
import errno
if ex.errno != errno.EEXIST:
raise
- print '-> no need to create existing directory %s' % directory
+ print('-> no need to create existing directory %s' % directory)
def create_symlink(source, target):
"""create a symbolic link"""
if exists(target):
remove(target)
symlink(source, target)
- print '[symlink] %s <-- %s' % (target, source)
+ print('[symlink] %s <-- %s' % (target, source))
def create_copy(source, target):
import shutil
- print '[copy] %s <-- %s' % (target, source)
+ print('[copy] %s <-- %s' % (target, source))
shutil.copy2(source, target)
def rm(whatever):
import shutil
shutil.rmtree(whatever)
- print '-> removed %s' % whatever
+ print('-> removed %s' % whatever)
def show_diffs(appl_file, ref_file, askconfirm=True):
"""interactivly replace the old file with the new file according to
@@ -95,8 +98,8 @@
diffs = pipe.stdout.read()
if diffs:
if askconfirm:
- print
- print diffs
+ print()
+ print(diffs)
action = ASK.ask('Replace ?', ('Y', 'n', 'q'), 'Y').lower()
else:
action = 'y'
@@ -106,17 +109,17 @@
except IOError:
os.system('chmod a+w %s' % appl_file)
shutil.copyfile(ref_file, appl_file)
- print 'replaced'
+ print('replaced')
elif action == 'q':
sys.exit(0)
else:
copy_file = appl_file + '.default'
- copy = file(copy_file, 'w')
+ copy = open(copy_file, 'w')
copy.write(open(ref_file).read())
copy.close()
- print 'keep current version, the new file has been written to', copy_file
+ print('keep current version, the new file has been written to', copy_file)
else:
- print 'no diff between %s and %s' % (appl_file, ref_file)
+ print('no diff between %s and %s' % (appl_file, ref_file))
SKEL_EXCLUDE = ('*.py[co]', '*.orig', '*~', '*_flymake.py')
def copy_skeleton(skeldir, targetdir, context,
@@ -143,15 +146,15 @@
if not askconfirm or not exists(tfpath) or \
ASK.confirm('%s exists, overwrite?' % tfpath):
fill_templated_file(fpath, tfpath, context)
- print '[generate] %s <-- %s' % (tfpath, fpath)
+ print('[generate] %s <-- %s' % (tfpath, fpath))
elif exists(tfpath):
show_diffs(tfpath, fpath, askconfirm)
else:
shutil.copyfile(fpath, tfpath)
def fill_templated_file(fpath, tfpath, context):
- fobj = file(tfpath, 'w')
- templated = file(fpath).read()
+ fobj = open(tfpath, 'w')
+ templated = open(fpath).read()
fobj.write(templated % context)
fobj.close()
@@ -160,8 +163,8 @@
if log:
log('set permissions to 0600 for %s', filepath)
else:
- print '-> set permissions to 0600 for %s' % filepath
- chmod(filepath, 0600)
+ print('-> set permissions to 0600 for %s' % filepath)
+ chmod(filepath, 0o600)
def read_config(config_file, raise_if_unreadable=False):
"""read some simple configuration from `config_file` and return it as a
@@ -209,12 +212,13 @@
return cls
+@add_metaclass(metacmdhandler)
class CommandHandler(object):
"""configuration specific helper for cubicweb-ctl commands"""
- __metaclass__ = metacmdhandler
def __init__(self, config):
self.config = config
+
class Command(BaseCommand):
"""base class for cubicweb-ctl commands"""
@@ -234,7 +238,7 @@
raise ConfigurationError(msg)
def fail(self, reason):
- print "command failed:", reason
+ print("command failed:", reason)
sys.exit(1)
--- a/tox.ini Thu Mar 06 15:55:33 2014 +0100
+++ b/tox.ini Thu Nov 12 10:52:28 2015 +0100
@@ -29,6 +29,8 @@
-r{toxinidir}/ext/test/requirements.txt
[testenv:hooks]
+deps =
+ -r{toxinidir}/hooks/test/requirements.txt
[testenv:server]
deps =
--- a/transaction.py Thu Mar 06 15:55:33 2014 +0100
+++ b/transaction.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,7 +17,7 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
""" undoable transaction objects. """
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from cubicweb import RepositoryError
--- a/uilib.py Thu Mar 06 15:55:33 2014 +0100
+++ b/uilib.py Thu Nov 12 10:52:28 2015 +0100
@@ -26,12 +26,15 @@
import csv
import re
-from StringIO import StringIO
+from io import StringIO
+
+from six import PY2, PY3, text_type, binary_type, string_types, integer_types
from logilab.mtconverter import xml_escape, html_unescape
from logilab.common.date import ustrftime
from logilab.common.deprecation import deprecated
+from cubicweb import _
from cubicweb.utils import js_dumps
@@ -62,7 +65,7 @@
return value
def print_int(value, req, props, displaytime=True):
- return unicode(value)
+ return text_type(value)
def print_date(value, req, props, displaytime=True):
return ustrftime(value, req.property_value('ui.date-format'))
@@ -92,7 +95,7 @@
_('%d seconds')
def print_timedelta(value, req, props, displaytime=True):
- if isinstance(value, (int, long)):
+ if isinstance(value, integer_types):
# `date - date`, unlike `datetime - datetime` gives an int
# (number of days), not a timedelta
# XXX should rql be fixed to return Int instead of Interval in
@@ -122,7 +125,7 @@
return req._('no')
def print_float(value, req, props, displaytime=True):
- return unicode(req.property_value('ui.float-format') % value)
+ return text_type(req.property_value('ui.float-format') % value) # XXX cast needed ?
PRINTERS = {
'Bytes': print_bytes,
@@ -337,9 +340,8 @@
def __unicode__(self):
if self.parent:
return u'%s.%s' % (self.parent, self.id)
- return unicode(self.id)
- def __str__(self):
- return unicode(self).encode('utf8')
+ return text_type(self.id)
+ __str__ = __unicode__ if PY3 else lambda self: self.__unicode__().encode('utf-8')
def __getattr__(self, attr):
return _JSId(attr, self)
def __call__(self, *args):
@@ -357,6 +359,7 @@
if self.parent:
return u'%s(%s)' % (self.parent, ','.join(args))
return ','.join(args)
+ __str__ = __unicode__ if PY3 else lambda self: self.__unicode__().encode('utf-8')
class _JS(object):
def __getattr__(self, attr):
@@ -389,7 +392,7 @@
'img', 'area', 'input', 'col'))
def sgml_attributes(attrs):
- return u' '.join(u'%s="%s"' % (attr, xml_escape(unicode(value)))
+ return u' '.join(u'%s="%s"' % (attr, xml_escape(text_type(value)))
for attr, value in sorted(attrs.items())
if value is not None)
@@ -407,7 +410,7 @@
value += u' ' + sgml_attributes(attrs)
if content:
if escapecontent:
- content = xml_escape(unicode(content))
+ content = xml_escape(text_type(content))
value += u'>%s</%s>' % (content, tag)
else:
if tag in HTML4_EMPTY_TAGS:
@@ -436,8 +439,8 @@
stream = StringIO() #UStringIO() don't want unicode assertion
formater.format(layout, stream)
res = stream.getvalue()
- if isinstance(res, str):
- res = unicode(res, 'UTF8')
+ if isinstance(res, binary_type):
+ res = res.decode('UTF8')
return res
# traceback formatting ########################################################
@@ -445,14 +448,17 @@
import traceback
def exc_message(ex, encoding):
- try:
- excmsg = unicode(ex)
- except Exception:
+ if PY3:
+ excmsg = str(ex)
+ else:
try:
- excmsg = unicode(str(ex), encoding, 'replace')
+ excmsg = unicode(ex)
except Exception:
- excmsg = unicode(repr(ex), encoding, 'replace')
- exctype = unicode(ex.__class__.__name__)
+ try:
+ excmsg = unicode(str(ex), encoding, 'replace')
+ except Exception:
+ excmsg = unicode(repr(ex), encoding, 'replace')
+ exctype = ex.__class__.__name__
return u'%s: %s' % (exctype, excmsg)
@@ -462,7 +468,10 @@
for stackentry in traceback.extract_tb(info[2]):
res.append(u'\tFile %s, line %s, function %s' % tuple(stackentry[:3]))
if stackentry[3]:
- res.append(u'\t %s' % stackentry[3].decode('utf-8', 'replace'))
+ data = xml_escape(stackentry[3])
+ if PY2:
+ data = data.decode('utf-8', 'replace')
+ res.append(u'\t %s' % data)
res.append(u'\n')
try:
res.append(u'\t Error: %s\n' % exception)
@@ -496,14 +505,16 @@
u'<b class="function">%s</b>:<br/>'%(
xml_escape(stackentry[0]), stackentry[1], xml_escape(stackentry[2])))
if stackentry[3]:
- string = xml_escape(stackentry[3]).decode('utf-8', 'replace')
+ string = xml_escape(stackentry[3])
+ if PY2:
+ string = string.decode('utf-8', 'replace')
strings.append(u'  %s<br/>\n' % (string))
# add locals info for each entry
try:
local_context = tcbk.tb_frame.f_locals
html_info = []
chars = 0
- for name, value in local_context.iteritems():
+ for name, value in local_context.items():
value = xml_escape(repr(value))
info = u'<span class="name">%s</span>=%s, ' % (name, value)
line_length = len(name) + len(value)
@@ -526,7 +537,9 @@
# csv files / unicode support #################################################
class UnicodeCSVWriter:
- """proxies calls to csv.writer.writerow to be able to deal with unicode"""
+ """proxies calls to csv.writer.writerow to be able to deal with unicode
+
+ Under Python 3, this code no longer encodes anything."""
def __init__(self, wfunc, encoding, **kwargs):
self.writer = csv.writer(self, **kwargs)
@@ -537,9 +550,12 @@
self.wfunc(data)
def writerow(self, row):
+ if PY3:
+ self.writer.writerow(row)
+ return
csvrow = []
for elt in row:
- if isinstance(elt, unicode):
+ if isinstance(elt, text_type):
csvrow.append(elt.encode(self.encoding))
else:
csvrow.append(str(elt))
@@ -559,7 +575,7 @@
def __call__(self, function):
def newfunc(*args, **kwargs):
ret = function(*args, **kwargs)
- if isinstance(ret, basestring):
+ if isinstance(ret, string_types):
return ret[:self.maxsize]
return ret
return newfunc
@@ -568,6 +584,6 @@
def htmlescape(function):
def newfunc(*args, **kwargs):
ret = function(*args, **kwargs)
- assert isinstance(ret, basestring)
+ assert isinstance(ret, string_types)
return xml_escape(ret)
return newfunc
--- a/utils.py Thu Mar 06 15:55:33 2014 +0100
+++ b/utils.py Thu Nov 12 10:52:28 2015 +0100
@@ -33,9 +33,10 @@
from uuid import uuid4
from warnings import warn
from threading import Lock
-from urlparse import urlparse
+from logging import getLogger
-from logging import getLogger
+from six import text_type
+from six.moves.urllib.parse import urlparse
from logilab.mtconverter import xml_escape
from logilab.common.deprecation import deprecated
@@ -100,7 +101,7 @@
"""
def __init__(self, w, tag, closetag=None):
self.written = False
- self.tag = unicode(tag)
+ self.tag = text_type(tag)
self.closetag = closetag
self.w = w
@@ -116,7 +117,7 @@
def __exit__(self, exctype, value, traceback):
if self.written is True:
if self.closetag:
- self.w(unicode(self.closetag))
+ self.w(text_type(self.closetag))
else:
self.w(self.tag.replace('<', '</', 1))
@@ -185,13 +186,13 @@
def __iter__(self):
return repeat(self._item, self._size)
def __getitem__(self, index):
+ if isinstance(index, slice):
+ # XXX could be more efficient, but do we bother?
+ return ([self._item] * self._size)[index]
return self._item
def __delitem__(self, idc):
assert self._size > 0
self._size -= 1
- def __getslice__(self, i, j):
- # XXX could be more efficient, but do we bother?
- return ([self._item] * self._size)[i:j]
def __add__(self, other):
if isinstance(other, RepeatList):
if other._item == self._item:
@@ -208,8 +209,10 @@
if isinstance(other, RepeatList):
return other._size == self._size and other._item == self._item
return self[:] == other
- # py3k future warning "Overriding __eq__ blocks inheritance of __hash__ in 3.x"
- # is annoying but won't go away because we don't want to hash() the repeatlist
+ def __ne__(self, other):
+ return not (self == other)
+ def __hash__(self):
+ raise NotImplementedError
def pop(self, i):
self._size -= 1
@@ -223,11 +226,13 @@
self.tracewrites = tracewrites
super(UStringIO, self).__init__(*args, **kwargs)
- def __nonzero__(self):
+ def __bool__(self):
return True
+ __nonzero__ = __bool__
+
def write(self, value):
- assert isinstance(value, unicode), u"unicode required not %s : %s"\
+ assert isinstance(value, text_type), u"unicode required not %s : %s"\
% (type(value).__name__, repr(value))
if self.tracewrites:
from traceback import format_stack
@@ -553,9 +558,9 @@
def _dict2js(d, predictable=False):
if predictable:
- it = sorted(d.iteritems())
+ it = sorted(d.items())
else:
- it = d.iteritems()
+ it = d.items()
res = [key + ': ' + js_dumps(val, predictable)
for key, val in it]
return '{%s}' % ', '.join(res)
--- a/view.py Thu Mar 06 15:55:33 2014 +0100
+++ b/view.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,12 +18,14 @@
"""abstract views and templates classes for CubicWeb web client"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from io import BytesIO
from warnings import warn
from functools import partial
+from six.moves import range
+
from logilab.common.deprecation import deprecated
from logilab.common.registry import yes
from logilab.mtconverter import xml_escape
@@ -173,7 +175,7 @@
# specific view
if rset.rowcount != 1:
kwargs.setdefault('initargs', self.cw_extra_kwargs)
- for i in xrange(len(rset)):
+ for i in range(len(rset)):
if wrap:
self.w(u'<div class="section">')
self.wview(self.__regid__, rset, row=i, **kwargs)
@@ -213,7 +215,7 @@
return self._cw.build_url('view', vid=self.__regid__)
coltypes = rset.column_types(0)
if len(coltypes) == 1:
- etype = iter(coltypes).next()
+ etype = next(iter(coltypes))
if not self._cw.vreg.schema.eschema(etype).final:
if len(rset) == 1:
entity = rset.get_entity(0, 0)
@@ -281,7 +283,7 @@
else :
etypes = rset.column_types(0)
if len(etypes) == 1:
- etype = iter(etypes).next()
+ etype = next(iter(etypes))
clabel = display_name(self._cw, etype, 'plural')
else :
clabel = u'#[*] (%s)' % vtitle
@@ -394,7 +396,7 @@
if rset is None:
rset = self.cw_rset = self._cw.execute(self.startup_rql())
if rset:
- for i in xrange(len(rset)):
+ for i in range(len(rset)):
self.wview(self.__regid__, rset, row=i, **kwargs)
else:
self.no_entities(**kwargs)
--- a/web/__init__.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/__init__.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,10 +20,9 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
-from urllib import quote as urlquote
-
+from six.moves.urllib.parse import quote as urlquote
from logilab.common.deprecation import deprecated
from cubicweb.web._exceptions import *
--- a/web/_exceptions.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/_exceptions.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,7 +20,7 @@
__docformat__ = "restructuredtext en"
-import httplib
+from six.moves import http_client
from cubicweb._exceptions import *
from cubicweb.utils import json_dumps
@@ -41,7 +41,7 @@
"""base class for publishing related exception"""
def __init__(self, *args, **kwargs):
- self.status = kwargs.pop('status', httplib.OK)
+ self.status = kwargs.pop('status', http_client.OK)
super(PublishException, self).__init__(*args, **kwargs)
class LogOut(PublishException):
@@ -52,7 +52,7 @@
class Redirect(PublishException):
"""raised to redirect the http request"""
- def __init__(self, location, status=httplib.SEE_OTHER):
+ def __init__(self, location, status=http_client.SEE_OTHER):
super(Redirect, self).__init__(status=status)
self.location = location
@@ -71,7 +71,7 @@
"""raised when a request can't be served because of a bad input"""
def __init__(self, *args, **kwargs):
- kwargs.setdefault('status', httplib.BAD_REQUEST)
+ kwargs.setdefault('status', http_client.BAD_REQUEST)
super(RequestError, self).__init__(*args, **kwargs)
@@ -79,14 +79,14 @@
"""raised when an edit request doesn't specify any eid to edit"""
def __init__(self, *args, **kwargs):
- kwargs.setdefault('status', httplib.BAD_REQUEST)
+ kwargs.setdefault('status', http_client.BAD_REQUEST)
super(NothingToEdit, self).__init__(*args, **kwargs)
class ProcessFormError(RequestError):
"""raised when posted data can't be processed by the corresponding field
"""
def __init__(self, *args, **kwargs):
- kwargs.setdefault('status', httplib.BAD_REQUEST)
+ kwargs.setdefault('status', http_client.BAD_REQUEST)
super(ProcessFormError, self).__init__(*args, **kwargs)
class NotFound(RequestError):
@@ -94,13 +94,13 @@
a 404 error should be returned"""
def __init__(self, *args, **kwargs):
- kwargs.setdefault('status', httplib.NOT_FOUND)
+ kwargs.setdefault('status', http_client.NOT_FOUND)
super(NotFound, self).__init__(*args, **kwargs)
class RemoteCallFailed(RequestError):
"""raised when a json remote call fails
"""
- def __init__(self, reason='', status=httplib.INTERNAL_SERVER_ERROR):
+ def __init__(self, reason='', status=http_client.INTERNAL_SERVER_ERROR):
super(RemoteCallFailed, self).__init__(reason, status=status)
self.reason = reason
--- a/web/action.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/action.py Thu Nov 12 10:52:28 2015 +0100
@@ -33,7 +33,7 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from cubicweb import target
from cubicweb.predicates import (partial_relation_possible, match_search_state,
--- a/web/application.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/application.py Thu Nov 12 10:52:28 2015 +0100
@@ -25,7 +25,8 @@
from warnings import warn
import json
-import httplib
+from six import text_type, binary_type
+from six.moves import http_client
from logilab.common.deprecation import deprecated
@@ -68,8 +69,8 @@
def __init__(self, appli):
self.repo = appli.repo
self.vreg = appli.vreg
- self.session_manager = self.vreg['components'].select('sessionmanager',
- repo=self.repo)
+ self.session_manager = self.vreg['sessions'].select('sessionmanager',
+ repo=self.repo)
global SESSION_MANAGER
SESSION_MANAGER = self.session_manager
if self.vreg.config.mode != 'test':
@@ -80,8 +81,8 @@
def reset_session_manager(self):
data = self.session_manager.dump_data()
- self.session_manager = self.vreg['components'].select('sessionmanager',
- repo=self.repo)
+ self.session_manager = self.vreg['sessions'].select('sessionmanager',
+ repo=self.repo)
self.session_manager.restore_data(data)
global SESSION_MANAGER
SESSION_MANAGER = self.session_manager
@@ -256,7 +257,7 @@
# activate realm-based auth
realm = self.vreg.config['realm']
req.set_header('WWW-Authenticate', [('Basic', {'realm' : realm })], raw=False)
- content = ''
+ content = b''
try:
try:
session = self.get_session(req)
@@ -290,7 +291,7 @@
if self.vreg.config['auth-mode'] == 'cookie' and ex.url:
req.headers_out.setHeader('location', str(ex.url))
if ex.status is not None:
- req.status_out = httplib.SEE_OTHER
+ req.status_out = http_client.SEE_OTHER
# When the authentification is handled by http we must
# explicitly ask for authentification to flush current http
# authentification information
@@ -310,23 +311,24 @@
# the request does not use https, redirect to login form
https_url = self.vreg.config['https-url']
if https_url and req.base_url() != https_url:
- req.status_out = httplib.SEE_OTHER
+ req.status_out = http_client.SEE_OTHER
req.headers_out.setHeader('location', https_url + 'login')
else:
# We assume here that in http auth mode the user *May* provide
# Authentification Credential if asked kindly.
if self.vreg.config['auth-mode'] == 'http':
- req.status_out = httplib.UNAUTHORIZED
+ req.status_out = http_client.UNAUTHORIZED
# In the other case (coky auth) we assume that there is no way
# for the user to provide them...
# XXX But WHY ?
else:
- req.status_out = httplib.FORBIDDEN
+ req.status_out = http_client.FORBIDDEN
# If previous error handling already generated a custom content
# do not overwrite it. This is used by LogOut Except
# XXX ensure we don't actually serve content
if not content:
content = self.need_login_content(req)
+ assert isinstance(content, binary_type)
return content
@@ -368,7 +370,7 @@
except cors.CORSPreflight:
# Return directly an empty 200
req.status_out = 200
- result = ''
+ result = b''
except StatusResponse as ex:
warn('[3.16] StatusResponse is deprecated use req.status_out',
DeprecationWarning, stacklevel=2)
@@ -394,12 +396,12 @@
except Unauthorized as ex:
req.data['errmsg'] = req._('You\'re not authorized to access this page. '
'If you think you should, please contact the site administrator.')
- req.status_out = httplib.FORBIDDEN
+ req.status_out = http_client.FORBIDDEN
result = self.error_handler(req, ex, tb=False)
except Forbidden as ex:
req.data['errmsg'] = req._('This action is forbidden. '
'If you think it should be allowed, please contact the site administrator.')
- req.status_out = httplib.FORBIDDEN
+ req.status_out = http_client.FORBIDDEN
result = self.error_handler(req, ex, tb=False)
except (BadRQLQuery, RequestError) as ex:
result = self.error_handler(req, ex, tb=False)
@@ -413,7 +415,7 @@
raise
### Last defense line
except BaseException as ex:
- req.status_out = httplib.INTERNAL_SERVER_ERROR
+ req.status_out = http_client.INTERNAL_SERVER_ERROR
result = self.error_handler(req, ex, tb=True)
finally:
if req.cnx and not commited:
@@ -437,7 +439,7 @@
req.headers_out.setHeader('location', str(ex.location))
assert 300 <= ex.status < 400
req.status_out = ex.status
- return ''
+ return b''
def validation_error_handler(self, req, ex):
ex.translate(req._) # translate messages using ui language
@@ -453,9 +455,9 @@
# messages.
location = req.form['__errorurl'].rsplit('#', 1)[0]
req.headers_out.setHeader('location', str(location))
- req.status_out = httplib.SEE_OTHER
- return ''
- req.status_out = httplib.CONFLICT
+ req.status_out = http_client.SEE_OTHER
+ return b''
+ req.status_out = http_client.CONFLICT
return self.error_handler(req, ex, tb=False)
def error_handler(self, req, ex, tb=False):
@@ -491,14 +493,14 @@
def ajax_error_handler(self, req, ex):
req.set_header('content-type', 'application/json')
- status = httplib.INTERNAL_SERVER_ERROR
+ status = http_client.INTERNAL_SERVER_ERROR
if isinstance(ex, PublishException) and ex.status is not None:
status = ex.status
if req.status_out < 400:
# don't overwrite it if it's already set
req.status_out = status
- json_dumper = getattr(ex, 'dumps', lambda : json.dumps({'reason': unicode(ex)}))
- return json_dumper()
+ json_dumper = getattr(ex, 'dumps', lambda : json.dumps({'reason': text_type(ex)}))
+ return json_dumper().encode('utf-8')
# special case handling
--- a/web/box.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/box.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,9 @@
"""abstract box classes for CubicWeb web client"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
+
+from six import add_metaclass
from logilab.mtconverter import xml_escape
from logilab.common.deprecation import class_deprecated, class_renamed
@@ -41,7 +43,7 @@
actions_by_cat.setdefault(action.category, []).append(
(action.title, action) )
for key, values in actions_by_cat.items():
- actions_by_cat[key] = [act for title, act in sorted(values)]
+ actions_by_cat[key] = [act for title, act in sorted(values, key=lambda x: x[0])]
if categories_in_order:
for cat in categories_in_order:
if cat in actions_by_cat:
@@ -53,6 +55,7 @@
# old box system, deprecated ###################################################
+@add_metaclass(class_deprecated)
class BoxTemplate(View):
"""base template for boxes, usually a (contextual) list of possible
actions. Various classes attributes may be used to control the box
@@ -66,7 +69,6 @@
box.render(self.w)
"""
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.10] *BoxTemplate classes are deprecated, use *CtxComponent instead (%(cls)s)'
__registry__ = 'ctxcomponents'
--- a/web/captcha.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/captcha.py Thu Nov 12 10:52:28 2015 +0100
@@ -22,7 +22,9 @@
__docformat__ = "restructuredtext en"
from random import randint, choice
-from cStringIO import StringIO
+from io import BytesIO
+
+from six.moves import range
from PIL import Image, ImageFont, ImageDraw, ImageFilter
@@ -51,7 +53,7 @@
draw = ImageDraw.Draw(img)
# draw 100 random colored boxes on the background
x, y = img.size
- for num in xrange(100):
+ for num in range(100):
draw.rectangle((randint(0, x), randint(0, y),
randint(0, x), randint(0, y)),
fill=randint(0, 0xffffff))
@@ -67,7 +69,7 @@
"""
text = u''.join(choice('QWERTYUOPASDFGHJKLZXCVBNM') for i in range(size))
img = pil_captcha(text, fontfile, fontsize)
- out = StringIO()
+ out = BytesIO()
img.save(out, format)
out.seek(0)
return text, out
--- a/web/component.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/component.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,10 +20,12 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from warnings import warn
+from six import PY3, add_metaclass, text_type
+
from logilab.common.deprecation import class_deprecated, class_renamed, deprecated
from logilab.mtconverter import xml_escape
@@ -215,6 +217,9 @@
def __unicode__(self):
return tags.a(self.label, href=self.href, **self.attrs)
+ if PY3:
+ __str__ = __unicode__
+
def render(self, w):
w(tags.a(self.label, href=self.href, **self.attrs))
@@ -425,7 +430,7 @@
@property
def domid(self):
- return domid(self.__regid__) + unicode(self.entity.eid)
+ return domid(self.__regid__) + text_type(self.entity.eid)
def lazy_view_holder(self, w, entity, oid, registry='views'):
"""add a holder and return a URL that may be used to replace this
@@ -498,7 +503,7 @@
args['subject'],
args['object'])
return u'[<a href="javascript: %s" class="action">%s</a>] %s' % (
- xml_escape(unicode(jscall)), label, etarget.view('incontext'))
+ xml_escape(text_type(jscall)), label, etarget.view('incontext'))
def related_boxitems(self, entity):
return [self.box_item(entity, etarget, 'delete_relation', u'-')
@@ -515,7 +520,7 @@
"""returns the list of unrelated entities, using the entity's
appropriate vocabulary function
"""
- skip = set(unicode(e.eid) for e in entity.related(self.rtype, role(self),
+ skip = set(text_type(e.eid) for e in entity.related(self.rtype, role(self),
entities=True))
skip.add(None)
skip.add(INTERNAL_FIELD_VALUE)
@@ -633,7 +638,7 @@
if maydel:
if not js_css_added:
js_css_added = self.add_js_css()
- jscall = unicode(js.ajaxBoxRemoveLinkedEntity(
+ jscall = text_type(js.ajaxBoxRemoveLinkedEntity(
self.__regid__, entity.eid, rentity.eid,
self.fname_remove,
self.removed_msg and _(self.removed_msg)))
@@ -648,7 +653,7 @@
if mayadd:
multiple = self.rdef.role_cardinality(self.role) in '*+'
w(u'<table><tr><td>')
- jscall = unicode(js.ajaxBoxShowSelector(
+ jscall = text_type(js.ajaxBoxShowSelector(
self.__regid__, entity.eid, self.fname_vocabulary,
self.fname_validate, self.added_msg and _(self.added_msg),
_(stdmsgs.BUTTON_OK[0]), _(stdmsgs.BUTTON_CANCEL[0]),
@@ -677,6 +682,7 @@
# old contextual components, deprecated ########################################
+@add_metaclass(class_deprecated)
class EntityVComponent(Component):
"""abstract base class for additinal components displayed in content
headers and footer according to:
@@ -687,7 +693,6 @@
it should be configured using .accepts, .etype, .rtype, .target and
.context class attributes
"""
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.10] *VComponent classes are deprecated, use *CtxComponent instead (%(cls)s)'
__registry__ = 'ctxcomponents'
--- a/web/controller.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/controller.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,6 +19,8 @@
__docformat__ = "restructuredtext en"
+from six import PY2
+
from logilab.mtconverter import xml_escape
from logilab.common.registry import yes
from logilab.common.deprecation import deprecated
@@ -87,7 +89,7 @@
rql = req.form.get('rql')
if rql:
req.ensure_ro_rql(rql)
- if not isinstance(rql, unicode):
+ if PY2 and not isinstance(rql, unicode):
rql = unicode(rql, req.encoding)
pp = req.vreg['components'].select_or_none('magicsearch', req)
if pp is not None:
@@ -155,7 +157,7 @@
and '_cwmsgid' in newparams):
# are we here on creation or modification?
if any(eid == self._edited_entity.eid
- for eid in self._cw.data.get('eidmap', {}).itervalues()):
+ for eid in self._cw.data.get('eidmap', {}).values()):
msg = self._cw._('click here to see created entity')
else:
msg = self._cw._('click here to see edited entity')
--- a/web/cors.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/cors.py Thu Nov 12 10:52:28 2015 +0100
@@ -14,7 +14,7 @@
"""
-import urlparse
+from six.moves.urllib.parse import urlsplit
from cubicweb.web import LOGGER
info = LOGGER.info
@@ -37,7 +37,7 @@
In case of non-compliance, no CORS-related header is set.
"""
- base_url = urlparse.urlsplit(req.base_url())
+ base_url = urlsplit(req.base_url())
expected_host = '://'.join((base_url.scheme, base_url.netloc))
if not req.get_header('Origin') or req.get_header('Origin') == expected_host:
# not a CORS request, nothing to do
@@ -50,7 +50,7 @@
process_preflight(req, config)
else: # Simple CORS or actual request
process_simple(req, config)
- except CORSFailed, exc:
+ except CORSFailed as exc:
info('Cross origin resource sharing failed: %s' % exc)
except CORSPreflight:
info('Cross origin resource sharing: valid Preflight request %s')
@@ -101,7 +101,7 @@
if '*' not in allowed_origins and origin not in allowed_origins:
raise CORSFailed('Origin is not allowed')
# bit of sanity check; see "6.3 Security"
- myhost = urlparse.urlsplit(req.base_url()).netloc
+ myhost = urlsplit(req.base_url()).netloc
host = req.get_header('Host')
if host != myhost:
info('cross origin resource sharing detected possible '
--- a/web/facet.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/facet.py Thu Nov 12 10:52:28 2015 +0100
@@ -50,13 +50,15 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from functools import reduce
from warnings import warn
from copy import deepcopy
from datetime import datetime, timedelta
+from six import text_type, string_types
+
from logilab.mtconverter import xml_escape
from logilab.common.graph import has_path
from logilab.common.decorators import cached, cachedproperty
@@ -80,7 +82,7 @@
ptypes = facet.cw_rset.column_types(0)
if len(ptypes) == 1:
return display_name(facet._cw, facet.rtype, form=facet.role,
- context=iter(ptypes).next())
+ context=next(iter(ptypes)))
return display_name(facet._cw, facet.rtype, form=facet.role)
def get_facet(req, facetid, select, filtered_variable):
@@ -133,7 +135,7 @@
or the first variable selected in column 0
"""
if mainvar is None:
- vref = select.selection[0].iget_nodes(nodes.VariableRef).next()
+ vref = next(select.selection[0].iget_nodes(nodes.VariableRef))
return vref.variable
return select.defined_vars[mainvar]
@@ -156,7 +158,7 @@
for term in select.selection[:]:
select.remove_selected(term)
# remove unbound variables which only have some type restriction
- for dvar in list(select.defined_vars.itervalues()):
+ for dvar in list(select.defined_vars.values()):
if not (dvar is filtered_variable or dvar.stinfo['relations']):
select.undefine_variable(dvar)
# global tree config: DISTINCT, LIMIT, OFFSET
@@ -305,7 +307,7 @@
# optional relation
return ovar
if all(rdef.cardinality[cardidx] in '1+'
- for rdef in rschema.rdefs.itervalues()):
+ for rdef in rschema.rdefs.values()):
# mandatory relation without any restriction on the other variable
for orel in ovar.stinfo['relations']:
if rel is orel:
@@ -670,7 +672,7 @@
insert_attr_select_relation(
select, self.filtered_variable, self.rtype, self.role,
self.target_attr, select_target_entity=False)
- values = [unicode(x) for x, in self.rqlexec(select.as_string())]
+ values = [text_type(x) for x, in self.rqlexec(select.as_string())]
except Exception:
self.exception('while computing values for %s', self)
return []
@@ -719,14 +721,14 @@
def rset_vocabulary(self, rset):
if self.i18nable:
- _ = self._cw._
+ tr = self._cw._
else:
- _ = unicode
+ tr = text_type
if self.rql_sort:
- values = [(_(label), eid) for eid, label in rset]
+ values = [(tr(label), eid) for eid, label in rset]
else:
if self.label_vid is None:
- values = [(_(label), eid) for eid, label in rset]
+ values = [(tr(label), eid) for eid, label in rset]
else:
values = [(entity.view(self.label_vid), entity.eid)
for entity in rset.entities()]
@@ -754,7 +756,7 @@
# XXX handle rel is None case in RQLPathFacet?
if self.restr_attr != 'eid':
self.select.set_distinct(True)
- if isinstance(value, basestring):
+ if isinstance(value, string_types):
# only one value selected
if value:
self.select.add_constant_restriction(
@@ -808,7 +810,7 @@
rschema = self._cw.vreg.schema.rschema(self.rtype)
# XXX when called via ajax, no rset to compute possible types
possibletypes = self.cw_rset and self.cw_rset.column_types(0)
- for rdef in rschema.rdefs.itervalues():
+ for rdef in rschema.rdefs.values():
if possibletypes is not None:
if self.role == 'subject':
if rdef.subject not in possibletypes:
@@ -829,13 +831,13 @@
if self._cw.vreg.schema.rschema(self.rtype).final:
return False
if self.role == 'object':
- subj = utils.rqlvar_maker(defined=self.select.defined_vars,
- aliases=self.select.aliases).next()
+ subj = next(utils.rqlvar_maker(defined=self.select.defined_vars,
+ aliases=self.select.aliases))
obj = self.filtered_variable.name
else:
subj = self.filtered_variable.name
- obj = utils.rqlvar_maker(defined=self.select.defined_vars,
- aliases=self.select.aliases).next()
+ obj = next(utils.rqlvar_maker(defined=self.select.defined_vars,
+ aliases=self.select.aliases))
restrictions = []
if self.select.where:
restrictions.append(self.select.where.as_string())
@@ -916,15 +918,13 @@
def rset_vocabulary(self, rset):
if self.i18nable:
- _ = self._cw._
+ tr = self._cw._
else:
- _ = unicode
+ tr = text_type
if self.rql_sort:
- return [(_(value), value) for value, in rset]
- values = [(_(value), value) for value, in rset]
- if self.sortasc:
- return sorted(values)
- return reversed(sorted(values))
+ return [(tr(value), value) for value, in rset]
+ values = [(tr(value), value) for value, in rset]
+ return sorted(values, reverse=not self.sortasc)
class AttributeFacet(RelationAttributeFacet):
@@ -1073,7 +1073,7 @@
assert self.path and isinstance(self.path, (list, tuple)), \
'path should be a list of 3-uples, not %s' % self.path
for part in self.path:
- if isinstance(part, basestring):
+ if isinstance(part, string_types):
part = part.split()
assert len(part) == 3, \
'path should be a list of 3-uples, not %s' % part
@@ -1126,7 +1126,7 @@
cleanup_select(select, self.filtered_variable)
varmap, restrvar = self.add_path_to_select(skiplabel=True)
select.append_selected(nodes.VariableRef(restrvar))
- values = [unicode(x) for x, in self.rqlexec(select.as_string())]
+ values = [text_type(x) for x, in self.rqlexec(select.as_string())]
except Exception:
self.exception('while computing values for %s', self)
return []
@@ -1149,7 +1149,7 @@
varmap = {'X': self.filtered_variable}
actual_filter_variable = None
for part in self.path:
- if isinstance(part, basestring):
+ if isinstance(part, string_types):
part = part.split()
subject, rtype, object = part
if skiplabel and object == self.label_variable:
@@ -1165,7 +1165,7 @@
if len(attrtypes) > 1:
raise Exception('ambigous attribute %s, specify attrtype on %s'
% (rtype, self.__class__))
- self.restr_attr_type = iter(attrtypes).next()
+ self.restr_attr_type = next(iter(attrtypes))
if skipattrfilter:
actual_filter_variable = subject
continue
@@ -1253,7 +1253,7 @@
rset = self._range_rset()
if rset:
minv, maxv = rset[0]
- return [(unicode(minv), minv), (unicode(maxv), maxv)]
+ return [(text_type(minv), minv), (text_type(maxv), maxv)]
return []
def possible_values(self):
@@ -1272,7 +1272,7 @@
def formatvalue(self, value):
"""format `value` before in order to insert it in the RQL query"""
- return unicode(value)
+ return text_type(value)
def infvalue(self, min=False):
if min:
@@ -1373,7 +1373,7 @@
# *list* (see rqlexec implementation)
if rset:
minv, maxv = rset[0]
- return [(unicode(minv), minv), (unicode(maxv), maxv)]
+ return [(text_type(minv), minv), (text_type(maxv), maxv)]
return []
@@ -1392,7 +1392,7 @@
skiplabel=True, skipattrfilter=True)
restrel = None
for part in self.path:
- if isinstance(part, basestring):
+ if isinstance(part, string_types):
part = part.split()
subject, rtype, object = part
if object == self.filter_variable:
@@ -1516,7 +1516,7 @@
if not val or val & mask])
def possible_values(self):
- return [unicode(val) for label, val in self.vocabulary()]
+ return [text_type(val) for label, val in self.vocabulary()]
## html widets ################################################################
@@ -1595,7 +1595,7 @@
if selected:
cssclass += ' facetValueSelected'
w(u'<div class="%s" cubicweb:value="%s">\n'
- % (cssclass, xml_escape(unicode(value))))
+ % (cssclass, xml_escape(text_type(value))))
# If it is overflowed one must add padding to compensate for the vertical
# scrollbar; given current css values, 4 blanks work perfectly ...
padding = u' ' * self.scrollbar_padding_factor if overflow else u''
@@ -1754,7 +1754,7 @@
imgsrc = self._cw.data_url(self.unselected_img)
imgalt = self._cw._('not selected')
w(u'<div class="%s" cubicweb:value="%s">\n'
- % (cssclass, xml_escape(unicode(self.value))))
+ % (cssclass, xml_escape(text_type(self.value))))
w(u'<div>')
w(u'<img src="%s" alt="%s" cubicweb:unselimg="true" /> ' % (imgsrc, imgalt))
w(u'<label class="facetTitle" cubicweb:facetName="%s">%s</label>'
--- a/web/form.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/form.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,6 +20,8 @@
from warnings import warn
+from six import add_metaclass
+
from logilab.common.decorators import iclassmethod
from logilab.common.deprecation import deprecated
@@ -74,8 +76,8 @@
found
"""
+@add_metaclass(metafieldsform)
class Form(AppObject):
- __metaclass__ = metafieldsform
__registry__ = 'forms'
parent_form = None
@@ -120,7 +122,7 @@
extrakw = {}
# search for navigation parameters and customization of existing
# attributes; remaining stuff goes in extrakwargs
- for key, val in kwargs.iteritems():
+ for key, val in kwargs.items():
if key in controller.NAV_FORM_PARAMETERS:
hiddens.append( (key, val) )
elif key == 'redirect_path':
--- a/web/formfields.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/formfields.py Thu Nov 12 10:52:28 2015 +0100
@@ -66,6 +66,8 @@
from warnings import warn
from datetime import datetime, timedelta
+from six import PY2, text_type, string_types
+
from logilab.mtconverter import xml_escape
from logilab.common import nullobject
from logilab.common.date import ustrftime
@@ -231,11 +233,14 @@
def __unicode__(self):
return self.as_string(False)
- def __str__(self):
- return self.as_string(False).encode('UTF8')
+ if PY2:
+ def __str__(self):
+ return self.as_string(False).encode('UTF8')
+ else:
+ __str__ = __unicode__
def __repr__(self):
- return self.as_string(True).encode('UTF8')
+ return self.as_string(True)
def init_widget(self, widget):
if widget is not None:
@@ -279,7 +284,7 @@
return u''
if value is True:
return u'1'
- return unicode(value)
+ return text_type(value)
def get_widget(self, form):
"""return the widget instance associated to this field"""
@@ -381,7 +386,7 @@
assert self.choices is not None
if callable(self.choices):
# pylint: disable=E1102
- if getattr(self.choices, 'im_self', None) is self:
+ if getattr(self.choices, '__self__', None) is self:
vocab = self.choices(form=form, **kwargs)
else:
vocab = self.choices(form=form, field=self, **kwargs)
@@ -794,7 +799,7 @@
if data:
encoding = self.encoding(form)
try:
- form.formvalues[(self, form)] = unicode(data.getvalue(), encoding)
+ form.formvalues[(self, form)] = data.getvalue().decode(encoding)
except UnicodeError:
pass
else:
@@ -815,7 +820,7 @@
def _process_form_value(self, form):
value = form._cw.form.get(self.input_name(form))
- if isinstance(value, unicode):
+ if isinstance(value, text_type):
# file modified using a text widget
return Binary(value.encode(self.encoding(form)))
return super(EditableFileField, self)._process_form_value(form)
@@ -842,7 +847,7 @@
self.widget.attrs.setdefault('size', self.default_text_input_size)
def _ensure_correctly_typed(self, form, value):
- if isinstance(value, basestring):
+ if isinstance(value, string_types):
value = value.strip()
if not value:
return None
@@ -924,7 +929,7 @@
return self.format_single_value(req, 1.234)
def _ensure_correctly_typed(self, form, value):
- if isinstance(value, basestring):
+ if isinstance(value, string_types):
value = value.strip()
if not value:
return None
@@ -946,7 +951,7 @@
def format_single_value(self, req, value):
if value:
value = format_time(value.days * 24 * 3600 + value.seconds)
- return unicode(value)
+ return text_type(value)
return u''
def example_format(self, req):
@@ -956,7 +961,7 @@
return u'20s, 10min, 24h, 4d'
def _ensure_correctly_typed(self, form, value):
- if isinstance(value, basestring):
+ if isinstance(value, string_types):
value = value.strip()
if not value:
return None
@@ -986,14 +991,14 @@
return self.format_single_value(req, datetime.now())
def _ensure_correctly_typed(self, form, value):
- if isinstance(value, basestring):
+ if isinstance(value, string_types):
value = value.strip()
if not value:
return None
try:
value = form._cw.parse_datetime(value, self.etype)
except ValueError as ex:
- raise ProcessFormError(unicode(ex))
+ raise ProcessFormError(text_type(ex))
return value
@@ -1083,7 +1088,7 @@
linkedto = form.linked_to.get((self.name, self.role))
if linkedto:
buildent = form._cw.entity_from_eid
- return [(buildent(eid).view('combobox'), unicode(eid))
+ return [(buildent(eid).view('combobox'), text_type(eid))
for eid in linkedto]
return []
@@ -1095,7 +1100,7 @@
# vocabulary doesn't include current values, add them
if form.edited_entity.has_eid():
rset = form.edited_entity.related(self.name, self.role)
- vocab += [(e.view('combobox'), unicode(e.eid))
+ vocab += [(e.view('combobox'), text_type(e.eid))
for e in rset.entities()]
return vocab
@@ -1129,11 +1134,11 @@
if entity.eid in done:
continue
done.add(entity.eid)
- res.append((entity.view('combobox'), unicode(entity.eid)))
+ res.append((entity.view('combobox'), text_type(entity.eid)))
return res
def format_single_value(self, req, value):
- return unicode(value)
+ return text_type(value)
def process_form_value(self, form):
"""process posted form and return correctly typed value"""
--- a/web/formwidgets.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/formwidgets.py Thu Nov 12 10:52:28 2015 +0100
@@ -99,6 +99,8 @@
from datetime import date
from warnings import warn
+from six import text_type, string_types
+
from logilab.mtconverter import xml_escape
from logilab.common.deprecation import deprecated
from logilab.common.date import todatetime
@@ -282,7 +284,7 @@
"""
posted = form._cw.form
val = posted.get(field.input_name(form, self.suffix))
- if isinstance(val, basestring):
+ if isinstance(val, string_types):
val = val.strip()
return val
@@ -416,7 +418,7 @@
lines = value.splitlines()
linecount = len(lines)
for line in lines:
- linecount += len(line) / self._columns
+ linecount += len(line) // self._columns
attrs.setdefault('cols', self._columns)
attrs.setdefault('rows', min(self._maxrows, linecount + self._minrows))
return tags.textarea(value, name=field.input_name(form, self.suffix),
@@ -474,7 +476,7 @@
options.append(u'</optgroup>')
if not 'size' in attrs:
if self._multiple:
- size = unicode(min(self.default_size, len(vocab) or 1))
+ size = text_type(min(self.default_size, len(vocab) or 1))
else:
size = u'1'
attrs['size'] = size
@@ -706,7 +708,7 @@
else:
value = self.value
attrs = self.attributes(form, field)
- attrs.setdefault('size', unicode(self.default_size))
+ attrs.setdefault('size', text_type(self.default_size))
return tags.input(name=field.input_name(form, self.suffix),
value=value, type='text', **attrs)
@@ -779,13 +781,13 @@
try:
date = todatetime(req.parse_datetime(datestr, 'Date'))
except ValueError as exc:
- raise ProcessFormError(unicode(exc))
+ raise ProcessFormError(text_type(exc))
if timestr is None:
return date
try:
time = req.parse_datetime(timestr, 'Time')
except ValueError as exc:
- raise ProcessFormError(unicode(exc))
+ raise ProcessFormError(text_type(exc))
return date.replace(hour=time.hour, minute=time.minute, second=time.second)
@@ -993,12 +995,12 @@
req = form._cw
values = {}
path = req.form.get(field.input_name(form, 'path'))
- if isinstance(path, basestring):
+ if isinstance(path, string_types):
path = path.strip()
if path is None:
path = u''
fqs = req.form.get(field.input_name(form, 'fqs'))
- if isinstance(fqs, basestring):
+ if isinstance(fqs, string_types):
fqs = fqs.strip() or None
if fqs:
for i, line in enumerate(fqs.split('\n')):
@@ -1009,7 +1011,7 @@
except ValueError:
raise ProcessFormError(req._("wrong query parameter line %s") % (i+1))
# value will be url quoted by build_url_params
- values.setdefault(key.encode(req.encoding), []).append(val)
+ values.setdefault(key, []).append(val)
if not values:
return path
return u'%s?%s' % (path, req.build_url_params(**values))
--- a/web/htmlwidgets.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/htmlwidgets.py Thu Nov 12 10:52:28 2015 +0100
@@ -24,6 +24,9 @@
import random
from math import floor
+from six import add_metaclass
+from six.moves import range
+
from logilab.mtconverter import xml_escape
from logilab.common.deprecation import class_deprecated
@@ -115,9 +118,9 @@
self.w(u'</div>')
+@add_metaclass(class_deprecated)
class SideBoxWidget(BoxWidget):
"""default CubicWeb's sidebox widget"""
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.10] class %(cls)s is deprecated'
title_class = u'sideBoxTitle'
@@ -207,9 +210,9 @@
self.w(u'</ul></div></div>')
+@add_metaclass(class_deprecated)
class BoxField(HTMLWidget):
"""couples label / value meant to be displayed in a box"""
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.10] class %(cls)s is deprecated'
def __init__(self, label, value):
self.label = label
@@ -220,18 +223,19 @@
u'<span class="value">%s</span></div></li>'
% (self.label, self.value))
+
+@add_metaclass(class_deprecated)
class BoxSeparator(HTMLWidget):
"""a menu separator"""
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.10] class %(cls)s is deprecated'
def _render(self):
self.w(u'</ul><hr class="boxSeparator"/><ul>')
+@add_metaclass(class_deprecated)
class BoxLink(HTMLWidget):
"""a link in a box"""
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.10] class %(cls)s is deprecated'
def __init__(self, href, label, _class='', title='', ident='', escape=False):
self.href = href
@@ -252,9 +256,9 @@
self.w(u'<li class="%s">%s</li>\n' % (self._class, link))
+@add_metaclass(class_deprecated)
class BoxHtml(HTMLWidget):
"""a form in a box"""
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.10] class %(cls)s is deprecated'
def __init__(self, rawhtml):
self.rawhtml = rawhtml
@@ -339,17 +343,17 @@
self.w(u'<thead>')
self.w(u'<tr class="header">')
for column in self.columns:
- attrs = ('%s="%s"' % (name, value) for name, value in column.cell_attrs.iteritems())
+ attrs = ('%s="%s"' % (name, value) for name, value in column.cell_attrs.items())
self.w(u'<th %s>%s</th>' % (' '.join(attrs), column.name or u''))
self.w(u'</tr>')
self.w(u'</thead><tbody>')
- for rowindex in xrange(len(self.model.get_rows())):
+ for rowindex in range(len(self.model.get_rows())):
klass = (rowindex%2==1) and 'odd' or 'even'
self.w(u'<tr class="%s" %s>' % (klass, self.highlight))
for column, sortvalue in self.itercols(rowindex):
attrs = dict(column.cell_attrs)
attrs["cubicweb:sortvalue"] = sortvalue
- attrs = ('%s="%s"' % (name, value) for name, value in attrs.iteritems())
+ attrs = ('%s="%s"' % (name, value) for name, value in attrs.items())
self.w(u'<td %s>' % (' '.join(attrs)))
for cellvid, colindex in column.cellrenderers:
self.model.render_cell(cellvid, rowindex, colindex, w=self.w)
--- a/web/http_headers.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/http_headers.py Thu Nov 12 10:52:28 2015 +0100
@@ -2,11 +2,14 @@
# http://twistedmatrix.com/trac/wiki/TwistedWeb2
-import types, time
+import time
from calendar import timegm
import base64
import re
-import urlparse
+
+from six import string_types
+from six.moves.urllib.parse import urlparse
+
def dashCapitalize(s):
''' Capitalize a string, making sure to treat - as a word seperator '''
@@ -295,9 +298,9 @@
cur = cur+1
if qpair:
- raise ValueError, "Missing character after '\\'"
+ raise ValueError("Missing character after '\\'")
if quoted:
- raise ValueError, "Missing end quote"
+ raise ValueError("Missing end quote")
if start != cur:
if foldCase:
@@ -347,7 +350,7 @@
##### parser utilities:
def checkSingleToken(tokens):
if len(tokens) != 1:
- raise ValueError, "Expected single token, not %s." % (tokens,)
+ raise ValueError("Expected single token, not %s." % (tokens,))
return tokens[0]
def parseKeyValue(val):
@@ -355,11 +358,11 @@
return val[0], None
elif len(val) == 3 and val[1] == Token('='):
return val[0], val[2]
- raise ValueError, "Expected key or key=value, but got %s." % (val,)
+ raise ValueError("Expected key or key=value, but got %s." % (val,))
def parseArgs(field):
args = split(field, Token(';'))
- val = args.next()
+ val = next(args)
args = [parseKeyValue(arg) for arg in args]
return val, args
@@ -380,7 +383,7 @@
def unique(seq):
'''if seq is not a string, check it's a sequence of one element and return it'''
- if isinstance(seq, basestring):
+ if isinstance(seq, string_types):
return seq
if len(seq) != 1:
raise ValueError('single value required, not %s' % seq)
@@ -398,7 +401,7 @@
"""Ensure origin is a valid URL-base stuff, or null"""
if origin == 'null':
return origin
- p = urlparse.urlparse(origin)
+ p = urlparse(origin)
if p.params or p.query or p.username or p.path not in ('', '/'):
raise ValueError('Incorrect Accept-Control-Allow-Origin value %s' % origin)
if p.scheme not in ('http', 'https'):
@@ -452,14 +455,15 @@
"""
if (value in (True, 1) or
- isinstance(value, basestring) and value.lower() == 'true'):
+ isinstance(value, string_types) and value.lower() == 'true'):
return 'true'
if (value in (False, 0) or
- isinstance(value, basestring) and value.lower() == 'false'):
+ isinstance(value, string_types) and value.lower() == 'false'):
return 'false'
raise ValueError("Invalid true/false header value: %s" % value)
class MimeType(object):
+ @classmethod
def fromString(klass, mimeTypeString):
"""Generate a MimeType object from the given string.
@@ -469,8 +473,6 @@
"""
return DefaultHTTPHandler.parse('content-type', [mimeTypeString])
- fromString = classmethod(fromString)
-
def __init__(self, mediaType, mediaSubtype, params={}, **kwargs):
"""
@type mediaType: C{str}
@@ -499,14 +501,14 @@
return "MimeType(%r, %r, %r)" % (self.mediaType, self.mediaSubtype, self.params)
def __hash__(self):
- return hash(self.mediaType)^hash(self.mediaSubtype)^hash(tuple(self.params.iteritems()))
+ return hash(self.mediaType)^hash(self.mediaSubtype)^hash(tuple(self.params.items()))
##### Specific header parsers.
def parseAccept(field):
type, args = parseArgs(field)
if len(type) != 3 or type[1] != Token('/'):
- raise ValueError, "MIME Type "+str(type)+" invalid."
+ raise ValueError("MIME Type "+str(type)+" invalid.")
# okay, this spec is screwy. A 'q' parameter is used as the separator
# between MIME parameters and (as yet undefined) additional HTTP
@@ -569,7 +571,7 @@
type, args = parseArgs(header)
if len(type) != 3 or type[1] != Token('/'):
- raise ValueError, "MIME Type "+str(type)+" invalid."
+ raise ValueError("MIME Type "+str(type)+" invalid.")
args = [(kv[0].lower(), kv[1]) for kv in args]
@@ -730,7 +732,7 @@
out ="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype)
if mimeType.params:
- out+=';'+generateKeyValues(mimeType.params.iteritems())
+ out+=';'+generateKeyValues(mimeType.params.items())
if q != 1.0:
out+=(';q=%.3f' % (q,)).rstrip('0').rstrip('.')
@@ -766,7 +768,8 @@
v = [field.strip().lower() for field in v.split(',')]
return k, v
-def generateCacheControl((k, v)):
+def generateCacheControl(args):
+ k, v = args
if v is None:
return str(k)
else:
@@ -833,7 +836,7 @@
def generateContentType(mimeType):
out = "%s/%s" % (mimeType.mediaType, mimeType.mediaSubtype)
if mimeType.params:
- out += ';' + generateKeyValues(mimeType.params.iteritems())
+ out += ';' + generateKeyValues(mimeType.params.items())
return out
def generateIfRange(dateOrETag):
@@ -854,7 +857,7 @@
try:
l = []
- for k, v in dict(challenge).iteritems():
+ for k, v in dict(challenge).items():
l.append("%s=%s" % (k, quoteString(v)))
_generated.append("%s %s" % (scheme, ", ".join(l)))
@@ -864,7 +867,7 @@
return _generated
def generateAuthorization(seq):
- return [' '.join(seq)]
+ return [' '.join(str(v) for v in seq)]
####
@@ -1326,10 +1329,10 @@
self._headers = {}
self.handler = handler
if headers is not None:
- for key, value in headers.iteritems():
+ for key, value in headers.items():
self.setHeader(key, value)
if rawHeaders is not None:
- for key, value in rawHeaders.iteritems():
+ for key, value in rawHeaders.items():
self.setRawHeaders(key, value)
def _setRawHeaders(self, headers):
@@ -1458,7 +1461,7 @@
"""Return an iterator of key, value pairs of all headers
contained in this object, as strings. The keys are capitalized
in canonical capitalization."""
- for k, v in self._raw_headers.iteritems():
+ for k, v in self._raw_headers.items():
if v is _RecalcNeeded:
v = self._toRaw(k)
yield self.canonicalNameCaps(k), v
@@ -1480,7 +1483,7 @@
is strictly an error, but we're nice.).
"""
-iteritems = lambda x: x.iteritems()
+iteritems = lambda x: x.items()
parser_general_headers = {
--- a/web/propertysheet.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/propertysheet.py Thu Nov 12 10:52:28 2015 +0100
@@ -57,7 +57,9 @@
def load(self, fpath):
scriptglobals = self.context.copy()
scriptglobals['__file__'] = fpath
- execfile(fpath, scriptglobals, self)
+ with open(fpath, 'rb') as fobj:
+ code = compile(fobj.read(), fpath, 'exec')
+ exec(code, scriptglobals, self)
for name, type in TYPE_CHECKS:
if name in self:
if not isinstance(self[name], type):
@@ -67,10 +69,10 @@
self._ordered_propfiles.append(fpath)
def need_reload(self):
- for rid, (adirectory, rdirectory, mtime) in self._cache.items():
+ for rid, (adirectory, rdirectory, mtime) in list(self._cache.items()):
if os.stat(osp.join(rdirectory, rid)).st_mtime > mtime:
del self._cache[rid]
- for fpath, mtime in self._propfile_mtime.iteritems():
+ for fpath, mtime in self._propfile_mtime.items():
if os.stat(fpath).st_mtime > mtime:
return True
return False
@@ -96,7 +98,7 @@
if not osp.exists(rcachedir):
os.makedirs(rcachedir)
sourcefile = osp.join(rdirectory, rid)
- content = file(sourcefile).read()
+ content = open(sourcefile).read()
# XXX replace % not followed by a paren by %% to avoid having to do
# this in the source css file ?
try:
@@ -105,7 +107,7 @@
self.error("can't process %s/%s: %s", rdirectory, rid, ex)
adirectory = rdirectory
else:
- stream = file(cachefile, 'w')
+ stream = open(cachefile, 'w')
stream.write(content)
stream.close()
adirectory = self._cache_directory
--- a/web/request.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/request.py Thu Nov 12 10:52:28 2015 +0100
@@ -22,15 +22,16 @@
import time
import random
import base64
-import urllib
-from StringIO import StringIO
from hashlib import sha1 # pylint: disable=E0611
-from Cookie import SimpleCookie
from calendar import timegm
from datetime import date, datetime
-from urlparse import urlsplit
-import httplib
from warnings import warn
+from io import BytesIO
+
+from six import PY2, binary_type, text_type, string_types
+from six.moves import http_client
+from six.moves.urllib.parse import urlsplit, quote as urlquote
+from six.moves.http_cookies import SimpleCookie
from rql.utils import rqlvar_maker
@@ -51,7 +52,7 @@
_MARKER = object()
def build_cb_uid(seed):
- sha = sha1('%s%s%s' % (time.time(), seed, random.random()))
+ sha = sha1(('%s%s%s' % (time.time(), seed, random.random())).encode('ascii'))
return 'cb_%s' % (sha.hexdigest())
@@ -137,12 +138,12 @@
#: received headers
self._headers_in = Headers()
if headers is not None:
- for k, v in headers.iteritems():
+ for k, v in headers.items():
self._headers_in.addRawHeader(k, v)
#: form parameters
self.setup_params(form)
#: received body
- self.content = StringIO()
+ self.content = BytesIO()
# prepare output header
#: Header used for the final response
self.headers_out = Headers()
@@ -250,12 +251,13 @@
if params is None:
return
encoding = self.encoding
- for param, val in params.iteritems():
+ for param, val in params.items():
if isinstance(val, (tuple, list)):
- val = [unicode(x, encoding) for x in val]
+ if PY2:
+ val = [unicode(x, encoding) for x in val]
if len(val) == 1:
val = val[0]
- elif isinstance(val, str):
+ elif PY2 and isinstance(val, str):
val = unicode(val, encoding)
if param in self.no_script_form_params and val:
val = self.no_script_form_param(param, val)
@@ -317,7 +319,7 @@
return None
def set_message(self, msg):
- assert isinstance(msg, unicode)
+ assert isinstance(msg, text_type)
self.reset_message()
self._msg = msg
@@ -330,7 +332,7 @@
def set_redirect_message(self, msg):
# TODO - this should probably be merged with append_to_redirect_message
- assert isinstance(msg, unicode)
+ assert isinstance(msg, text_type)
msgid = self.redirect_message_id()
self.session.data[msgid] = msg
return msgid
@@ -437,7 +439,7 @@
eids = form['eid']
except KeyError:
raise NothingToEdit(self._('no selected entities'))
- if isinstance(eids, basestring):
+ if isinstance(eids, string_types):
eids = (eids,)
for peid in eids:
if withtype:
@@ -569,18 +571,18 @@
header = [disposition]
unicode_filename = None
try:
- ascii_filename = filename.encode('ascii')
+ ascii_filename = filename.encode('ascii').decode('ascii')
except UnicodeEncodeError:
# fallback filename for very old browser
unicode_filename = filename
- ascii_filename = filename.encode('ascii', 'ignore')
+ ascii_filename = filename.encode('ascii', 'ignore').decode('ascii')
# escape " and \
# see http://greenbytes.de/tech/tc2231/#attwithfilenameandextparamescaped
ascii_filename = ascii_filename.replace('\x5c', r'\\').replace('"', r'\"')
header.append('filename="%s"' % ascii_filename)
if unicode_filename is not None:
# encoded filename according RFC5987
- urlquoted_filename = urllib.quote(unicode_filename.encode('utf-8'), '')
+ urlquoted_filename = urlquote(unicode_filename.encode('utf-8'), '')
header.append("filename*=utf-8''" + urlquoted_filename)
self.set_header('content-disposition', ';'.join(header))
@@ -596,7 +598,7 @@
:param localfile: if True, the default data dir prefix is added to the
JS filename
"""
- if isinstance(jsfiles, basestring):
+ if isinstance(jsfiles, string_types):
jsfiles = (jsfiles,)
for jsfile in jsfiles:
if localfile:
@@ -616,7 +618,7 @@
the css inclusion. cf:
http://msdn.microsoft.com/en-us/library/ms537512(VS.85).aspx
"""
- if isinstance(cssfiles, basestring):
+ if isinstance(cssfiles, string_types):
cssfiles = (cssfiles,)
if ieonly:
if self.ie_browser():
@@ -738,9 +740,9 @@
# overwrite headers_out to forge a brand new not-modified response
self.headers_out = self._forge_cached_headers()
if self.http_method() in ('HEAD', 'GET'):
- self.status_out = httplib.NOT_MODIFIED
+ self.status_out = http_client.NOT_MODIFIED
else:
- self.status_out = httplib.PRECONDITION_FAILED
+ self.status_out = http_client.PRECONDITION_FAILED
# XXX replace by True once validate_cache bw compat method is dropped
return self.status_out
# XXX replace by False once validate_cache bw compat method is dropped
@@ -800,7 +802,7 @@
def header_accept_language(self):
"""returns an ordered list of preferred languages"""
acceptedlangs = self.get_header('Accept-Language', raw=False) or {}
- for lang, _ in sorted(acceptedlangs.iteritems(), key=lambda x: x[1],
+ for lang, _ in sorted(acceptedlangs.items(), key=lambda x: x[1],
reverse=True):
lang = lang.split('-')[0]
yield lang
@@ -844,7 +846,7 @@
scheme = scheme.lower()
try:
assert scheme == "basic"
- user, passwd = base64.decodestring(rest).split(":", 1)
+ user, passwd = base64.decodestring(rest.encode('ascii')).split(b":", 1)
# XXX HTTP header encoding: use email.Header?
return user.decode('UTF8'), passwd
except Exception as ex:
@@ -966,8 +968,10 @@
def __getattribute__(self, attr):
raise AuthenticationError()
- def __nonzero__(self):
+ def __bool__(self):
return False
+
+ __nonzero__ = __bool__
class _MockAnonymousSession(object):
sessionid = 'thisisnotarealsession'
@@ -1023,8 +1027,8 @@
self.set_language(lang)
except KeyError:
# this occurs usually during test execution
- self._ = self.__ = unicode
- self.pgettext = lambda x, y: unicode(y)
+ self._ = self.__ = text_type
+ self.pgettext = lambda x, y: text_type(y)
entity_metas = _cnx_func('entity_metas')
source_defs = _cnx_func('source_defs')
--- a/web/schemaviewer.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/schemaviewer.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,9 @@
"""an helper class to display CubicWeb schema using ureports"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
+
+from six import string_types
from logilab.common.ureports import Section, Title, Table, Link, Span, Text
@@ -218,7 +220,7 @@
elif prop == 'constraints':
val = ', '.join([c.expression for c in val])
elif isinstance(val, dict):
- for key, value in val.iteritems():
+ for key, value in val.items():
if isinstance(value, (list, tuple)):
val[key] = ', '.join(sorted( str(v) for v in value))
val = str(val)
@@ -226,7 +228,7 @@
elif isinstance(val, (list, tuple)):
val = sorted(val)
val = ', '.join(str(v) for v in val)
- elif val and isinstance(val, basestring):
+ elif val and isinstance(val, string_types):
val = _(val)
else:
val = str(val)
--- a/web/test/data/views.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/data/views.py Thu Nov 12 10:52:28 2015 +0100
@@ -42,10 +42,10 @@
"""
try:
result_dict = {}
- for key, value in self._cw.form.iteritems():
+ for key, value in self._cw.form.items():
result_dict[key] = _recursive_replace_stream_by_content(value)
return result_dict
- except Exception, ex:
+ except Exception as ex:
import traceback as tb
tb.print_exc(ex)
--- a/web/test/jstests/ajax_url2.html Thu Mar 06 15:55:33 2014 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,7 +0,0 @@
-<div id="ajaxroot">
- <div class="ajaxHtmlHead">
- <script src="http://foo.js" type="text/javascript"> </script>
- <link rel="stylesheet" type="text/css" media="all" href="qunit.css" />
- </div>
- <h1>Hello</h1>
-</div>
--- a/web/test/test_jscript.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/test_jscript.py Thu Nov 12 10:52:28 2015 +0100
@@ -28,11 +28,6 @@
"../../web/data/cubicweb.compat.js",
"../../web/data/cubicweb.htmlhelpers.js",
"../../web/data/cubicweb.ajax.js",
- ), (
- "jstests/ajax_url0.html",
- "jstests/ajax_url1.html",
- "jstests/ajax_url2.html",
- "jstests/ajaxresult.json",
),
),
)
--- a/web/test/test_views.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/test_views.py Thu Nov 12 10:52:28 2015 +0100
@@ -50,19 +50,19 @@
composite entity
"""
with self.admin_access.web_request() as req:
- rset = req.execute('CWUser X WHERE X login "admin"')
+ rset = req.execute(u'CWUser X WHERE X login "admin"')
self.view('copy', rset, req=req)
def test_sortable_js_added(self):
with self.admin_access.web_request() as req:
# sortable.js should not be included by default
rset = req.execute('CWUser X')
- self.assertNotIn('jquery.tablesorter.js', self.view('oneline', rset, req=req).source)
+ self.assertNotIn(b'jquery.tablesorter.js', self.view('oneline', rset, req=req).source)
with self.admin_access.web_request() as req:
# but should be included by the tableview
rset = req.execute('Any P,F,S LIMIT 1 WHERE P is CWUser, P firstname F, P surname S')
- self.assertIn('jquery.tablesorter.js', self.view('table', rset, req=req).source)
+ self.assertIn(b'jquery.tablesorter.js', self.view('table', rset, req=req).source)
def test_js_added_only_once(self):
with self.admin_access.web_request() as req:
@@ -70,14 +70,14 @@
self.vreg.register(SomeView)
rset = req.execute('CWUser X')
source = self.view('someview', rset, req=req).source
- self.assertEqual(source.count('spam.js'), 1)
+ self.assertEqual(source.count(b'spam.js'), 1)
def test_unrelateddivs(self):
with self.admin_access.client_cnx() as cnx:
group = cnx.create_entity('CWGroup', name=u'R&D')
cnx.commit()
with self.admin_access.web_request(relation='in_group_subject') as req:
- rset = req.execute('Any X WHERE X is CWUser, X login "admin"')
+ rset = req.execute(u'Any X WHERE X is CWUser, X login "admin"')
self.view('unrelateddivs', rset, req=req)
--- a/web/test/unittest_application.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/unittest_application.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,8 +17,11 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""unit tests for cubicweb.web.application"""
-import base64, Cookie
-import httplib
+import base64
+
+from six import text_type
+from six.moves import http_client
+from six.moves.http_cookies import SimpleCookie
from logilab.common.testlib import TestCase, unittest_main
from logilab.common.decorators import clear_cache, classproperty
@@ -180,7 +183,7 @@
def test_publish_validation_error(self):
with self.admin_access.web_request() as req:
user = self.user(req)
- eid = unicode(user.eid)
+ eid = text_type(user.eid)
req.form = {
'eid': eid,
'__type:'+eid: 'CWUser', '_cw_entity_fields:'+eid: 'login-subject',
@@ -267,14 +270,14 @@
with self.admin_access.web_request(vid='test.ajax.error') as req:
req.ajax_request = True
page = app.handle_request(req, '')
- self.assertEqual(httplib.INTERNAL_SERVER_ERROR,
+ self.assertEqual(http_client.INTERNAL_SERVER_ERROR,
req.status_out)
def _test_cleaned(self, kwargs, injected, cleaned):
with self.admin_access.web_request(**kwargs) as req:
page = self.app_handle_request(req, 'view')
- self.assertNotIn(injected, page)
- self.assertIn(cleaned, page)
+ self.assertNotIn(injected.encode('ascii'), page)
+ self.assertIn(cleaned.encode('ascii'), page)
def test_nonregr_script_kiddies(self):
"""test against current script injection"""
@@ -314,8 +317,8 @@
self.app.handle_request(req, 'login')
self.assertEqual(401, req.status_out)
clear_cache(req, 'get_authorization')
- authstr = base64.encodestring('%s:%s' % (self.admlogin, self.admpassword))
- req.set_request_header('Authorization', 'basic %s' % authstr)
+ authstr = base64.encodestring(('%s:%s' % (self.admlogin, self.admpassword)).encode('ascii'))
+ req.set_request_header('Authorization', 'basic %s' % authstr.decode('ascii'))
self.assertAuthSuccess(req, origsession)
self.assertRaises(LogOut, self.app_handle_request, req, 'logout')
self.assertEqual(len(self.open_sessions), 0)
@@ -328,8 +331,8 @@
except Redirect as redir:
self.fail('anonymous user should get login form')
clear_cache(req, 'get_authorization')
- self.assertIn('__login', form)
- self.assertIn('__password', form)
+ self.assertIn(b'__login', form)
+ self.assertIn(b'__password', form)
self.assertFalse(req.cnx) # Mock cnx are False
req.form['__login'] = self.admlogin
req.form['__password'] = self.admpassword
@@ -361,7 +364,7 @@
def _reset_cookie(self, req):
# preparing the suite of the test
# set session id in cookie
- cookie = Cookie.SimpleCookie()
+ cookie = SimpleCookie()
sessioncookie = self.app.session_handler.session_cookie(req)
cookie[sessioncookie] = req.session.sessionid
req.set_request_header('Cookie', cookie[sessioncookie].OutputString(),
@@ -390,11 +393,11 @@
def test_http_auth_anon_allowed(self):
req, origsession = self.init_authentication('http', 'anon')
self._test_auth_anon(req)
- authstr = base64.encodestring('toto:pouet')
- req.set_request_header('Authorization', 'basic %s' % authstr)
+ authstr = base64.encodestring(b'toto:pouet')
+ req.set_request_header('Authorization', 'basic %s' % authstr.decode('ascii'))
self._test_anon_auth_fail(req)
- authstr = base64.encodestring('%s:%s' % (self.admlogin, self.admpassword))
- req.set_request_header('Authorization', 'basic %s' % authstr)
+ authstr = base64.encodestring(('%s:%s' % (self.admlogin, self.admpassword)).encode('ascii'))
+ req.set_request_header('Authorization', 'basic %s' % authstr.decode('ascii'))
self.assertAuthSuccess(req, origsession)
self.assertRaises(LogOut, self.app_handle_request, req, 'logout')
self.assertEqual(len(self.open_sessions), 0)
--- a/web/test/unittest_facet.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/unittest_facet.py Thu Nov 12 10:52:28 2015 +0100
@@ -70,8 +70,8 @@
def test_relation_optional_rel(self):
with self.admin_access.web_request() as req:
- rset = req.cnx.execute('Any X,GROUP_CONCAT(GN) GROUPBY X '
- 'WHERE X in_group G?, G name GN, NOT G name "users"')
+ rset = req.cnx.execute(u'Any X,GROUP_CONCAT(GN) GROUPBY X '
+ 'WHERE X in_group G?, G name GN, NOT G name "users"')
rqlst = rset.syntax_tree().copy()
select = rqlst.children[0]
filtered_variable, baserql = facet.init_facets(rset, select)
@@ -87,18 +87,18 @@
self.assertEqual(f.vocabulary(),
[(u'guests', guests), (u'managers', managers)])
# ensure rqlst is left unmodified
- self.assertEqual(rqlst.as_string(), "DISTINCT Any WHERE X in_group G?, G name GN, NOT G name 'users'")
+ self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X in_group G?, G name GN, NOT G name "users"')
#rqlst = rset.syntax_tree()
self.assertEqual(sorted(f.possible_values()),
[str(guests), str(managers)])
# ensure rqlst is left unmodified
- self.assertEqual(rqlst.as_string(), "DISTINCT Any WHERE X in_group G?, G name GN, NOT G name 'users'")
+ self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X in_group G?, G name GN, NOT G name "users"')
req.form[f.__regid__] = str(guests)
f.add_rql_restrictions()
# selection is cluttered because rqlst has been prepared for facet (it
# is not in real life)
self.assertEqual(f.select.as_string(),
- "DISTINCT Any WHERE X in_group G?, G name GN, NOT G name 'users', X in_group D, D eid %s" % guests)
+ 'DISTINCT Any WHERE X in_group G?, G name GN, NOT G name "users", X in_group D, D eid %s' % guests)
def test_relation_no_relation_1(self):
with self.admin_access.web_request() as req:
@@ -141,12 +141,12 @@
['guests', 'managers'])
# ensure rqlst is left unmodified
self.assertEqual(f.select.as_string(), 'DISTINCT Any WHERE X is CWUser')
- f._cw.form[f.__regid__] = 'guests'
+ f._cw.form[f.__regid__] = u'guests'
f.add_rql_restrictions()
# selection is cluttered because rqlst has been prepared for facet (it
# is not in real life)
self.assertEqual(f.select.as_string(),
- "DISTINCT Any WHERE X is CWUser, X in_group E, E name 'guests'")
+ 'DISTINCT Any WHERE X is CWUser, X in_group E, E name "guests"')
def test_hasrelation(self):
with self.admin_access.web_request() as req:
@@ -207,12 +207,12 @@
['admin', 'anon'])
# ensure rqlst is left unmodified
self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
- req.form[f.__regid__] = 'admin'
+ req.form[f.__regid__] = u'admin'
f.add_rql_restrictions()
# selection is cluttered because rqlst has been prepared for facet (it
# is not in real life)
self.assertEqual(f.select.as_string(),
- "DISTINCT Any WHERE X is CWUser, X login 'admin'")
+ 'DISTINCT Any WHERE X is CWUser, X login "admin"')
def test_bitfield(self):
with self.admin_access.web_request() as req:
@@ -310,12 +310,12 @@
self.assertEqual(f.possible_values(), ['admin',])
# ensure rqlst is left unmodified
self.assertEqual(rqlst.as_string(), 'DISTINCT Any WHERE X is CWUser')
- req.form[f.__regid__] = 'admin'
+ req.form[f.__regid__] = u'admin'
f.add_rql_restrictions()
# selection is cluttered because rqlst has been prepared for facet (it
# is not in real life)
self.assertEqual(f.select.as_string(),
- "DISTINCT Any WHERE X is CWUser, X created_by G, G owned_by H, H login 'admin'")
+ 'DISTINCT Any WHERE X is CWUser, X created_by G, G owned_by H, H login "admin"')
def test_rql_path_check_filter_label_variable(self):
with self.admin_access.web_request() as req:
@@ -359,13 +359,13 @@
def prepareg_aggregat_rqlst(self, req):
return self.prepare_rqlst(req,
- 'Any 1, COUNT(X) WHERE X is CWUser, X creation_date XD, '
- 'X modification_date XM, Y creation_date YD, Y is CWGroup '
- 'HAVING DAY(XD)>=DAY(YD) AND DAY(XM)<=DAY(YD)', 'X',
- expected_baserql='Any 1,COUNT(X) WHERE X is CWUser, X creation_date XD, '
+ u'Any 1, COUNT(X) WHERE X is CWUser, X creation_date XD, '
+ 'X modification_date XM, Y creation_date YD, Y is CWGroup '
+ 'HAVING DAY(XD)>=DAY(YD) AND DAY(XM)<=DAY(YD)', 'X',
+ expected_baserql=u'Any 1,COUNT(X) WHERE X is CWUser, X creation_date XD, '
'X modification_date XM, Y creation_date YD, Y is CWGroup '
'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)',
- expected_preparedrql='DISTINCT Any WHERE X is CWUser, X creation_date XD, '
+ expected_preparedrql=u'DISTINCT Any WHERE X is CWUser, X creation_date XD, '
'X modification_date XM, Y creation_date YD, Y is CWGroup '
'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)')
@@ -390,13 +390,13 @@
filtered_variable=filtered_variable)
self.assertEqual(f.vocabulary(), [(u'admin', u'admin')])
self.assertEqual(f.possible_values(), ['admin'])
- req.form[f.__regid__] = 'admin'
+ req.form[f.__regid__] = u'admin'
f.add_rql_restrictions()
self.assertEqual(f.select.as_string(),
- "DISTINCT Any WHERE X is CWUser, X creation_date XD, "
- "X modification_date XM, Y creation_date YD, Y is CWGroup, "
- "X created_by G, G owned_by H, H login 'admin' "
- "HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)")
+ 'DISTINCT Any WHERE X is CWUser, X creation_date XD, '
+ 'X modification_date XM, Y creation_date YD, Y is CWGroup, '
+ 'X created_by G, G owned_by H, H login "admin" '
+ 'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)')
def test_aggregat_query_attribute(self):
with self.admin_access.web_request() as req:
@@ -409,12 +409,12 @@
[(u'admin', u'admin'), (u'anon', u'anon')])
self.assertEqual(f.possible_values(),
['admin', 'anon'])
- req.form[f.__regid__] = 'admin'
+ req.form[f.__regid__] = u'admin'
f.add_rql_restrictions()
self.assertEqual(f.select.as_string(),
- "DISTINCT Any WHERE X is CWUser, X creation_date XD, "
- "X modification_date XM, Y creation_date YD, Y is CWGroup, X login 'admin' "
- "HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)")
+ 'DISTINCT Any WHERE X is CWUser, X creation_date XD, '
+ 'X modification_date XM, Y creation_date YD, Y is CWGroup, X login "admin" '
+ 'HAVING DAY(XD) >= DAY(YD), DAY(XM) <= DAY(YD)')
if __name__ == '__main__':
from logilab.common.testlib import unittest_main
--- a/web/test/unittest_form.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/unittest_form.py Thu Nov 12 10:52:28 2015 +0100
@@ -21,6 +21,8 @@
from xml.etree.ElementTree import fromstring
from lxml import html
+from six import text_type
+
from logilab.common.testlib import unittest_main
from cubicweb import Binary, ValidationError
@@ -65,19 +67,19 @@
t = req.create_entity('Tag', name=u'x')
form1 = self.vreg['forms'].select('edition', req, entity=t)
choices = [reid for rview, reid in form1.field_by_name('tags', 'subject', t.e_schema).choices(form1)]
- self.assertIn(unicode(b.eid), choices)
+ self.assertIn(text_type(b.eid), choices)
form2 = self.vreg['forms'].select('edition', req, entity=b)
choices = [reid for rview, reid in form2.field_by_name('tags', 'object', t.e_schema).choices(form2)]
- self.assertIn(unicode(t.eid), choices)
+ self.assertIn(text_type(t.eid), choices)
b.cw_clear_all_caches()
t.cw_clear_all_caches()
req.cnx.execute('SET X tags Y WHERE X is Tag, Y is BlogEntry')
choices = [reid for rview, reid in form1.field_by_name('tags', 'subject', t.e_schema).choices(form1)]
- self.assertIn(unicode(b.eid), choices)
+ self.assertIn(text_type(b.eid), choices)
choices = [reid for rview, reid in form2.field_by_name('tags', 'object', t.e_schema).choices(form2)]
- self.assertIn(unicode(t.eid), choices)
+ self.assertIn(text_type(t.eid), choices)
def test_form_field_choices_new_entity(self):
with self.admin_access.web_request() as req:
@@ -217,7 +219,7 @@
eidparam=True, role='subject')
with self.admin_access.web_request() as req:
file = req.create_entity('File', data_name=u"pouet.txt", data_encoding=u'UTF-8',
- data=Binary('new widgets system'))
+ data=Binary(b'new widgets system'))
form = FFForm(req, redirect_path='perdu.com', entity=file)
self.assertMultiLineEqual(self._render_entity_field(req, 'data', form),
'''<input id="data-subject:%(eid)s" name="data-subject:%(eid)s" tabindex="1" type="file" value="" />
@@ -241,7 +243,7 @@
eidparam=True, role='subject')
with self.admin_access.web_request() as req:
file = req.create_entity('File', data_name=u"pouet.txt", data_encoding=u'UTF-8',
- data=Binary('new widgets system'))
+ data=Binary(b'new widgets system'))
form = EFFForm(req, redirect_path='perdu.com', entity=file)
self.assertMultiLineEqual(self._render_entity_field(req, 'data', form),
'''<input id="data-subject:%(eid)s" name="data-subject:%(eid)s" tabindex="1" type="file" value="" />
--- a/web/test/unittest_formfields.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/unittest_formfields.py Thu Nov 12 10:52:28 2015 +0100
@@ -21,6 +21,7 @@
from yams.constraints import StaticVocabularyConstraint, SizeConstraint
+import cubicweb
from cubicweb.devtools import TestServerConfiguration
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.web.formwidgets import PasswordInput, TextArea, Select, Radio
@@ -127,7 +128,7 @@
self.assertIsInstance(field, BooleanField)
self.assertEqual(field.required, False)
self.assertIsInstance(field.widget, Radio)
- self.assertEqual(field.vocabulary(mock(_cw=mock(_=unicode))),
+ self.assertEqual(field.vocabulary(mock(_cw=mock(_=cubicweb._))),
[(u'yes', '1'), (u'no', '')])
def test_bool_field_explicit_choices(self):
@@ -135,7 +136,7 @@
field = guess_field(schema['CWAttribute'], schema['indexed'],
choices=[(u'maybe', '1'), (u'no', '')], req=req)
self.assertIsInstance(field.widget, Radio)
- self.assertEqual(field.vocabulary(mock(req=mock(_=unicode))),
+ self.assertEqual(field.vocabulary(mock(req=mock(_=cubicweb._))),
[(u'maybe', '1'), (u'no', '')])
--- a/web/test/unittest_formwidgets.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/unittest_formwidgets.py Thu Nov 12 10:52:28 2015 +0100
@@ -24,16 +24,23 @@
from cubes.file.entities import File
-def setUpModule(*args):
- global schema
- config = TestServerConfiguration('data', apphome=WidgetsTC.datadir)
- config.bootstrap_cubes()
- schema = config.load_schema()
class WidgetsTC(TestCase):
+ @classmethod
+ def setUpClass(cls):
+ super(WidgetsTC, cls).setUpClass()
+ config = TestServerConfiguration('data', apphome=cls.datadir)
+ config.bootstrap_cubes()
+ cls.schema = config.load_schema()
+
+ @classmethod
+ def tearDownClass(cls):
+ del cls.schema
+ super(WidgetsTC, cls).tearDownClass()
+
def test_editableurl_widget(self):
- field = formfields.guess_field(schema['Bookmark'], schema['path'])
+ field = formfields.guess_field(self.schema['Bookmark'], self.schema['path'])
widget = formwidgets.EditableURLWidget()
req = fake.FakeRequest(form={'path-subjectfqs:A': 'param=value&vid=view'})
form = mock(_cw=req, formvalues={}, edited_entity=mock(eid='A'))
@@ -41,7 +48,7 @@
'?param=value%26vid%3Dview')
def test_bitselect_widget(self):
- field = formfields.guess_field(schema['CWAttribute'], schema['ordernum'])
+ field = formfields.guess_field(self.schema['CWAttribute'], self.schema['ordernum'])
field.choices = [('un', '1',), ('deux', '2',)]
widget = formwidgets.BitSelect(settabindex=False)
req = fake.FakeRequest(form={'ordernum-subject:A': ['1', '2']})
--- a/web/test/unittest_idownloadable.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/unittest_idownloadable.py Thu Nov 12 10:52:28 2015 +0100
@@ -42,7 +42,7 @@
return self.entity.name() + '.txt'
def download_data(self):
- return 'Babar is not dead!'
+ return b'Babar is not dead!'
class BrokenIDownloadableGroup(IDownloadableUser):
@@ -72,7 +72,7 @@
get('content-disposition'))
self.assertEqual(['text/plain;charset=ascii'],
get('content-type'))
- self.assertEqual('Babar is not dead!', data)
+ self.assertEqual(b'Babar is not dead!', data)
def test_header_with_space(self):
with self.admin_access.web_request() as req:
@@ -87,13 +87,13 @@
get('content-disposition'))
self.assertEqual(['text/plain;charset=ascii'],
get('content-type'))
- self.assertEqual('Babar is not dead!', data)
+ self.assertEqual(b'Babar is not dead!', data)
def test_header_with_space_and_comma(self):
with self.admin_access.web_request() as req:
- self.create_user(req, login=ur'c " l\ a', password='babar')
+ self.create_user(req, login=u'c " l\\ a', password='babar')
req.cnx.commit()
- with self.new_access(ur'c " l\ a').web_request() as req:
+ with self.new_access(u'c " l\\ a').web_request() as req:
req.form['vid'] = 'download'
req.form['eid'] = str(req.user.eid)
data = self.ctrl_publish(req,'view')
@@ -102,7 +102,7 @@
get('content-disposition'))
self.assertEqual(['text/plain;charset=ascii'],
get('content-type'))
- self.assertEqual('Babar is not dead!', data)
+ self.assertEqual(b'Babar is not dead!', data)
def test_header_unicode_filename(self):
with self.admin_access.web_request() as req:
--- a/web/test/unittest_magicsearch.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/unittest_magicsearch.py Thu Nov 12 10:52:28 2015 +0100
@@ -21,6 +21,8 @@
import sys
from contextlib import contextmanager
+from six.moves import range
+
from logilab.common.testlib import TestCase, unittest_main
from rql import BadRQLQuery, RQLSyntaxError
@@ -62,19 +64,19 @@
def test_basic_translations(self):
"""tests basic translations (no ambiguities)"""
with self.proc() as proc:
- rql = "Any C WHERE C is Adresse, P adel C, C adresse 'Logilab'"
+ rql = u"Any C WHERE C is Adresse, P adel C, C adresse 'Logilab'"
rql, = proc.preprocess_query(rql)
- self.assertEqual(rql, "Any C WHERE C is EmailAddress, P use_email C, C address 'Logilab'")
+ self.assertEqual(rql, 'Any C WHERE C is EmailAddress, P use_email C, C address "Logilab"')
def test_ambiguous_translations(self):
"""tests possibly ambiguous translations"""
with self.proc() as proc:
- rql = "Any P WHERE P adel C, C is EmailAddress, C nom 'Logilab'"
+ rql = u"Any P WHERE P adel C, C is EmailAddress, C nom 'Logilab'"
rql, = proc.preprocess_query(rql)
- self.assertEqual(rql, "Any P WHERE P use_email C, C is EmailAddress, C alias 'Logilab'")
- rql = "Any P WHERE P is Utilisateur, P adel C, P nom 'Smith'"
+ self.assertEqual(rql, 'Any P WHERE P use_email C, C is EmailAddress, C alias "Logilab"')
+ rql = u"Any P WHERE P is Utilisateur, P adel C, P nom 'Smith'"
rql, = proc.preprocess_query(rql)
- self.assertEqual(rql, "Any P WHERE P is CWUser, P use_email C, P surname 'Smith'")
+ self.assertEqual(rql, 'Any P WHERE P is CWUser, P use_email C, P surname "Smith"')
class QSPreProcessorTC(CubicWebTC):
@@ -330,7 +332,7 @@
# suggestions should contain any possible value for
# a given attribute (limited to 10)
with self.admin_access.web_request() as req:
- for i in xrange(15):
+ for i in range(15):
req.create_entity('Personne', nom=u'n%s' % i, prenom=u'p%s' % i)
req.cnx.commit()
self.assertListEqual(['Any X WHERE X is Personne, X nom "n0"',
--- a/web/test/unittest_urlpublisher.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/unittest_urlpublisher.py Thu Nov 12 10:52:28 2015 +0100
@@ -25,7 +25,7 @@
from cubicweb.rset import ResultSet
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.devtools.fake import FakeRequest
-from cubicweb.web import NotFound, Redirect
+from cubicweb.web import NotFound, Redirect, views
from cubicweb.web.views.urlrewrite import SimpleReqRewriter
@@ -69,6 +69,7 @@
self.assertEqual("Any X,AA,AB ORDERBY AB WHERE X is_instance_of CWEType, "
"X modification_date AA, X name AB",
rset.printable_rql())
+ self.assertEqual(req.form['vid'], 'sameetypelist')
def test_rest_path_by_attr(self):
with self.admin_access.web_request() as req:
@@ -91,6 +92,7 @@
'X firstname AA, X login AB, X modification_date AC, '
'X surname AD, X login "admin"',
rset.printable_rql())
+ self.assertEqual(req.form['vid'], 'primary')
def test_rest_path_eid(self):
with self.admin_access.web_request() as req:
@@ -125,6 +127,15 @@
'X title "hell\'o"',
rset.printable_rql())
+ def test_rest_path_use_vid_from_rset(self):
+ with self.admin_access.web_request(headers={'Accept': 'application/rdf+xml'}) as req:
+ views.VID_BY_MIMETYPE['application/rdf+xml'] = 'rdf'
+ try:
+ ctrl, rset = self.process(req, 'CWEType')
+ finally:
+ views.VID_BY_MIMETYPE.pop('application/rdf+xml')
+ self.assertEqual(req.form['vid'], 'rdf')
+
def test_rest_path_errors(self):
with self.admin_access.web_request() as req:
self.assertRaises(NotFound, self.process, req, 'CWUser/eid/30000')
@@ -141,25 +152,24 @@
self.assertRaises(NotFound, self.process, req, '1/non_action')
self.assertRaises(NotFound, self.process, req, 'CWUser/login/admin/non_action')
-
def test_regexp_path(self):
"""tests the regexp path resolution"""
with self.admin_access.web_request() as req:
ctrl, rset = self.process(req, 'add/Task')
self.assertEqual(ctrl, 'view')
self.assertEqual(rset, None)
- self.assertEqual(req.form, {'etype' : "Task", 'vid' : "creation"})
+ self.assertEqual(req.form, {'etype': "Task", 'vid': "creation"})
self.assertRaises(NotFound, self.process, req, 'add/foo/bar')
def test_nonascii_path(self):
oldrules = SimpleReqRewriter.rules
- SimpleReqRewriter.rules = [(re.compile('/\w+', re.U), dict(vid='foo')),]
+ SimpleReqRewriter.rules = [(re.compile('/\w+', re.U), dict(vid='foo'))]
with self.admin_access.web_request() as req:
try:
path = str(FakeRequest().url_quote(u'été'))
ctrl, rset = self.process(req, path)
self.assertEqual(rset, None)
- self.assertEqual(req.form, {'vid' : "foo"})
+ self.assertEqual(req.form, {'vid': "foo"})
finally:
SimpleReqRewriter.rules = oldrules
--- a/web/test/unittest_urlrewrite.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/unittest_urlrewrite.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,6 +16,8 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+from six import text_type
+
from logilab.common import tempattr
from cubicweb.devtools.testlib import CubicWebTC
@@ -137,8 +139,8 @@
rgx_action(r'Any X WHERE X surname %(sn)s, '
'X firstname %(fn)s',
argsgroups=('sn', 'fn'),
- transforms={'sn' : unicode.capitalize,
- 'fn' : unicode.lower,})),
+ transforms={'sn' : text_type.capitalize,
+ 'fn' : text_type.lower,})),
]
with self.admin_access.web_request() as req:
rewriter = TestSchemaBasedRewriter(req)
--- a/web/test/unittest_views_basecontrollers.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/unittest_views_basecontrollers.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,12 +17,8 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""cubicweb.web.views.basecontrollers unit tests"""
-from urlparse import urlsplit, urlunsplit, urljoin
-# parse_qs is deprecated in cgi and has been moved to urlparse in Python 2.6
-try:
- from urlparse import parse_qs as url_parse_query
-except ImportError:
- from cgi import parse_qs as url_parse_query
+from six import text_type
+from six.moves.urllib.parse import urlsplit, urlunsplit, urljoin, parse_qs
import lxml
@@ -82,7 +78,7 @@
}
with self.assertRaises(ValidationError) as cm:
self.ctrl_publish(req)
- cm.exception.translate(unicode)
+ cm.exception.translate(text_type)
self.assertEqual({'login-subject': 'the value "admin" is already used, use another one'},
cm.exception.errors)
@@ -136,12 +132,12 @@
user = req.user
groupeids = [eid for eid, in req.execute('CWGroup G WHERE G name '
'in ("managers", "users")')]
- groups = [unicode(eid) for eid in groupeids]
- eid = unicode(user.eid)
+ groups = [text_type(eid) for eid in groupeids]
+ eid = text_type(user.eid)
req.form = {
'eid': eid, '__type:'+eid: 'CWUser',
'_cw_entity_fields:'+eid: 'login-subject,firstname-subject,surname-subject,in_group-subject',
- 'login-subject:'+eid: unicode(user.login),
+ 'login-subject:'+eid: text_type(user.login),
'surname-subject:'+eid: u'Th\xe9nault',
'firstname-subject:'+eid: u'Sylvain',
'in_group-subject:'+eid: groups,
@@ -159,7 +155,7 @@
self.create_user(cnx, u'user')
cnx.commit()
with self.new_access(u'user').web_request() as req:
- eid = unicode(req.user.eid)
+ eid = text_type(req.user.eid)
req.form = {
'eid': eid, '__maineid' : eid,
'__type:'+eid: 'CWUser',
@@ -179,12 +175,12 @@
with self.admin_access.web_request() as req:
user = req.user
groupeids = [g.eid for g in user.in_group]
- eid = unicode(user.eid)
+ eid = text_type(user.eid)
req.form = {
'eid': eid,
'__type:'+eid: 'CWUser',
'_cw_entity_fields:'+eid: 'login-subject,firstname-subject,surname-subject',
- 'login-subject:'+eid: unicode(user.login),
+ 'login-subject:'+eid: text_type(user.login),
'firstname-subject:'+eid: u'Th\xe9nault',
'surname-subject:'+eid: u'Sylvain',
}
@@ -207,7 +203,7 @@
'login-subject:X': u'adim',
'upassword-subject:X': u'toto', 'upassword-subject-confirm:X': u'toto',
'surname-subject:X': u'Di Mascio',
- 'in_group-subject:X': unicode(gueid),
+ 'in_group-subject:X': text_type(gueid),
'__type:Y': 'EmailAddress',
'_cw_entity_fields:Y': 'address-subject,use_email-object',
@@ -231,7 +227,7 @@
'__type:Y': 'File',
'_cw_entity_fields:Y': 'data-subject,described_by_test-object',
- 'data-subject:Y': (u'coucou.txt', Binary('coucou')),
+ 'data-subject:Y': (u'coucou.txt', Binary(b'coucou')),
'described_by_test-object:Y': 'X',
}
path, _params = self.expect_redirect_handle_request(req, 'edit')
@@ -256,7 +252,7 @@
'__type:Y': 'File',
'_cw_entity_fields:Y': 'data-subject',
- 'data-subject:Y': (u'coucou.txt', Binary('coucou')),
+ 'data-subject:Y': (u'coucou.txt', Binary(b'coucou')),
}
path, _params = self.expect_redirect_handle_request(req, 'edit')
self.assertTrue(path.startswith('salesterm/'), path)
@@ -274,7 +270,7 @@
# non regression test for #3120495. Without the fix, leads to
# "unhashable type: 'list'" error
with self.admin_access.web_request() as req:
- cwrelation = unicode(req.execute('CWEType X WHERE X name "CWSource"')[0][0])
+ cwrelation = text_type(req.execute('CWEType X WHERE X name "CWSource"')[0][0])
req.form = {'eid': [cwrelation], '__maineid' : cwrelation,
'__type:'+cwrelation: 'CWEType',
@@ -287,7 +283,7 @@
def test_edit_multiple_linked(self):
with self.admin_access.web_request() as req:
- peid = unicode(self.create_user(req, u'adim').eid)
+ peid = text_type(self.create_user(req, u'adim').eid)
req.form = {'eid': [peid, 'Y'], '__maineid': peid,
'__type:'+peid: u'CWUser',
@@ -307,7 +303,7 @@
self.assertEqual(email.address, 'dima@logilab.fr')
# with self.admin_access.web_request() as req:
- emaileid = unicode(email.eid)
+ emaileid = text_type(email.eid)
req.form = {'eid': [peid, emaileid],
'__type:'+peid: u'CWUser',
@@ -329,7 +325,7 @@
with self.admin_access.web_request() as req:
user = req.user
req.form = {'eid': 'X',
- '__cloned_eid:X': unicode(user.eid), '__type:X': 'CWUser',
+ '__cloned_eid:X': text_type(user.eid), '__type:X': 'CWUser',
'_cw_entity_fields:X': 'login-subject,upassword-subject',
'login-subject:X': u'toto',
'upassword-subject:X': u'toto',
@@ -338,7 +334,7 @@
self.ctrl_publish(req)
self.assertEqual({'upassword-subject': u'password and confirmation don\'t match'},
cm.exception.errors)
- req.form = {'__cloned_eid:X': unicode(user.eid),
+ req.form = {'__cloned_eid:X': text_type(user.eid),
'eid': 'X', '__type:X': 'CWUser',
'_cw_entity_fields:X': 'login-subject,upassword-subject',
'login-subject:X': u'toto',
@@ -354,7 +350,7 @@
def test_interval_bound_constraint_success(self):
with self.admin_access.repo_cnx() as cnx:
feid = cnx.execute('INSERT File X: X data_name "toto.txt", X data %(data)s',
- {'data': Binary('yo')})[0][0]
+ {'data': Binary(b'yo')})[0][0]
cnx.commit()
with self.admin_access.web_request(rollbackfirst=True) as req:
@@ -362,11 +358,11 @@
'__type:X': 'Salesterm',
'_cw_entity_fields:X': 'amount-subject,described_by_test-subject',
'amount-subject:X': u'-10',
- 'described_by_test-subject:X': unicode(feid),
+ 'described_by_test-subject:X': text_type(feid),
}
with self.assertRaises(ValidationError) as cm:
self.ctrl_publish(req)
- cm.exception.translate(unicode)
+ cm.exception.translate(text_type)
self.assertEqual({'amount-subject': 'value -10 must be >= 0'},
cm.exception.errors)
@@ -375,11 +371,11 @@
'__type:X': 'Salesterm',
'_cw_entity_fields:X': 'amount-subject,described_by_test-subject',
'amount-subject:X': u'110',
- 'described_by_test-subject:X': unicode(feid),
+ 'described_by_test-subject:X': text_type(feid),
}
with self.assertRaises(ValidationError) as cm:
self.ctrl_publish(req)
- cm.exception.translate(unicode)
+ cm.exception.translate(text_type)
self.assertEqual(cm.exception.errors, {'amount-subject': 'value 110 must be <= 100'})
with self.admin_access.web_request(rollbackfirst=True) as req:
@@ -387,7 +383,7 @@
'__type:X': 'Salesterm',
'_cw_entity_fields:X': 'amount-subject,described_by_test-subject',
'amount-subject:X': u'10',
- 'described_by_test-subject:X': unicode(feid),
+ 'described_by_test-subject:X': text_type(feid),
}
self.expect_redirect_handle_request(req, 'edit')
# should be redirected on the created
@@ -400,31 +396,31 @@
constrained attributes"""
with self.admin_access.repo_cnx() as cnx:
feid = cnx.execute('INSERT File X: X data_name "toto.txt", X data %(data)s',
- {'data': Binary('yo')})[0][0]
+ {'data': Binary(b'yo')})[0][0]
seid = cnx.create_entity('Salesterm', amount=0, described_by_test=feid).eid
cnx.commit()
# ensure a value that violate a constraint is properly detected
with self.admin_access.web_request(rollbackfirst=True) as req:
- req.form = {'eid': [unicode(seid)],
+ req.form = {'eid': [text_type(seid)],
'__type:%s'%seid: 'Salesterm',
'_cw_entity_fields:%s'%seid: 'amount-subject',
'amount-subject:%s'%seid: u'-10',
}
self.assertMultiLineEqual('''<script type="text/javascript">
window.parent.handleFormValidationResponse('entityForm', null, null, [false, [%s, {"amount-subject": "value -10 must be >= 0"}], null], null);
-</script>'''%seid, self.ctrl_publish(req, 'validateform'))
+</script>'''%seid, self.ctrl_publish(req, 'validateform').decode('ascii'))
# ensure a value that comply a constraint is properly processed
with self.admin_access.web_request(rollbackfirst=True) as req:
- req.form = {'eid': [unicode(seid)],
+ req.form = {'eid': [text_type(seid)],
'__type:%s'%seid: 'Salesterm',
'_cw_entity_fields:%s'%seid: 'amount-subject',
'amount-subject:%s'%seid: u'20',
}
self.assertMultiLineEqual('''<script type="text/javascript">
window.parent.handleFormValidationResponse('entityForm', null, null, [true, "http://testing.fr/cubicweb/view", null], null);
-</script>''', self.ctrl_publish(req, 'validateform'))
+</script>''', self.ctrl_publish(req, 'validateform').decode('ascii'))
self.assertEqual(20, req.execute('Any V WHERE X amount V, X eid %(eid)s',
{'eid': seid})[0][0])
@@ -433,7 +429,7 @@
'__type:X': 'Salesterm',
'_cw_entity_fields:X': 'amount-subject,described_by_test-subject',
'amount-subject:X': u'0',
- 'described_by_test-subject:X': unicode(feid),
+ 'described_by_test-subject:X': text_type(feid),
}
# ensure a value that is modified in an operation on a modify
@@ -452,11 +448,11 @@
with self.temporary_appobjects(ValidationErrorInOpAfterHook):
self.assertMultiLineEqual('''<script type="text/javascript">
window.parent.handleFormValidationResponse('entityForm', null, null, [false, ["X", {"amount-subject": "value -10 must be >= 0"}], null], null);
-</script>''', self.ctrl_publish(req, 'validateform'))
+</script>''', self.ctrl_publish(req, 'validateform').decode('ascii'))
self.assertMultiLineEqual('''<script type="text/javascript">
window.parent.handleFormValidationResponse('entityForm', null, null, [true, "http://testing.fr/cubicweb/view", null], null);
-</script>''', self.ctrl_publish(req, 'validateform'))
+</script>''', self.ctrl_publish(req, 'validateform').decode('ascii'))
def test_req_pending_insert(self):
"""make sure req's pending insertions are taken into account"""
@@ -541,7 +537,7 @@
def test_redirect_delete_button(self):
with self.admin_access.web_request() as req:
eid = req.create_entity('BlogEntry', title=u'hop', content=u'hop').eid
- req.form = {'eid': unicode(eid), '__type:%s'%eid: 'BlogEntry',
+ req.form = {'eid': text_type(eid), '__type:%s'%eid: 'BlogEntry',
'__action_delete': ''}
path, params = self.expect_redirect_handle_request(req, 'edit')
self.assertEqual(path, 'blogentry')
@@ -550,14 +546,14 @@
req.execute('SET X use_email E WHERE E eid %(e)s, X eid %(x)s',
{'x': req.user.eid, 'e': eid})
req.cnx.commit()
- req.form = {'eid': unicode(eid), '__type:%s'%eid: 'EmailAddress',
+ req.form = {'eid': text_type(eid), '__type:%s'%eid: 'EmailAddress',
'__action_delete': ''}
path, params = self.expect_redirect_handle_request(req, 'edit')
self.assertEqual(path, 'cwuser/admin')
self.assertIn('_cwmsgid', params)
eid1 = req.create_entity('BlogEntry', title=u'hop', content=u'hop').eid
eid2 = req.create_entity('EmailAddress', address=u'hop@logilab.fr').eid
- req.form = {'eid': [unicode(eid1), unicode(eid2)],
+ req.form = {'eid': [text_type(eid1), text_type(eid2)],
'__type:%s'%eid1: 'BlogEntry',
'__type:%s'%eid2: 'EmailAddress',
'__action_delete': ''}
@@ -607,13 +603,13 @@
groupeids = sorted(eid
for eid, in req.execute('CWGroup G '
'WHERE G name in ("managers", "users")'))
- groups = [unicode(eid) for eid in groupeids]
+ groups = [text_type(eid) for eid in groupeids]
cwetypeeid = req.execute('CWEType X WHERE X name "CWEType"')[0][0]
- basegroups = [unicode(eid)
+ basegroups = [text_type(eid)
for eid, in req.execute('CWGroup G '
'WHERE X read_permission G, X eid %(x)s',
{'x': cwetypeeid})]
- cwetypeeid = unicode(cwetypeeid)
+ cwetypeeid = text_type(cwetypeeid)
req.form = {
'eid': cwetypeeid,
'__type:'+cwetypeeid: 'CWEType',
@@ -662,7 +658,7 @@
'_cw_entity_fields:X': 'login-subject,upassword-subject,in_group-subject',
'login-subject:X': u'adim',
'upassword-subject:X': u'toto', 'upassword-subject-confirm:X': u'toto',
- 'in_group-subject:X': `gueid`,
+ 'in_group-subject:X': repr(gueid),
'__type:Y': 'EmailAddress',
'_cw_entity_fields:Y': 'address-subject,alias-subject,use_email-object',
@@ -737,7 +733,7 @@
'__type:Y': 'File',
'_cw_entity_fields:Y': 'data-subject',
- 'data-subject:Y': (u'coucou.txt', Binary('coucou')),
+ 'data-subject:Y': (u'coucou.txt', Binary(b'coucou')),
}
values_by_eid = dict((eid, req.extract_entity_params(eid, minparams=2))
for eid in req.edited_eids())
@@ -783,7 +779,7 @@
rset = self.john.as_rset()
rset.req = req
source = ctrl.publish()
- self.assertTrue(source.startswith('<div>'))
+ self.assertTrue(source.startswith(b'<div>'))
# def test_json_exec(self):
# rql = 'Any T,N WHERE T is Tag, T name N'
@@ -824,7 +820,7 @@
rset.req = req
source = ctrl.publish()
# maydel jscall
- self.assertIn('ajaxBoxRemoveLinkedEntity', source)
+ self.assertIn(b'ajaxBoxRemoveLinkedEntity', source)
def test_pending_insertion(self):
with self.remote_calling('add_pending_inserts', [['12', 'tags', '13']]) as (_, req):
@@ -887,16 +883,16 @@
# silly tests
def test_external_resource(self):
with self.remote_calling('external_resource', 'RSS_LOGO') as (res, _):
- self.assertEqual(json_dumps(self.config.uiprops['RSS_LOGO']),
+ self.assertEqual(json_dumps(self.config.uiprops['RSS_LOGO']).encode('ascii'),
res)
def test_i18n(self):
with self.remote_calling('i18n', ['bimboom']) as (res, _):
- self.assertEqual(json_dumps(['bimboom']), res)
+ self.assertEqual(json_dumps(['bimboom']).encode('ascii'), res)
def test_format_date(self):
with self.remote_calling('format_date', '2007-01-01 12:00:00') as (res, _):
- self.assertEqual(json_dumps('2007/01/01'), res)
+ self.assertEqual(json_dumps('2007/01/01').encode('ascii'), res)
def test_ajaxfunc_noparameter(self):
@ajaxfunc
@@ -968,7 +964,7 @@
def js_foo(self):
return u'hello'
with self.remote_calling('foo') as (res, _):
- self.assertEqual(res, u'hello')
+ self.assertEqual(res, b'hello')
def test_monkeypatch_jsoncontroller_xhtmlize(self):
with self.assertRaises(RemoteCallFailed):
@@ -979,7 +975,7 @@
def js_foo(self):
return u'hello'
with self.remote_calling('foo') as (res, _):
- self.assertEqual(u'<div>hello</div>', res)
+ self.assertEqual(b'<div>hello</div>', res)
def test_monkeypatch_jsoncontroller_jsonize(self):
with self.assertRaises(RemoteCallFailed):
@@ -990,7 +986,7 @@
def js_foo(self):
return 12
with self.remote_calling('foo') as (res, _):
- self.assertEqual(res, '12')
+ self.assertEqual(res, b'12')
def test_monkeypatch_jsoncontroller_stdfunc(self):
@monkeypatch(JSonController)
@@ -998,7 +994,7 @@
def js_reledit_form(self):
return 12
with self.remote_calling('reledit_form') as (res, _):
- self.assertEqual(res, '12')
+ self.assertEqual(res, b'12')
class UndoControllerTC(CubicWebTC):
@@ -1042,7 +1038,7 @@
"""
with self.admin_access.web_request() as req:
scheme, netloc, path, query, fragment = urlsplit(url)
- query_dict = url_parse_query(query)
+ query_dict = parse_qs(query)
expected_url = urljoin(req.base_url(), expected_path)
self.assertEqual( urlunsplit((scheme, netloc, path, None, None)), expected_url)
--- a/web/test/unittest_views_baseviews.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/unittest_views_baseviews.py Thu Nov 12 10:52:28 2015 +0100
@@ -129,8 +129,8 @@
source_lines = [line.strip()
for line in html_source.splitlines(False)
if line.strip()]
- self.assertListEqual(['<!DOCTYPE html>',
- '<html xmlns:cubicweb="http://www.cubicweb.org" lang="en">'],
+ self.assertListEqual([b'<!DOCTYPE html>',
+ b'<html xmlns:cubicweb="http://www.cubicweb.org" lang="en">'],
source_lines[:2])
def test_set_doctype_no_reset_xmldecl(self):
@@ -151,9 +151,9 @@
source_lines = [line.strip()
for line in html_source.splitlines(False)
if line.strip()]
- self.assertListEqual([html_doctype,
- '<html xmlns:cubicweb="http://www.cubicweb.org" lang="cz">',
- '<head>'],
+ self.assertListEqual([html_doctype.encode('ascii'),
+ b'<html xmlns:cubicweb="http://www.cubicweb.org" lang="cz">',
+ b'<head>'],
source_lines[:3])
if __name__ == '__main__':
--- a/web/test/unittest_views_csv.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/unittest_views_csv.py Thu Nov 12 10:52:28 2015 +0100
@@ -30,19 +30,19 @@
self.assertEqual(req.headers_out.getRawHeaders('content-type'),
['text/comma-separated-values;charset=UTF-8'])
expected_data = "String;COUNT(CWUser)\nguests;1\nmanagers;1"
- self.assertMultiLineEqual(expected_data, data)
+ self.assertMultiLineEqual(expected_data, data.decode('utf-8'))
def test_csvexport_on_empty_rset(self):
"""Should return the CSV header.
"""
with self.admin_access.web_request() as req:
- rset = req.execute('Any GN,COUNT(X) GROUPBY GN ORDERBY GN '
- 'WHERE X in_group G, G name GN, X login "Miles"')
+ rset = req.execute(u'Any GN,COUNT(X) GROUPBY GN ORDERBY GN '
+ 'WHERE X in_group G, G name GN, X login "Miles"')
data = self.view('csvexport', rset, req=req)
self.assertEqual(req.headers_out.getRawHeaders('content-type'),
['text/comma-separated-values;charset=UTF-8'])
expected_data = "String;COUNT(CWUser)"
- self.assertMultiLineEqual(expected_data, data)
+ self.assertMultiLineEqual(expected_data, data.decode('utf-8'))
if __name__ == '__main__':
--- a/web/test/unittest_views_errorform.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/unittest_views_errorform.py Thu Nov 12 10:52:28 2015 +0100
@@ -50,8 +50,8 @@
req.data['excinfo'] = sys.exc_info()
req.data['ex'] = e
html = self.view('error', req=req)
- self.failUnless(re.search(r'^<input name="__signature" type="hidden" '
- 'value="[0-9a-f]{32}" />$',
+ self.assertTrue(re.search(b'^<input name="__signature" type="hidden" '
+ b'value="[0-9a-f]{32}" />$',
html.source, re.M))
--- a/web/test/unittest_views_json.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/unittest_views_json.py Thu Nov 12 10:52:28 2015 +0100
@@ -16,12 +16,14 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+from six import binary_type
+
from cubicweb.devtools.testlib import CubicWebTC
class JsonViewsTC(CubicWebTC):
anonymize = True
- res_jsonp_data = '[["guests", 1]]'
+ res_jsonp_data = b'[["guests", 1]]'
def setUp(self):
super(JsonViewsTC, self).setUp()
@@ -36,7 +38,7 @@
def test_json_rsetexport_empty_rset(self):
with self.admin_access.web_request() as req:
- rset = req.execute('Any X WHERE X is CWUser, X login "foobarbaz"')
+ rset = req.execute(u'Any X WHERE X is CWUser, X login "foobarbaz"')
data = self.view('jsonexport', rset, req=req)
self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json'])
self.assertListEqual(data, [])
@@ -47,10 +49,10 @@
'rql': u'Any GN,COUNT(X) GROUPBY GN ORDERBY GN '
'WHERE X in_group G, G name GN'})
data = self.ctrl_publish(req, ctrl='jsonp')
- self.assertIsInstance(data, str)
+ self.assertIsInstance(data, binary_type)
self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/javascript'])
# because jsonp anonymizes data, only 'guests' group should be found
- self.assertEqual(data, 'foo(%s)' % self.res_jsonp_data)
+ self.assertEqual(data, b'foo(' + self.res_jsonp_data + b')')
def test_json_rsetexport_with_jsonp_and_bad_vid(self):
with self.admin_access.web_request() as req:
@@ -61,7 +63,7 @@
data = self.ctrl_publish(req, ctrl='jsonp')
self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/javascript'])
# result should be plain json, not the table view
- self.assertEqual(data, 'foo(%s)' % self.res_jsonp_data)
+ self.assertEqual(data, b'foo(' + self.res_jsonp_data + b')')
def test_json_ersetexport(self):
with self.admin_access.web_request() as req:
@@ -71,7 +73,7 @@
self.assertEqual(data[0]['name'], 'guests')
self.assertEqual(data[1]['name'], 'managers')
- rset = req.execute('Any G WHERE G is CWGroup, G name "foo"')
+ rset = req.execute(u'Any G WHERE G is CWGroup, G name "foo"')
data = self.view('ejsonexport', rset, req=req)
self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json'])
self.assertEqual(data, [])
@@ -79,7 +81,7 @@
class NotAnonymousJsonViewsTC(JsonViewsTC):
anonymize = False
- res_jsonp_data = '[["guests", 1], ["managers", 1]]'
+ res_jsonp_data = b'[["guests", 1], ["managers", 1]]'
if __name__ == '__main__':
from logilab.common.testlib import unittest_main
--- a/web/test/unittest_views_searchrestriction.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/unittest_views_searchrestriction.py Thu Nov 12 10:52:28 2015 +0100
@@ -37,62 +37,62 @@
@property
def select(self):
- return self.parse('Any B,(NOW - CD),S,V,U,GROUP_CONCAT(TN),VN,P,CD,BMD '
- 'GROUPBY B,CD,S,V,U,VN,P,BMD '
- 'WHERE B in_state S, B creation_date CD, '
- 'B modification_date BMD, T? tags B, T name TN, '
- 'V? bookmarked_by B, V title VN, B created_by U?, '
- 'B in_group P, P name "managers"')
+ return self.parse(u'Any B,(NOW - CD),S,V,U,GROUP_CONCAT(TN),VN,P,CD,BMD '
+ 'GROUPBY B,CD,S,V,U,VN,P,BMD '
+ 'WHERE B in_state S, B creation_date CD, '
+ 'B modification_date BMD, T? tags B, T name TN, '
+ 'V? bookmarked_by B, V title VN, B created_by U?, '
+ 'B in_group P, P name "managers"')
def test_1(self):
self.assertEqual(self._generate(self.select, 'in_state', 'subject', 'name'),
- "DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name 'managers', "
- "B in_state A, B is CWUser, A name C")
+ 'DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name "managers", '
+ 'B in_state A, B is CWUser, A name C')
def test_2(self):
self.assertEqual(self._generate(self.select, 'tags', 'object', 'name'),
- "DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name 'managers', "
- "A tags B, B is CWUser, A name C")
+ 'DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name "managers", '
+ 'A tags B, B is CWUser, A name C')
def test_3(self):
self.assertEqual(self._generate(self.select, 'created_by', 'subject', 'login'),
- "DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name 'managers', "
- "B created_by A, B is CWUser, A login C")
+ 'DISTINCT Any A,C ORDERBY C WHERE B in_group P, P name "managers", '
+ 'B created_by A, B is CWUser, A login C')
def test_4(self):
- self.assertEqual(self._generate(self.parse('Any X WHERE X is CWUser'), 'created_by', 'subject', 'login'),
+ self.assertEqual(self._generate(self.parse(u'Any X WHERE X is CWUser'), 'created_by', 'subject', 'login'),
"DISTINCT Any A,B ORDERBY B WHERE X is CWUser, X created_by A, A login B")
def test_5(self):
- self.assertEqual(self._generate(self.parse('Any X,L WHERE X is CWUser, X login L'), 'created_by', 'subject', 'login'),
+ self.assertEqual(self._generate(self.parse(u'Any X,L WHERE X is CWUser, X login L'), 'created_by', 'subject', 'login'),
"DISTINCT Any A,B ORDERBY B WHERE X is CWUser, X created_by A, A login B")
def test_nonregr1(self):
- select = self.parse('Any T,V WHERE T bookmarked_by V?, '
- 'V in_state VS, VS name "published", T created_by U')
+ select = self.parse(u'Any T,V WHERE T bookmarked_by V?, '
+ 'V in_state VS, VS name "published", T created_by U')
self.assertEqual(self._generate(select, 'created_by', 'subject', 'login'),
"DISTINCT Any A,B ORDERBY B WHERE T created_by U, "
"T created_by A, T is Bookmark, A login B")
def test_nonregr2(self):
#'DISTINCT Any X,TMP,N WHERE P name TMP, X version_of P, P is Project, X is Version, not X in_state S,S name "published", X num N ORDERBY TMP,N'
- select = self.parse('DISTINCT Any V,TN,L ORDERBY TN,L WHERE T nom TN, V connait T, T is Personne, V is CWUser,'
- 'NOT V in_state VS, VS name "published", V login L')
+ select = self.parse(u'DISTINCT Any V,TN,L ORDERBY TN,L WHERE T nom TN, V connait T, T is Personne, V is CWUser,'
+ 'NOT V in_state VS, VS name "published", V login L')
rschema = self.schema['connait']
- for rdefs in rschema.rdefs.itervalues():
+ for rdefs in rschema.rdefs.values():
rdefs.cardinality = '++'
try:
self.assertEqual(self._generate(select, 'in_state', 'subject', 'name'),
- "DISTINCT Any A,B ORDERBY B WHERE V is CWUser, "
- "NOT EXISTS(V in_state VS), VS name 'published', "
- "V in_state A, A name B")
+ 'DISTINCT Any A,B ORDERBY B WHERE V is CWUser, '
+ 'NOT EXISTS(V in_state VS), VS name "published", '
+ 'V in_state A, A name B')
finally:
- for rdefs in rschema.rdefs.itervalues():
+ for rdefs in rschema.rdefs.values():
rdefs.cardinality = '**'
def test_nonregr3(self):
#'DISTINCT Any X,TMP,N WHERE P name TMP, X version_of P, P is Project, X is Version, not X in_state S,S name "published", X num N ORDERBY TMP,N'
- select = self.parse('DISTINCT Any X, MAX(Y) GROUPBY X WHERE X is CWUser, Y is Bookmark, X in_group A')
+ select = self.parse(u'DISTINCT Any X, MAX(Y) GROUPBY X WHERE X is CWUser, Y is Bookmark, X in_group A')
self.assertEqual(self._generate(select, 'in_group', 'subject', 'name'),
"DISTINCT Any B,C ORDERBY C WHERE X is CWUser, X in_group B, B name C")
--- a/web/test/unittest_views_staticcontrollers.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/unittest_views_staticcontrollers.py Thu Nov 12 10:52:28 2015 +0100
@@ -120,12 +120,12 @@
yield res, req
def expected_content(self, js_files):
- content = u''
+ content = b''
for js_file in js_files:
dirpath, rid = self.config.locate_resource(js_file)
if dirpath is not None: # ignore resources not found
- with open(osp.join(dirpath, rid)) as f:
- content += f.read() + '\n'
+ with open(osp.join(dirpath, rid), 'rb') as f:
+ content += f.read() + b'\n'
return content
def test_cache(self):
--- a/web/test/unittest_viewselector.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/test/unittest_viewselector.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,6 +17,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""XXX rename, split, reorganize this"""
+from __future__ import print_function
from logilab.common.testlib import unittest_main
@@ -76,9 +77,9 @@
try:
self.assertSetEqual(list(content), expected)
except Exception:
- print registry, sorted(expected), sorted(content)
- print 'no more', [v for v in expected if not v in content]
- print 'missing', [v for v in content if not v in expected]
+ print(registry, sorted(expected), sorted(content))
+ print('no more', [v for v in expected if not v in content])
+ print('missing', [v for v in content if not v in expected])
raise
def setUp(self):
@@ -421,7 +422,7 @@
def test_interface_selector(self):
with self.admin_access.web_request() as req:
- req.create_entity('File', data_name=u'bim.png', data=Binary('bim'))
+ req.create_entity('File', data_name=u'bim.png', data=Binary(b'bim'))
# image primary view priority
rset = req.execute('File X WHERE X data_name "bim.png"')
self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset),
@@ -430,21 +431,21 @@
def test_score_entity_selector(self):
with self.admin_access.web_request() as req:
- req.create_entity('File', data_name=u'bim.png', data=Binary('bim'))
+ req.create_entity('File', data_name=u'bim.png', data=Binary(b'bim'))
# image/ehtml primary view priority
rset = req.execute('File X WHERE X data_name "bim.png"')
self.assertIsInstance(self.vreg['views'].select('image', req, rset=rset),
idownloadable.ImageView)
self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'ehtml', req, rset=rset)
- fileobj = req.create_entity('File', data_name=u'bim.html', data=Binary('<html>bam</html'))
+ fileobj = req.create_entity('File', data_name=u'bim.html', data=Binary(b'<html>bam</html'))
# image/ehtml primary view priority
rset = req.execute('File X WHERE X data_name "bim.html"')
self.assertIsInstance(self.vreg['views'].select('ehtml', req, rset=rset),
idownloadable.EHTMLView)
self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'image', req, rset=rset)
- fileobj = req.create_entity('File', data_name=u'bim.txt', data=Binary('boum'))
+ fileobj = req.create_entity('File', data_name=u'bim.txt', data=Binary(b'boum'))
# image/ehtml primary view priority
rset = req.execute('File X WHERE X data_name "bim.txt"')
self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'image', req, rset=rset)
@@ -461,7 +462,7 @@
obj = self.vreg['views'].select(vid, req, rset=rset, **args)
return obj.render(**args)
except Exception:
- print vid, rset, args
+ print(vid, rset, args)
raise
def test_form(self):
@@ -476,12 +477,12 @@
def test_properties(self):
- self.assertEqual(sorted(k for k in self.vreg['propertydefs'].iterkeys()
+ self.assertEqual(sorted(k for k in self.vreg['propertydefs']
if k.startswith('ctxcomponents.edit_box')),
['ctxcomponents.edit_box.context',
'ctxcomponents.edit_box.order',
'ctxcomponents.edit_box.visible'])
- self.assertEqual([k for k in self.vreg['propertyvalues'].iterkeys()
+ self.assertEqual([k for k in self.vreg['propertyvalues']
if not k.startswith('system.version')],
[])
self.assertEqual(self.vreg.property_value('ctxcomponents.edit_box.visible'), True)
--- a/web/uihelper.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/uihelper.py Thu Nov 12 10:52:28 2015 +0100
@@ -45,6 +45,7 @@
"""
__docformat__ = "restructuredtext en"
+from six import add_metaclass
from logilab.common.deprecation import deprecated
from cubicweb.web.views import uicfg
@@ -93,6 +94,7 @@
super(meta_formconfig, cls).__init__(name, bases, classdict)
+@add_metaclass(meta_formconfig)
class FormConfig:
"""helper base class to define uicfg rules on a given entity type.
@@ -162,7 +164,6 @@
inlined = ('use_email',)
"""
- __metaclass__ = meta_formconfig
formtype = 'main'
etype = None # must be defined in concrete subclasses
hidden = ()
--- a/web/views/__init__.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/__init__.py Thu Nov 12 10:52:28 2015 +0100
@@ -23,6 +23,8 @@
import sys
import tempfile
+from six import add_metaclass
+
from rql import nodes
from logilab.mtconverter import xml_escape
from logilab.common.deprecation import class_deprecated
@@ -77,7 +79,7 @@
#'text/xml': 'xml',
# XXX rss, owl...
}
-def vid_from_rset(req, rset, schema):
+def vid_from_rset(req, rset, schema, check_table=True):
"""given a result set, return a view id"""
if rset is None:
return 'index'
@@ -90,7 +92,7 @@
return 'noresult'
# entity result set
if not schema.eschema(rset.description[0][0]).final:
- if need_table_view(rset, schema):
+ if check_table and need_table_view(rset, schema):
return 'table'
if nb_rows == 1:
if req.search_state[0] == 'normal':
@@ -127,8 +129,8 @@
+@add_metaclass(class_deprecated)
class TmpFileViewMixin(object):
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.18] %(cls)s is deprecated'
binary = True
content_type = 'application/octet-stream'
--- a/web/views/actions.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/actions.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,7 @@
"""Set of HTML base actions"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from warnings import warn
@@ -76,7 +76,7 @@
return 0
select = rqlst.children[0]
if len(select.defined_vars) == 1 and len(select.solutions) == 1:
- rset._searched_etype = select.solutions[0].itervalues().next()
+ rset._searched_etype = next(iter(select.solutions[0].values()))
eschema = req.vreg.schema.eschema(rset._searched_etype)
if not (eschema.final or eschema.is_subobject(strict=True)) \
and eschema.has_perm(req, 'add'):
--- a/web/views/ajaxcontroller.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/ajaxcontroller.py Thu Nov 12 10:52:28 2015 +0100
@@ -66,6 +66,8 @@
from warnings import warn
from functools import partial
+from six import PY2, text_type
+
from logilab.common.date import strptime
from logilab.common.registry import yes
from logilab.common.deprecation import deprecated
@@ -84,7 +86,7 @@
if extraargs is None:
return {}
# we receive unicode keys which is not supported by the **syntax
- return dict((str(key), value) for key, value in extraargs.iteritems())
+ return dict((str(key), value) for key, value in extraargs.items())
class AjaxController(Controller):
@@ -117,7 +119,9 @@
raise RemoteCallFailed('no method specified')
# 1/ check first for old-style (JSonController) ajax func for bw compat
try:
- func = getattr(basecontrollers.JSonController, 'js_%s' % fname).im_func
+ func = getattr(basecontrollers.JSonController, 'js_%s' % fname)
+ if PY2:
+ func = func.__func__
func = partial(func, self)
except AttributeError:
# 2/ check for new-style (AjaxController) ajax func
@@ -150,7 +154,7 @@
if result is None:
return ''
# get unicode on @htmlize methods, encoded string on @jsonize methods
- elif isinstance(result, unicode):
+ elif isinstance(result, text_type):
return result.encode(self._cw.encoding)
return result
--- a/web/views/authentication.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/authentication.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,12 +19,9 @@
__docformat__ = "restructuredtext en"
-from threading import Lock
-
-from logilab.common.decorators import clear_cache
from logilab.common.deprecation import class_renamed
-from cubicweb import AuthenticationError, BadConnectionId
+from cubicweb import AuthenticationError
from cubicweb.view import Component
from cubicweb.web import InvalidSession
@@ -101,41 +98,11 @@
'("ie" instead of "ei")')
-class AbstractAuthenticationManager(Component):
- """authenticate user associated to a request and check session validity"""
- __abstract__ = True
- __regid__ = 'authmanager'
- def __init__(self, repo):
- self.vreg = repo.vreg
-
- def validate_session(self, req, session):
- """check session validity, reconnecting it to the repository if the
- associated connection expired in the repository side (hence the
- necessity for this method).
-
- raise :exc:`InvalidSession` if session is corrupted for a reason or
- another and should be closed
- """
- raise NotImplementedError()
-
- def authenticate(self, req):
- """authenticate user using connection information found in the request,
- and return corresponding a :class:`~cubicweb.dbapi.Connection` instance,
- as well as login and authentication information dictionary used to open
- the connection.
-
- raise :exc:`cubicweb.AuthenticationError` if authentication failed
- (no authentication info found or wrong user/password)
- """
- raise NotImplementedError()
-
-
-class RepositoryAuthenticationManager(AbstractAuthenticationManager):
+class RepositoryAuthenticationManager(object):
"""authenticate user associated to a request and check session validity"""
def __init__(self, repo):
- super(RepositoryAuthenticationManager, self).__init__(repo)
self.repo = repo
vreg = repo.vreg
self.log_queries = vreg.config['query-log-file']
--- a/web/views/autoform.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/autoform.py Thu Nov 12 10:52:28 2015 +0100
@@ -119,10 +119,12 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from warnings import warn
+from six.moves import range
+
from logilab.mtconverter import xml_escape
from logilab.common.decorators import iclassmethod, cached
from logilab.common.deprecation import deprecated
@@ -355,7 +357,7 @@
self.w(self._cw._('no such entity type %s') % self.etype)
return
entity = cls(self._cw)
- entity.eid = self._cw.varmaker.next()
+ entity.eid = next(self._cw.varmaker)
return entity
def call(self, i18nctx, **kwargs):
@@ -491,7 +493,8 @@
pendings.remove( (int(eidfrom), rel, int(eidto)) )
@ajaxfunc(output_type='json')
-def remove_pending_insert(self, (eidfrom, rel, eidto)):
+def remove_pending_insert(self, args):
+ eidfrom, rel, eidto = args
_remove_pending(self._cw, eidfrom, rel, eidto, 'insert')
@ajaxfunc(output_type='json')
@@ -500,11 +503,13 @@
_add_pending(self._cw, eidfrom, rel, eidto, 'insert')
@ajaxfunc(output_type='json')
-def remove_pending_delete(self, (eidfrom, rel, eidto)):
+def remove_pending_delete(self, args):
+ eidfrom, rel, eidto = args
_remove_pending(self._cw, eidfrom, rel, eidto, 'delete')
@ajaxfunc(output_type='json')
-def add_pending_delete(self, (eidfrom, rel, eidto)):
+def add_pending_delete(self, args):
+ eidfrom, rel, eidto = args
_add_pending(self._cw, eidfrom, rel, eidto, 'delete')
@@ -608,7 +613,7 @@
toggleable_rel_link_func = toggleable_relation_link
else:
toggleable_rel_link_func = lambda x, y, z: u''
- for row in xrange(rset.rowcount):
+ for row in range(rset.rowcount):
nodeid = relation_id(entity.eid, rschema, role,
rset[row][0])
if nodeid in pending_deletes:
@@ -1048,4 +1053,4 @@
AutomaticEntityForm.error('field for %s %s may not be found in schema' % (rtype, role))
return None
- vreg.register_all(globals().itervalues(), __name__)
+ vreg.register_all(globals().values(), __name__)
--- a/web/views/basecomponents.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/basecomponents.py Thu Nov 12 10:52:28 2015 +0100
@@ -21,7 +21,7 @@
* the logged user link
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from logilab.mtconverter import xml_escape
from logilab.common.registry import yes
--- a/web/views/basecontrollers.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/basecontrollers.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,10 +20,12 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from warnings import warn
+from six import text_type
+
from logilab.common.deprecation import deprecated
from cubicweb import (NoSelectableObject, ObjectNotFound, ValidationError,
@@ -233,7 +235,7 @@
except Exception as ex:
req.cnx.rollback()
req.exception('unexpected error while validating form')
- return (False, str(ex).decode('utf-8'), ctrl._edited_entity)
+ return (False, text_type(ex), ctrl._edited_entity)
return (False, '???', None)
@@ -255,9 +257,8 @@
# XXX unclear why we have a separated controller here vs
# js_validate_form on the json controller
status, args, entity = _validate_form(self._cw, self._cw.vreg)
- domid = self._cw.form.get('__domid', 'entityForm').encode(
- self._cw.encoding)
- return self.response(domid, status, args, entity)
+ domid = self._cw.form.get('__domid', 'entityForm')
+ return self.response(domid, status, args, entity).encode(self._cw.encoding)
class JSonController(Controller):
--- a/web/views/basetemplates.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/basetemplates.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,7 @@
"""default templates for CubicWeb web client"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from logilab.mtconverter import xml_escape
from logilab.common.deprecation import class_renamed
--- a/web/views/baseviews.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/baseviews.py Thu Nov 12 10:52:28 2015 +0100
@@ -76,11 +76,13 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from datetime import timedelta
from warnings import warn
+from six.moves import range
+
from rql import nodes
from logilab.mtconverter import TransformError, xml_escape
@@ -231,8 +233,8 @@
"""
rset = self.cw_rset
if rset is None:
- raise NotImplementedError, self
- for i in xrange(len(rset)):
+ raise NotImplementedError(self)
+ for i in range(len(rset)):
self.wview(self.__regid__, rset, row=i, **kwargs)
if len(rset) > 1:
self.w(u"\n")
@@ -314,7 +316,7 @@
self.w(u'<ul>\n')
else:
self.w(u'<ul%s class="%s">\n' % (listid, klass or 'section'))
- for i in xrange(self.cw_rset.rowcount):
+ for i in range(self.cw_rset.rowcount):
self.cell_call(row=i, col=0, vid=subvid, klass=klass, **kwargs)
self.w(u'</ul>\n')
if title:
@@ -393,7 +395,7 @@
@property
def title(self):
- etype = iter(self.cw_rset.column_types(0)).next()
+ etype = next(iter(self.cw_rset.column_types(0)))
return display_name(self._cw, etype, form='plural')
def call(self, **kwargs):
@@ -427,7 +429,7 @@
def call(self, subvid=None, **kwargs):
kwargs['vid'] = subvid
rset = self.cw_rset
- for i in xrange(len(rset)):
+ for i in range(len(rset)):
self.cell_call(i, 0, **kwargs)
if i < rset.rowcount-1:
self.w(self.separator)
--- a/web/views/bookmark.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/bookmark.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,7 @@
"""Primary view for bookmarks + user's bookmarks box"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from logilab.mtconverter import xml_escape
--- a/web/views/boxes.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/boxes.py Thu Nov 12 10:52:28 2015 +0100
@@ -26,10 +26,12 @@
* startup views box
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from warnings import warn
+from six import text_type, add_metaclass
+
from logilab.mtconverter import xml_escape
from logilab.common.deprecation import class_deprecated
@@ -93,7 +95,7 @@
etypes = self.cw_rset.column_types(0)
if len(etypes) == 1:
plural = self.cw_rset.rowcount > 1 and 'plural' or ''
- etypelabel = display_name(self._cw, iter(etypes).next(), plural)
+ etypelabel = display_name(self._cw, next(iter(etypes)), plural)
title = u'%s - %s' % (title, etypelabel.lower())
w(title)
@@ -216,7 +218,7 @@
@property
def domid(self):
- return super(RsetBox, self).domid + unicode(abs(id(self))) + unicode(abs(id(self.cw_rset)))
+ return super(RsetBox, self).domid + text_type(abs(id(self))) + text_type(abs(id(self.cw_rset)))
def render_title(self, w):
w(self.cw_extra_kwargs['title'])
@@ -231,9 +233,9 @@
# helper classes ##############################################################
+@add_metaclass(class_deprecated)
class SideBoxView(EntityView):
"""helper view class to display some entities in a sidebox"""
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.10] SideBoxView is deprecated, use RsetBox instead (%(cls)s)'
__regid__ = 'sidebox'
--- a/web/views/calendar.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/calendar.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,7 @@
"""html calendar views"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
import copy
from datetime import timedelta
--- a/web/views/csvexport.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/csvexport.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,10 @@
"""csv export views"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
+
+from six import PY2
+from six.moves import range
from cubicweb.schema import display_name
from cubicweb.predicates import any_rset, empty_rset
@@ -29,7 +32,7 @@
"""mixin class for CSV views"""
templatable = False
content_type = "text/comma-separated-values"
- binary = True # avoid unicode assertion
+ binary = PY2 # python csv module is unicode aware in py3k
csv_params = {'dialect': 'excel',
'quotechar': '"',
'delimiter': ';',
@@ -88,7 +91,7 @@
rows_by_type = {}
writer = self.csvwriter()
rowdef_by_type = {}
- for index in xrange(len(self.cw_rset)):
+ for index in range(len(self.cw_rset)):
entity = self.cw_rset.complete_entity(index)
if entity.e_schema not in rows_by_type:
rowdef_by_type[entity.e_schema] = [rs for rs, at in entity.e_schema.attribute_definitions()
@@ -98,7 +101,7 @@
rows = rows_by_type[entity.e_schema]
rows.append([entity.printable_value(rs.type, format='text/plain')
for rs in rowdef_by_type[entity.e_schema]])
- for rows in rows_by_type.itervalues():
+ for rows in rows_by_type.values():
writer.writerows(rows)
# use two empty lines as separator
writer.writerows([[], []])
--- a/web/views/cwproperties.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/cwproperties.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,7 @@
"""Specific views for CWProperty (eg site/user preferences"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from logilab.mtconverter import xml_escape
@@ -119,10 +119,10 @@
_ = self._cw._
self.w(u'<h1>%s</h1>\n' % _(self.title))
for label, group, form in sorted((_(g), g, f)
- for g, f in mainforms.iteritems()):
+ for g, f in mainforms.items()):
self.wrap_main_form(group, label, form)
for label, group, objects in sorted((_(g), g, o)
- for g, o in groupedforms.iteritems()):
+ for g, o in groupedforms.items()):
self.wrap_grouped_form(group, label, objects)
@property
@@ -171,7 +171,7 @@
entity = self.cwprops_rset.get_entity(values[key], 0)
else:
entity = self._cw.vreg['etypes'].etype_class('CWProperty')(self._cw)
- entity.eid = self._cw.varmaker.next()
+ entity.eid = next(self._cw.varmaker)
entity.cw_attr_cache['pkey'] = key
entity.cw_attr_cache['value'] = self._cw.vreg.property_value(key)
return entity
@@ -224,7 +224,7 @@
(make_togglable_link('fieldset_' + group, label)))
self.w(u'<div id="fieldset_%s" %s>' % (group, status))
sorted_objects = sorted((self._cw.__('%s_%s' % (group, o)), o, f)
- for o, f in objects.iteritems())
+ for o, f in objects.items())
for label, oid, form in sorted_objects:
self.wrap_object_form(group, oid, label, form)
self.w(u'</div>')
--- a/web/views/cwsources.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/cwsources.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,10 +20,13 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
import logging
from itertools import repeat
+
+from six.moves import range
+
from logilab.mtconverter import xml_escape
from logilab.common.decorators import cachedproperty
@@ -95,7 +98,7 @@
if hostconfig:
self.w(u'<h3>%s</h3>' % self._cw._('CWSourceHostConfig_plural'))
self._cw.view('table', hostconfig, w=self.w,
- displaycols=range(2),
+ displaycols=list(range(2)),
cellvids={1: 'editable-final'})
@@ -186,7 +189,7 @@
warning(_('relation %(rtype)s with %(etype)s as %(role)s is '
'supported but no target type supported') %
{'rtype': rschema, 'role': role, 'etype': etype})
- for rtype, rdefs in self.srelations.iteritems():
+ for rtype, rdefs in self.srelations.items():
if rdefs is None:
rschema = self.schema[rtype]
for subj, obj in rschema.rdefs:
--- a/web/views/cwuser.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/cwuser.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,10 +18,13 @@
"""Specific views for users and groups"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from hashlib import sha1 # pylint: disable=E0611
+from six import text_type
+from six.moves import range
+
from logilab.mtconverter import xml_escape
from cubicweb import tags
@@ -64,7 +67,7 @@
<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:rdfs="http://www.w3org/2000/01/rdf-schema#"
xmlns:foaf="http://xmlns.com/foaf/0.1/"> '''% self._cw.encoding)
- for i in xrange(self.cw_rset.rowcount):
+ for i in range(self.cw_rset.rowcount):
self.cell_call(i, 0)
self.w(u'</rdf:RDF>\n')
@@ -250,6 +253,6 @@
'group': tableview.MainEntityColRenderer(),
'nb_users': tableview.EntityTableColRenderer(
header=_('num. users'),
- renderfunc=lambda w,x: w(unicode(x.num_users())),
+ renderfunc=lambda w,x: w(text_type(x.num_users())),
sortfunc=lambda x: x.num_users()),
}
--- a/web/views/debug.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/debug.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,10 +18,12 @@
"""management and error screens"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from time import strftime, localtime
+from six import text_type
+
from logilab.mtconverter import xml_escape
from cubicweb.predicates import none_rset, match_user_groups
@@ -106,7 +108,7 @@
stats[k] = '%s / %s' % (stats[k]['size'], stats[k]['maxsize'])
for element in sorted(stats):
w(u'<tr><th align="left">%s</th><td>%s %s</td></tr>'
- % (element, xml_escape(unicode(stats[element])),
+ % (element, xml_escape(text_type(stats[element])),
element.endswith('percent') and '%' or '' ))
w(u'</table>')
if req.cnx.is_repo_in_memory and req.user.is_in_group('managers'):
@@ -116,7 +118,7 @@
w(u'<ul>')
for session in sessions:
w(u'<li>%s (%s: %s)<br/>' % (
- xml_escape(unicode(session)),
+ xml_escape(text_type(session)),
_('last usage'),
strftime(dtformat, localtime(session.timestamp))))
dict_to_html(w, session.data)
@@ -170,7 +172,7 @@
continue
self.w(u'<h3 id="%s">%s</h3>' % (key, key))
if self._cw.vreg[key]:
- values = sorted(self._cw.vreg[key].iteritems())
+ values = sorted(self._cw.vreg[key].items())
self.wview('pyvaltable', pyvalue=[(key, xml_escape(repr(val)))
for key, val in values])
else:
--- a/web/views/dotgraphview.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/dotgraphview.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,7 @@
"""some basic stuff to build dot generated graph images"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
import tempfile
import os
--- a/web/views/editcontroller.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/editcontroller.py Thu Nov 12 10:52:28 2015 +0100
@@ -24,6 +24,8 @@
from datetime import datetime
+from six import text_type
+
from logilab.common.deprecation import deprecated
from logilab.common.graph import ordered_nodes
@@ -93,9 +95,9 @@
def update_query(self, eid):
varmaker = rqlvar_maker()
- var = varmaker.next()
+ var = next(varmaker)
while var in self.kwargs:
- var = varmaker.next()
+ var = next(varmaker)
rql = 'SET %s WHERE X eid %%(%s)s' % (','.join(self.edited), var)
if self.restrictions:
rql += ', %s' % ','.join(self.restrictions)
@@ -143,7 +145,7 @@
values_by_eid = dict((eid, req.extract_entity_params(eid, minparams=2))
for eid in req.edited_eids())
# iterate over all the edited entities
- for eid, values in values_by_eid.iteritems():
+ for eid, values in values_by_eid.items():
# add eid to the dependency graph
graph.setdefault(eid, set())
# search entity's edited fields for mandatory inlined relation
@@ -197,7 +199,7 @@
if '__linkto' in req.form and 'eid' in req.form:
self.execute_linkto()
elif not ('__delete' in req.form or '__insert' in req.form):
- raise ValidationError(None, {None: unicode(ex)})
+ raise ValidationError(None, {None: text_type(ex)})
# all pending inlined relations to newly created entities have been
# treated now (pop to ensure there are no attempt to add new ones)
pending_inlined = req.data.pop('pending_inlined')
@@ -215,7 +217,7 @@
autoform.delete_relations(self._cw, todelete)
self._cw.remove_pending_operations()
if self.errors:
- errors = dict((f.name, unicode(ex)) for f, ex in self.errors)
+ errors = dict((f.name, text_type(ex)) for f, ex in self.errors)
raise ValidationError(valerror_eid(form.get('__maineid')), errors)
def _insert_entity(self, etype, eid, rqlquery):
@@ -265,7 +267,7 @@
for form_, field in req.data['pending_inlined'].pop(entity.eid, ()):
rqlquery.set_inlined(field.name, form_.edited_entity.eid)
if self.errors:
- errors = dict((f.role_name(), unicode(ex)) for f, ex in self.errors)
+ errors = dict((f.role_name(), text_type(ex)) for f, ex in self.errors)
raise ValidationError(valerror_eid(entity.eid), errors)
if eid is None: # creation or copy
entity.eid = eid = self._insert_entity(etype, formparams['eid'], rqlquery)
@@ -316,7 +318,7 @@
"""handle edition for the (rschema, x) relation of the given entity
"""
if values:
- rqlquery.set_inlined(field.name, iter(values).next())
+ rqlquery.set_inlined(field.name, next(iter(values)))
elif form.edited_entity.has_eid():
self.handle_relation(form, field, values, origvalues)
@@ -355,13 +357,13 @@
for eid, etype in eidtypes:
entity = self._cw.entity_from_eid(eid, etype)
path, params = entity.cw_adapt_to('IEditControl').after_deletion_path()
- redirect_info.add( (path, tuple(params.iteritems())) )
+ redirect_info.add( (path, tuple(params.items())) )
entity.cw_delete()
if len(redirect_info) > 1:
# In the face of ambiguity, refuse the temptation to guess.
self._after_deletion_path = 'view', ()
else:
- self._after_deletion_path = iter(redirect_info).next()
+ self._after_deletion_path = next(iter(redirect_info))
if len(eidtypes) > 1:
self._cw.set_message(self._cw._('entities deleted'))
else:
--- a/web/views/editforms.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/editforms.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,10 +20,12 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from copy import copy
+from six.moves import range
+
from logilab.mtconverter import xml_escape
from logilab.common.decorators import cached
from logilab.common.registry import yes
@@ -145,7 +147,7 @@
# selector
etype = kwargs.pop('etype', self._cw.form.get('etype'))
entity = self._cw.vreg['etypes'].etype_class(etype)(self._cw)
- entity.eid = self._cw.varmaker.next()
+ entity.eid = next(self._cw.varmaker)
self.render_form(entity)
def form_title(self, entity):
@@ -197,7 +199,7 @@
entity.complete()
self.newentity = copy(entity)
self.copying = entity
- self.newentity.eid = self._cw.varmaker.next()
+ self.newentity.eid = next(self._cw.varmaker)
self.w(u'<script type="text/javascript">updateMessage("%s");</script>\n'
% self._cw._(self.warning_message))
super(CopyFormView, self).render_form(self.newentity)
@@ -230,7 +232,7 @@
def __init__(self, req, rset, **kwargs):
kwargs.setdefault('__redirectrql', rset.printable_rql())
super(TableEditForm, self).__init__(req, rset=rset, **kwargs)
- for row in xrange(len(self.cw_rset)):
+ for row in range(len(self.cw_rset)):
form = self._cw.vreg['forms'].select('edition', self._cw,
rset=self.cw_rset, row=row,
formtype='muledit',
--- a/web/views/editviews.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/editviews.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,7 @@
"""Some views used to help to the edition process"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from logilab.common.decorators import cached
from logilab.mtconverter import xml_escape
--- a/web/views/facets.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/facets.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,7 @@
"""the facets box and some basic facets"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from warnings import warn
@@ -168,7 +168,7 @@
DeprecationWarning, stacklevel=2)
else:
vidargs = {}
- vidargs = dict((k, v) for k, v in vidargs.iteritems() if v)
+ vidargs = dict((k, v) for k, v in vidargs.items() if v)
facetargs = xml_escape(json_dumps([divid, vid, paginate, vidargs]))
w(u'<form id="%sForm" class="%s" method="post" action="" '
'cubicweb:facetargs="%s" >' % (divid, cssclass, facetargs))
--- a/web/views/formrenderers.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/formrenderers.py Thu Nov 12 10:52:28 2015 +0100
@@ -33,10 +33,12 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from warnings import warn
+from six import text_type
+
from logilab.mtconverter import xml_escape
from logilab.common.registry import yes
@@ -119,7 +121,7 @@
data.insert(0, errormsg)
# NOTE: we call unicode because `tag` objects may be found within data
# e.g. from the cwtags library
- w(''.join(unicode(x) for x in data))
+ w(''.join(text_type(x) for x in data))
def render_content(self, w, form, values):
if self.display_progress_div:
@@ -241,7 +243,7 @@
if form.fieldsets_in_order:
fieldsets = form.fieldsets_in_order
else:
- fieldsets = byfieldset.iterkeys()
+ fieldsets = byfieldset
for fieldset in list(fieldsets):
try:
fields = byfieldset.pop(fieldset)
--- a/web/views/forms.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/forms.py Thu Nov 12 10:52:28 2015 +0100
@@ -48,6 +48,9 @@
from warnings import warn
import time
+import inspect
+
+from six import text_type
from logilab.common import dictattr, tempattr
from logilab.common.decorators import iclassmethod, cached
@@ -257,7 +260,7 @@
editedfields = self._cw.form['_cw_fields']
except KeyError:
raise RequestError(self._cw._('no edited fields specified'))
- entityform = entity and self.field_by_name.im_func.func_code.co_argcount == 4 # XXX
+ entityform = entity and len(inspect.getargspec(self.field_by_name).args) == 4 # XXX
for editedfield in splitstrip(editedfields):
try:
name, role = editedfield.split('-')
@@ -286,7 +289,7 @@
except ProcessFormError as exc:
errors.append((field, exc))
if errors:
- errors = dict((f.role_name(), unicode(ex)) for f, ex in errors)
+ errors = dict((f.role_name(), text_type(ex)) for f, ex in errors)
raise ValidationError(None, errors)
return processed
@@ -377,7 +380,7 @@
Warning: this method must be called only when all form fields are setup
"""
- for (rtype, role), eids in self.linked_to.iteritems():
+ for (rtype, role), eids in self.linked_to.items():
# if the relation is already setup by a form field, do not add it
# in a __linkto hidden to avoid setting it twice in the controller
try:
--- a/web/views/ibreadcrumbs.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/ibreadcrumbs.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,10 +18,12 @@
"""breadcrumbs components definition for CubicWeb web client"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from warnings import warn
+from six import text_type
+
from logilab.mtconverter import xml_escape
from cubicweb import tags, uilib
@@ -141,7 +143,7 @@
xml_escape(url), xml_escape(uilib.cut(title, textsize))))
else:
textsize = self._cw.property_value('navigation.short-line-size')
- w(xml_escape(uilib.cut(unicode(part), textsize)))
+ w(xml_escape(uilib.cut(text_type(part), textsize)))
class BreadCrumbETypeVComponent(BreadCrumbEntityVComponent):
--- a/web/views/idownloadable.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/idownloadable.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,7 +20,9 @@
=====================================================
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
+
+from six.moves import range
from logilab.mtconverter import BINARY_ENCODINGS, TransformError, xml_escape
from logilab.common.deprecation import class_renamed, deprecated
@@ -166,7 +168,7 @@
def call(self, **kwargs):
rset = self.cw_rset
- for i in xrange(len(rset)):
+ for i in range(len(rset)):
self.w(u'<div class="efile">')
self.wview(self.__regid__, rset, row=i, col=0, **kwargs)
self.w(u'</div>')
--- a/web/views/json.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/json.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,7 @@
"""json export views"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from cubicweb.uilib import rest_traceback
@@ -64,7 +64,7 @@
# use ``application/javascript`` if ``callback`` parameter is
# provided, keep ``application/json`` otherwise
self._cw.set_content_type('application/javascript')
- json_data = b'%s(%s)' % (json_padding, json_data)
+ json_data = json_padding + b'(' + json_data + b')'
return json_data
@@ -85,7 +85,8 @@
indent = int(self._cw.form['_indent'])
else:
indent = None
- self.w(json_dumps(data, indent=indent))
+ # python's json.dumps escapes non-ascii characters
+ self.w(json_dumps(data, indent=indent).encode('ascii'))
class JsonRsetView(JsonMixIn, AnyRsetView):
--- a/web/views/magicsearch.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/magicsearch.py Thu Nov 12 10:52:28 2015 +0100
@@ -23,6 +23,8 @@
import re
from logging import getLogger
+from six import text_type
+
from yams.interfaces import IVocabularyConstraint
from rql import RQLSyntaxError, BadRQLQuery, parse
@@ -86,7 +88,7 @@
else:
# Only one possible translation, no ambiguity
if len(translation_set) == 1:
- relation.r_type = iter(translations[rtype]).next()
+ relation.r_type = next(iter(translations[rtype]))
# More than 1 possible translation => resolve it later
else:
ambiguous_nodes[relation] = (lhs.name, translation_set)
@@ -386,7 +388,7 @@
self.processors = sorted(processors, key=lambda x: x.priority)
def process_query(self, uquery):
- assert isinstance(uquery, unicode)
+ assert isinstance(uquery, text_type)
try:
procname, query = uquery.split(':', 1)
proc = self.by_name[procname.strip().lower()]
@@ -589,7 +591,7 @@
"""
schema = self._cw.vreg.schema
relations = set()
- untyped_dest_var = rqlvar_maker(defined=select.defined_vars).next()
+ untyped_dest_var = next(rqlvar_maker(defined=select.defined_vars))
# for each solution
# 1. find each possible relation
# 2. for each relation:
--- a/web/views/management.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/management.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,7 @@
"""security management and error screens"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from logilab.mtconverter import xml_escape
@@ -169,7 +169,7 @@
binfo += u'\n\n:URL: %s\n' % req.url()
if not '__bugreporting' in req.form:
binfo += u'\n:form params:\n'
- binfo += u'\n'.join(u' * %s = %s' % (k, v) for k, v in req.form.iteritems())
+ binfo += u'\n'.join(u' * %s = %s' % (k, v) for k, v in req.form.items())
binfo += u'\n\n:CubicWeb version: %s\n' % (eversion,)
for pkg, pkgversion in cubes:
binfo += u":Cube %s version: %s\n" % (pkg, pkgversion)
--- a/web/views/navigation.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/navigation.py Thu Nov 12 10:52:28 2015 +0100
@@ -46,10 +46,12 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from datetime import datetime
+from six import text_type
+
from rql.nodes import VariableRef, Constant
from logilab.mtconverter import xml_escape
@@ -192,10 +194,10 @@
return entity.printable_value(attrname, format='text/plain')
elif col is None: # smart links disabled.
def index_display(row):
- return unicode(row)
+ return text_type(row)
elif self._cw.vreg.schema.eschema(rset.description[0][col]).final:
def index_display(row):
- return unicode(rset[row][col])
+ return text_type(rset[row][col])
else:
def index_display(row):
return rset.get_entity(row, col).view('text')
--- a/web/views/owl.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/owl.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,7 +19,9 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
+
+from six.moves import range
from logilab.mtconverter import TransformError, xml_escape
@@ -166,7 +168,7 @@
def call(self):
self.w(OWL_OPENING_ROOT % {'appid': self._cw.vreg.schema.name})
- for i in xrange(self.cw_rset.rowcount):
+ for i in range(self.cw_rset.rowcount):
self.cell_call(i, 0)
self.w(OWL_CLOSING_ROOT)
--- a/web/views/plots.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/plots.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,10 @@
"""basic plot views"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
+
+from six import add_metaclass
+from six.moves import range
from logilab.common.date import datetime2ticks
from logilab.common.deprecation import class_deprecated
@@ -83,9 +86,10 @@
def _render(self, *args, **kwargs):
raise NotImplementedError
+
+@add_metaclass(class_deprecated)
class FlotPlotWidget(PlotWidget):
"""PlotRenderer widget using Flot"""
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.14] cubicweb.web.views.plots module is deprecated, use the jqplot cube instead'
onload = u"""
var fig = jQuery('#%(figid)s');
@@ -117,7 +121,7 @@
if req.ie_browser():
req.add_js('excanvas.js')
req.add_js(('jquery.flot.js', 'cubicweb.flot.js'))
- figid = u'figure%s' % req.varmaker.next()
+ figid = u'figure%s' % next(req.varmaker)
plotdefs = []
plotdata = []
self.w(u'<div id="%s" style="width: %spx; height: %spx;"></div>' %
@@ -137,8 +141,8 @@
'dateformat': '"%s"' % fmt})
+@add_metaclass(class_deprecated)
class PlotView(baseviews.AnyRsetView):
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.14] cubicweb.web.views.plots module is deprecated, use the jqplot cube instead'
__regid__ = 'plot'
title = _('generic plot')
@@ -154,7 +158,7 @@
abscissa = [row[0] for row in self.cw_rset]
plots = []
nbcols = len(self.cw_rset.rows[0])
- for col in xrange(1, nbcols):
+ for col in range(1, nbcols):
data = [row[col] for row in self.cw_rset]
plots.append(filterout_nulls(abscissa, data))
plotwidget = FlotPlotWidget(varnames, plots, timemode=self.timemode)
--- a/web/views/primary.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/primary.py Thu Nov 12 10:52:28 2015 +0100
@@ -38,7 +38,7 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from warnings import warn
--- a/web/views/pyviews.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/pyviews.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,6 +19,9 @@
"""
__docformat__ = "restructuredtext en"
+from six import text_type
+from six.moves import range
+
from cubicweb.view import View
from cubicweb.predicates import match_kwargs
from cubicweb.web.views import tableview
@@ -38,7 +41,7 @@
w(self.empty_cell_content)
def render_cell(self, w, rownum):
- w(unicode(self.data[rownum][self.colid]))
+ w(text_type(self.data[rownum][self.colid]))
class PyValTableView(tableview.TableMixIn, View):
@@ -100,7 +103,7 @@
def build_column_renderers(self):
return [self.column_renderer(colid)
- for colid in xrange(len(self.pyvalue[0]))]
+ for colid in range(len(self.pyvalue[0]))]
def facets_form(self, mainvar=None):
return None # not supported
--- a/web/views/rdf.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/rdf.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,9 @@
"""base xml and rss views"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
+
+from six.moves import range
from yams import xy
@@ -56,7 +58,7 @@
graph.bind('cw', CW)
for prefix, xmlns in xy.XY.prefixes.items():
graph.bind(prefix, rdflib.Namespace(xmlns))
- for i in xrange(self.cw_rset.rowcount):
+ for i in range(self.cw_rset.rowcount):
entity = self.cw_rset.complete_entity(i, 0)
self.entity2graph(graph, entity)
self.w(graph.serialize(format=self.format))
--- a/web/views/reledit.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/reledit.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,7 +20,7 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
import copy
from warnings import warn
@@ -259,7 +259,7 @@
elif action == 'add':
add_etype = self._compute_ttypes(rschema, role)[0]
_new_entity = self._cw.vreg['etypes'].etype_class(add_etype)(self._cw)
- _new_entity.eid = self._cw.varmaker.next()
+ _new_entity.eid = next(self._cw.varmaker)
edit_entity = _new_entity
# XXX see forms.py ~ 276 and entities.linked_to method
# is there another way?
@@ -292,7 +292,7 @@
cwtarget='eformframe', cssclass='releditForm',
**formargs)
# pass reledit arguments
- for pname, pvalue in event_args.iteritems():
+ for pname, pvalue in event_args.items():
form.add_hidden('__reledit|' + pname, pvalue)
# handle buttons
if form.form_buttons: # edition, delete
--- a/web/views/schema.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/schema.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,7 @@
"""Specific views for schema related entities"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from itertools import cycle
@@ -26,6 +26,8 @@
import os, os.path as osp
import codecs
+from six import text_type
+
from logilab.common.graph import GraphGenerator, DotBackend
from logilab.common.ureports import Section, Table
from logilab.common.registry import yes
@@ -114,7 +116,7 @@
def grouped_permissions_table(self, rschema):
# group relation definitions with identical permissions
perms = {}
- for rdef in rschema.rdefs.itervalues():
+ for rdef in rschema.rdefs.values():
rdef_perms = []
for action in rdef.ACTIONS:
groups = sorted(rdef.get_groups(action))
@@ -131,7 +133,7 @@
_ = self._cw._
w(u'<div style="margin: 0px 1.5em">')
tmpl = u'<strong>%s</strong> %s <strong>%s</strong>'
- for perm, rdefs in perms.iteritems():
+ for perm, rdefs in perms.items():
w(u'<div>%s</div>' % u', '.join(
tmpl % (_(s.type), _(rschema.type), _(o.type)) for s, o in rdefs))
# accessing rdef from previous loop by design: only used to get
@@ -279,7 +281,7 @@
def cell_call(self, row, col):
defaultval = self.cw_rset.rows[row][col]
if defaultval is not None:
- self.w(unicode(self.cw_rset.rows[row][col].unzpickle()))
+ self.w(text_type(self.cw_rset.rows[row][col].unzpickle()))
class CWETypeRelationCardinalityCell(baseviews.FinalView):
__regid__ = 'etype-rel-cardinality-cell'
@@ -487,7 +489,7 @@
entity = self.cw_rset.get_entity(row, col)
rschema = self._cw.vreg.schema.rschema(entity.rtype.name)
rdef = rschema.rdefs[(entity.stype.name, entity.otype.name)]
- constraints = [xml_escape(unicode(c)) for c in getattr(rdef, 'constraints')]
+ constraints = [xml_escape(text_type(c)) for c in getattr(rdef, 'constraints')]
self.w(u'<br/>'.join(constraints))
class CWAttributeOptionsCell(EntityView):
@@ -557,8 +559,9 @@
def __init__(self, visitor, cw):
self.visitor = visitor
self.cw = cw
- self.nextcolor = cycle( ('#ff7700', '#000000',
- '#ebbc69', '#888888') ).next
+ self._cycle = iter(cycle(('#ff7700', '#000000', '#ebbc69', '#888888')))
+ self.nextcolor = lambda: next(self._cycle)
+
self.colors = {}
def node_properties(self, eschema):
--- a/web/views/sessions.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/sessions.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,20 +19,28 @@
__docformat__ = "restructuredtext en"
from time import time
+from logging import getLogger
-from cubicweb import RepositoryError, Unauthorized, BadConnectionId
-from cubicweb.web import InvalidSession, component
+from logilab.common.registry import RegistrableObject
+
+from cubicweb import RepositoryError, Unauthorized, BadConnectionId, set_log_methods
+from cubicweb.predicates import yes
+from cubicweb.web import InvalidSession
+
+from cubicweb.web.views import authentication
-class AbstractSessionManager(component.Component):
+class AbstractSessionManager(RegistrableObject):
"""manage session data associated to a session identifier"""
__abstract__ = True
+ __select__ = yes()
+ __registry__ = 'sessions'
__regid__ = 'sessionmanager'
def __init__(self, repo):
vreg = repo.vreg
self.session_time = vreg.config['http-session-time'] or None
- self.authmanager = vreg['components'].select('authmanager', repo=repo)
+ self.authmanager = authentication.RepositoryAuthenticationManager(repo)
interval = (self.session_time or 0) / 2.
if vreg.config.anonymous_user()[0] is not None:
self.cleanup_anon_session_time = vreg.config['cleanup-anonymous-session-time'] or 5 * 60
@@ -53,15 +61,7 @@
closed, total = 0, 0
for session in self.current_sessions():
total += 1
- try:
- last_usage_time = session.cnx.check()
- except AttributeError:
- last_usage_time = session.mtime
- except BadConnectionId:
- self.close_session(session)
- closed += 1
- continue
-
+ last_usage_time = session.mtime
no_use_time = (time() - last_usage_time)
if session.anonymous_session:
if no_use_time >= self.cleanup_anon_session_time:
@@ -95,11 +95,14 @@
raise NotImplementedError()
+set_log_methods(AbstractSessionManager, getLogger('cubicweb.sessionmanager'))
+
+
class InMemoryRepositorySessionManager(AbstractSessionManager):
"""manage session data associated to a session identifier"""
def __init__(self, *args, **kwargs):
- AbstractSessionManager.__init__(self, *args, **kwargs)
+ super(InMemoryRepositorySessionManager, self).__init__(*args, **kwargs)
# XXX require a RepositoryAuthenticationManager which violates
# authenticate interface by returning a session instead of a user
#assert isinstance(self.authmanager, RepositoryAuthenticationManager)
--- a/web/views/sparql.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/sparql.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,9 @@
"""SPARQL integration"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
+
+from six.moves import range
from yams import xy
from rql import TypeResolverException
@@ -111,7 +113,7 @@
rqlst = self.cw_rset.syntax_tree().children[0]
varnames = [var.name for var in rqlst.selection]
results = E.results()
- for rowidx in xrange(len(self.cw_rset)):
+ for rowidx in range(len(self.cw_rset)):
result = E.result()
for colidx, varname in enumerate(varnames):
result.append(self.cell_binding(rowidx, colidx, varname))
@@ -140,4 +142,4 @@
def registration_callback(vreg):
if Sparql2rqlTranslator is not None:
- vreg.register_all(globals().itervalues(), __name__)
+ vreg.register_all(globals().values(), __name__)
--- a/web/views/startup.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/startup.py Thu Nov 12 10:52:28 2015 +0100
@@ -22,7 +22,7 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from logilab.common.textutils import unormalize
from logilab.common.deprecation import deprecated
@@ -106,7 +106,7 @@
def entity_types_table(self, eschemas):
infos = sorted(self.entity_types(eschemas),
- key=lambda (l,a,e): unormalize(l))
+ key=lambda t: unormalize(t[0]))
q, r = divmod(len(infos), 2)
if r:
infos.append( (None, ' ', ' ') )
--- a/web/views/staticcontrollers.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/staticcontrollers.py Thu Nov 12 10:52:28 2015 +0100
@@ -140,7 +140,7 @@
"""return the filepath that will be used to cache concatenation of `paths`
"""
_, ext = osp.splitext(paths[0])
- fname = 'cache_concat_' + hashlib.md5(';'.join(paths)).hexdigest() + ext
+ fname = 'cache_concat_' + hashlib.md5((';'.join(paths)).encode('ascii')).hexdigest() + ext
return osp.join(self.config.appdatahome, 'uicache', fname)
def concat_cached_filepath(self, paths):
@@ -167,7 +167,7 @@
with open(osp.join(dirpath, rid), 'rb') as source:
for line in source:
f.write(line)
- f.write('\n')
+ f.write(b'\n')
f.close()
except:
os.remove(tmpfile)
--- a/web/views/tableview.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/tableview.py Thu Nov 12 10:52:28 2015 +0100
@@ -42,7 +42,7 @@
.. autoclass:: cubicweb.web.views.tableview.TableLayout
:members:
-There is by default only on table layout, using the 'table_layout' identifier,
+There is by default only one table layout, using the 'table_layout' identifier,
that is referenced by table views
:attr:`cubicweb.web.views.tableview.TableMixIn.layout_id`. If you want to
customize the look and feel of your table, you can either replace the default
@@ -52,21 +52,24 @@
Notice you can gives options to the layout using a `layout_args` dictionary on
your class.
-If you can still find a view that suit your needs, you should take a look at the
+If you still can't find a view that suits your needs, you should take a look at the
class below that is the common abstract base class for the three views defined
-above and implements you own class.
+above and implement your own class.
.. autoclass:: cubicweb.web.views.tableview.TableMixIn
:members:
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from warnings import warn
from copy import copy
from types import MethodType
+from six import string_types, add_metaclass, create_bound_method
+from six.moves import range
+
from logilab.mtconverter import xml_escape
from logilab.common.decorators import cachedproperty
from logilab.common.deprecation import class_deprecated
@@ -162,7 +165,7 @@
def __init__(self, req, view, **kwargs):
super(TableLayout, self).__init__(req, **kwargs)
- for key, val in self.cw_extra_kwargs.items():
+ for key, val in list(self.cw_extra_kwargs.items()):
if hasattr(self.__class__, key) and not key[0] == '_':
setattr(self, key, val)
self.cw_extra_kwargs.pop(key)
@@ -225,7 +228,7 @@
def render_table_body(self, w, colrenderers):
w(u'<tbody>')
- for rownum in xrange(self.view.table_size):
+ for rownum in range(self.view.table_size):
self.render_row(w, rownum, colrenderers)
w(u'</tbody>')
@@ -284,7 +287,7 @@
attrs = renderer.attributes.copy()
if renderer.sortable:
sortvalue = renderer.sortvalue(rownum)
- if isinstance(sortvalue, basestring):
+ if isinstance(sortvalue, string_types):
sortvalue = sortvalue[:self.sortvalue_limit]
if sortvalue is not None:
attrs[u'cubicweb:sortvalue'] = js_dumps(sortvalue)
@@ -646,10 +649,10 @@
# compute displayed columns
if self.displaycols is None:
if headers is not None:
- displaycols = range(len(headers))
+ displaycols = list(range(len(headers)))
else:
rqlst = self.cw_rset.syntax_tree()
- displaycols = range(len(rqlst.children[0].selection))
+ displaycols = list(range(len(rqlst.children[0].selection)))
else:
displaycols = self.displaycols
# compute table headers
@@ -723,7 +726,7 @@
for aname, member in[('renderfunc', renderfunc),
('sortfunc', sortfunc)]:
if isinstance(member, MethodType):
- member = MethodType(member.im_func, acopy, acopy.__class__)
+ member = create_bound_method(member.__func__, acopy)
setattr(acopy, aname, member)
return acopy
finally:
@@ -918,13 +921,13 @@
################################################################################
+@add_metaclass(class_deprecated)
class TableView(AnyRsetView):
"""The table view accepts any non-empty rset. It uses introspection on the
result set to compute column names and the proper way to display the cells.
It is however highly configurable and accepts a wealth of options.
"""
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.14] %(cls)s is deprecated'
__regid__ = 'table'
title = _('table')
@@ -977,9 +980,9 @@
if 'displaycols' in self._cw.form:
displaycols = [int(idx) for idx in self._cw.form['displaycols']]
elif headers is not None:
- displaycols = range(len(headers))
+ displaycols = list(range(len(headers)))
else:
- displaycols = range(len(self.cw_rset.syntax_tree().children[0].selection))
+ displaycols = list(range(len(self.cw_rset.syntax_tree().children[0].selection)))
return displaycols
def _setup_tablesorter(self, divid):
@@ -1143,7 +1146,7 @@
else:
column.append_renderer(subvid or 'incontext', colindex)
if cellattrs and colindex in cellattrs:
- for name, value in cellattrs[colindex].iteritems():
+ for name, value in cellattrs[colindex].items():
column.add_attr(name, value)
# add column
columns.append(column)
@@ -1184,8 +1187,8 @@
title = _('editable-table')
+@add_metaclass(class_deprecated)
class CellView(EntityView):
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.14] %(cls)s is deprecated'
__regid__ = 'cell'
__select__ = nonempty_rset()
@@ -1271,6 +1274,7 @@
finalview = 'editable-final'
+@add_metaclass(class_deprecated)
class EntityAttributesTableView(EntityView):
"""This table displays entity attributes in a table and allow to set a
specific method to help building cell content for each attribute as well as
@@ -1282,7 +1286,6 @@
Table will render column header using the method header_for_COLNAME if
defined otherwise COLNAME will be used.
"""
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.14] %(cls)s is deprecated'
__abstract__ = True
columns = ()
@@ -1298,7 +1301,7 @@
self.w(u'<table class="%s">' % self.table_css)
self.table_header(sample)
self.w(u'<tbody>')
- for row in xrange(self.cw_rset.rowcount):
+ for row in range(self.cw_rset.rowcount):
self.cell_call(row=row, col=0)
self.w(u'</tbody>')
self.w(u'</table>')
--- a/web/views/tabs.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/tabs.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,9 @@
"""base classes to handle tabbed views"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
+
+from six import string_types
from logilab.common.deprecation import class_renamed
from logilab.mtconverter import xml_escape
@@ -114,7 +116,7 @@
active_tab = uilib.domid(default_tab)
viewsvreg = self._cw.vreg['views']
for tab in tabs:
- if isinstance(tab, basestring):
+ if isinstance(tab, string_types):
tabid, tabkwargs = tab, {}
else:
tabid, tabkwargs = tab
--- a/web/views/timetable.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/timetable.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,9 @@
"""html timetable views"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
+
+from six.moves import range
from logilab.mtconverter import xml_escape
from logilab.common.date import ONEDAY, date_range, todatetime
@@ -51,7 +53,7 @@
users = []
users_max = {}
# XXX: try refactoring with calendar.py:OneMonthCal
- for row in xrange(self.cw_rset.rowcount):
+ for row in range(self.cw_rset.rowcount):
task = self.cw_rset.get_entity(row, 0)
icalendarable = task.cw_adapt_to('ICalendarable')
if len(self.cw_rset[row]) > 1 and self.cw_rset.description[row][1] == 'CWUser':
@@ -88,7 +90,7 @@
rows = []
# colors here are class names defined in cubicweb.css
- colors = ["col%x" % i for i in xrange(12)]
+ colors = ["col%x" % i for i in range(12)]
next_color_index = 0
visited_tasks = {} # holds a description of a task for a user
--- a/web/views/treeview.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/treeview.py Thu Nov 12 10:52:28 2015 +0100
@@ -20,7 +20,7 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from warnings import warn
@@ -140,7 +140,7 @@
ajaxargs = json.loads(form.pop('morekwargs'))
# got unicode & python keywords must be strings
morekwargs.update(dict((str(k), v)
- for k, v in ajaxargs.iteritems()))
+ for k, v in ajaxargs.items()))
toplevel_thru_ajax = form.pop('treeview_top', False) or initial_thru_ajax
toplevel = toplevel_thru_ajax or (initial_load and not form.get('fname'))
return subvid, treeid, toplevel_thru_ajax, toplevel
--- a/web/views/uicfg.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/uicfg.py Thu Nov 12 10:52:28 2015 +0100
@@ -57,6 +57,8 @@
from warnings import warn
+from six import string_types
+
from cubicweb import neg_role
from cubicweb.rtags import (RelationTags, RelationTagsBool, RelationTagsSet,
RelationTagsDict, NoTargetRelationTagsDict,
@@ -267,7 +269,7 @@
if not 'inlined' in sectdict:
sectdict['inlined'] = sectdict['main']
# recompute formsections and set it to avoid recomputing
- for formtype, section in sectdict.iteritems():
+ for formtype, section in sectdict.items():
formsections.add('%s_%s' % (formtype, section))
def tag_relation(self, key, formtype, section):
@@ -302,7 +304,7 @@
rtags[section] = value
cls = self.tag_container_cls
rtags = cls('_'.join([section,value])
- for section,value in rtags.iteritems())
+ for section,value in rtags.items())
return rtags
def get(self, *key):
@@ -650,7 +652,7 @@
self.tag_relation((sschema, rschema, oschema, role), True)
def _tag_etype_attr(self, etype, attr, desttype='*', *args, **kwargs):
- if isinstance(attr, basestring):
+ if isinstance(attr, string_types):
attr, role = attr, 'subject'
else:
attr, role = attr
@@ -687,5 +689,5 @@
def registration_callback(vreg):
- vreg.register_all(globals().itervalues(), __name__)
+ vreg.register_all(globals().values(), __name__)
indexview_etype_section.init(vreg.schema)
--- a/web/views/undohistory.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/undohistory.py Thu Nov 12 10:52:28 2015 +0100
@@ -17,7 +17,7 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from logilab.common.registry import Predicate
@@ -46,7 +46,7 @@
def __str__(self):
return '%s(%s)' % (self.__class__.__name__, ', '.join(
- "%s=%v" % (str(k), str(v)) for k, v in kwargs.iteritems() ))
+ "%s=%s" % (str(k), str(v)) for k, v in kwargs.items() ))
def __call__(self, cls, req, tx_action=None, **kwargs):
# tx_action is expected to be a transaction.AbstractAction
--- a/web/views/urlpublishing.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/urlpublishing.py Thu Nov 12 10:52:28 2015 +0100
@@ -60,7 +60,7 @@
from rql import TypeResolverException
from cubicweb import RegistryException
-from cubicweb.web import NotFound, Redirect, component
+from cubicweb.web import NotFound, Redirect, component, views
class PathDontMatch(Exception):
@@ -201,18 +201,14 @@
return self.handle_etype_attr(req, cls, attrname, value)
return self.handle_etype(req, cls)
- def set_vid_for_rset(self, req, cls, rset):# cls is there to ease overriding
+ def set_vid_for_rset(self, req, cls, rset): # cls is there to ease overriding
if rset.rowcount == 0:
raise NotFound()
- # we've to set a default vid here, since vid_from_rset may try to use a
- # table view if fetch_rql include some non final relation
- if rset.rowcount == 1:
- req.form.setdefault('vid', 'primary')
- else: # rset.rowcount >= 1
- if len(rset.column_types(0)) > 1:
- req.form.setdefault('vid', 'list')
- else:
- req.form.setdefault('vid', 'sameetypelist')
+ if 'vid' not in req.form:
+ # check_table=False tells vid_from_rset not to try to use a table view if fetch_rql
+ # includes some non-final relation
+ req.form['vid'] = views.vid_from_rset(req, rset, req.vreg.schema,
+ check_table=False)
def handle_etype(self, req, cls):
rset = req.execute(cls.fetch_rql(req.user))
--- a/web/views/urlrewrite.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/urlrewrite.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,6 +19,8 @@
import re
+from six import string_types, add_metaclass
+
from cubicweb.uilib import domid
from cubicweb.appobject import AppObject
@@ -51,6 +53,7 @@
return super(metarewriter, mcs).__new__(mcs, name, bases, classdict)
+@add_metaclass(metarewriter)
class URLRewriter(AppObject):
"""Base class for URL rewriters.
@@ -64,7 +67,6 @@
should be tried first. The higher the priority is, the earlier the
rewriter will be tried.
"""
- __metaclass__ = metarewriter
__registry__ = 'urlrewriting'
__abstract__ = True
priority = 1
@@ -122,14 +124,14 @@
required_groups = None
if required_groups and not req.user.matching_groups(required_groups):
continue
- if isinstance(inputurl, basestring):
+ if isinstance(inputurl, string_types):
if inputurl == uri:
req.form.update(infos)
break
elif inputurl.match(uri): # it's a regexp
# XXX what about i18n? (vtitle for instance)
for param, value in infos.items():
- if isinstance(value, basestring):
+ if isinstance(value, string_types):
req.form[param] = inputurl.sub(value, uri)
else:
req.form[param] = value
@@ -222,7 +224,7 @@
required_groups = None
if required_groups and not req.user.matching_groups(required_groups):
continue
- if isinstance(inputurl, basestring):
+ if isinstance(inputurl, string_types):
if inputurl == uri:
return callback(inputurl, uri, req, self._cw.vreg.schema)
elif inputurl.match(uri): # it's a regexp
--- a/web/views/vcard.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/vcard.py Thu Nov 12 10:52:28 2015 +0100
@@ -23,7 +23,7 @@
from cubicweb.predicates import is_instance
from cubicweb.view import EntityView
-_ = unicode
+from cubicweb import _
VCARD_PHONE_TYPES = {'home': 'HOME', 'office': 'WORK', 'mobile': 'CELL', 'fax': 'FAX'}
--- a/web/views/wdoc.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/wdoc.py Thu Nov 12 10:52:28 2015 +0100
@@ -35,7 +35,7 @@
from cubicweb.view import StartupView
from cubicweb.uilib import rest_publish
from cubicweb.web import NotFound, action
-_ = unicode
+from cubicweb import _
# table of content management #################################################
@@ -73,7 +73,7 @@
def build_toc(config):
alltocfiles = reversed(tuple(config.locate_all_files('toc.xml')))
- maintoc = parse(alltocfiles.next()).getroot()
+ maintoc = parse(next(alltocfiles)).getroot()
maintoc.parent = None
index = {}
build_toc_index(maintoc, index)
--- a/web/views/workflow.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/workflow.py Thu Nov 12 10:52:28 2015 +0100
@@ -22,11 +22,13 @@
"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
import os
from warnings import warn
+from six import add_metaclass
+
from logilab.mtconverter import xml_escape
from logilab.common.graph import escape
from logilab.common.deprecation import class_deprecated
@@ -116,7 +118,7 @@
'changestate', self._cw, entity=entity, transition=transition,
redirect_path=self.redirectpath(entity), **kwargs)
trinfo = self._cw.vreg['etypes'].etype_class('TrInfo')(self._cw)
- trinfo.eid = self._cw.varmaker.next()
+ trinfo.eid = next(self._cw.varmaker)
subform = self._cw.vreg['forms'].select('edition', self._cw, entity=trinfo,
mainform=False)
subform.field_by_name('wf_info_for', 'subject').value = entity.eid
@@ -429,8 +431,8 @@
return WorkflowDotPropsHandler(self._cw)
+@add_metaclass(class_deprecated)
class TmpPngView(TmpFileViewMixin, EntityView):
- __metaclass__ = class_deprecated
__deprecation_warning__ = '[3.18] %(cls)s is deprecated'
__regid__ = 'tmppng'
__select__ = match_form_params('tmpfile')
--- a/web/views/xbel.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/xbel.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,9 @@
"""xbel views"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
+
+from six.moves import range
from logilab.mtconverter import xml_escape
@@ -42,7 +44,7 @@
self.w(u'<!DOCTYPE xbel PUBLIC "+//IDN python.org//DTD XML Bookmark Exchange Language 1.0//EN//XML" "http://www.python.org/topics/xml/dtds/xbel-1.0.dtd">')
self.w(u'<xbel version="1.0">')
self.w(u'<title>%s</title>' % self._cw._('bookmarks'))
- for i in xrange(self.cw_rset.rowcount):
+ for i in range(self.cw_rset.rowcount):
self.cell_call(i, 0)
self.w(u"</xbel>")
--- a/web/views/xmlrss.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/views/xmlrss.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,11 +18,13 @@
"""base xml and rss views"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
from base64 import b64encode
from time import timezone
+from six.moves import range
+
from logilab.mtconverter import xml_escape
from cubicweb.predicates import (is_instance, non_final_entity, one_line_rset,
@@ -64,7 +66,7 @@
"""display a list of entities by calling their <item_vid> view"""
self.w(u'<?xml version="1.0" encoding="%s"?>\n' % self._cw.encoding)
self.w(u'<%s size="%s">\n' % (self.xml_root, len(self.cw_rset)))
- for i in xrange(self.cw_rset.rowcount):
+ for i in range(self.cw_rset.rowcount):
self.cell_call(i, 0)
self.w(u'</%s>\n' % self.xml_root)
@@ -256,7 +258,7 @@
def call(self):
"""display a list of entities by calling their <item_vid> view"""
self._open()
- for i in xrange(self.cw_rset.rowcount):
+ for i in range(self.cw_rset.rowcount):
self.cell_call(i, 0)
self._close()
--- a/web/webconfig.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/webconfig.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,7 +18,7 @@
"""web ui configuration for cubicweb instances"""
__docformat__ = "restructuredtext en"
-_ = unicode
+from cubicweb import _
import os
import hmac
@@ -26,6 +26,8 @@
from os.path import join, exists, split, isdir
from warnings import warn
+from six import text_type
+
from logilab.common.decorators import cached, cachedproperty
from logilab.common.deprecation import deprecated
from logilab.common.configuration import merge_options
@@ -280,18 +282,6 @@
continue
yield key, pdef
- # don't use @cached: we want to be able to disable it while this must still
- # be cached
- def repository(self, vreg=None):
- """return the instance's repository object"""
- try:
- return self.__repo
- except AttributeError:
- from cubicweb.repoapi import get_repository
- repo = get_repository(config=self, vreg=vreg)
- self.__repo = repo
- return repo
-
def vc_config(self):
return self.repository().get_versions()
@@ -305,7 +295,7 @@
user = self['anonymous-user'] or None
passwd = self['anonymous-password']
if user:
- user = unicode(user)
+ user = text_type(user)
except KeyError:
user, passwd = None, None
except UnicodeDecodeError:
@@ -317,17 +307,17 @@
"""This random key/salt is used to sign content to be sent back by
browsers, eg. in the error report form.
"""
- return str(uuid4())
+ return str(uuid4()).encode('ascii')
def sign_text(self, text):
"""sign some text for later checking"""
# hmac.new expect bytes
- if isinstance(text, unicode):
+ if isinstance(text, text_type):
text = text.encode('utf-8')
# replace \r\n so we do not depend on whether a browser "reencode"
# original message using \r\n or not
return hmac.new(self._instance_salt,
- text.strip().replace('\r\n', '\n')).hexdigest()
+ text.strip().replace(b'\r\n', b'\n')).hexdigest()
def check_text_sign(self, text, signature):
"""check the text signature is equal to the given signature"""
@@ -472,7 +462,7 @@
staticdir = join(staticdir, rdir)
if not isdir(staticdir) and 'w' in mode:
os.makedirs(staticdir)
- return file(join(staticdir, filename), mode)
+ return open(join(staticdir, filename), mode)
def static_file_add(self, rpath, data):
stream = self.static_file_open(rpath)
--- a/web/webctl.py Thu Mar 06 15:55:33 2014 +0100
+++ b/web/webctl.py Thu Nov 12 10:52:28 2015 +0100
@@ -18,6 +18,7 @@
"""cubicweb-ctl commands and command handlers common to twisted/modpython
web configuration
"""
+from __future__ import print_function
__docformat__ = "restructuredtext en"
@@ -44,7 +45,7 @@
def bootstrap(self, cubes, automatic=False, inputlevel=0):
"""bootstrap this configuration"""
if not automatic:
- print '\n' + underline_title('Generic web configuration')
+ print('\n' + underline_title('Generic web configuration'))
config = self.config
config.input_config('web', inputlevel)
if ASK.confirm('Allow anonymous access ?', False):
@@ -87,8 +88,8 @@
copy(osp.join(resource_dir, resource_path), dest_resource)
# handle md5 version subdirectory
linkdir(dest, osp.join(dest, config.instance_md5_version()))
- print ('You can use apache rewrite rule below :\n'
- 'RewriteRule ^/data/(.*) %s/$1 [L]' % dest)
+ print('You can use apache rewrite rule below :\n'
+ 'RewriteRule ^/data/(.*) %s/$1 [L]' % dest)
def _datadirs(self, config, repo=None):
if repo is None:
--- a/wsgi/__init__.py Thu Mar 06 15:55:33 2014 +0100
+++ b/wsgi/__init__.py Thu Nov 12 10:52:28 2015 +0100
@@ -27,11 +27,9 @@
__docformat__ = "restructuredtext en"
from email import message, message_from_string
-from Cookie import SimpleCookie
-from StringIO import StringIO
-from cgi import parse_header
from pprint import pformat as _pformat
+from six.moves.http_cookies import SimpleCookie
def pformat(obj):
"""pretty prints `obj` if possible"""
--- a/wsgi/handler.py Thu Mar 06 15:55:33 2014 +0100
+++ b/wsgi/handler.py Thu Nov 12 10:52:28 2015 +0100
@@ -19,7 +19,9 @@
__docformat__ = "restructuredtext en"
-from itertools import chain, repeat, izip
+from itertools import chain, repeat
+
+from six.moves import zip
from cubicweb import AuthenticationError
from cubicweb.web import DirectResponse
@@ -78,7 +80,7 @@
def __init__(self, code, req, body=None):
text = STATUS_CODE_TEXT.get(code, 'UNKNOWN STATUS CODE')
self.status = '%s %s' % (code, text)
- self.headers = list(chain(*[izip(repeat(k), v)
+ self.headers = list(chain(*[zip(repeat(k), v)
for k, v in req.headers_out.getAllRawHeaders()]))
self.headers = [(str(k), str(v)) for k, v in self.headers]
if body:
--- a/wsgi/request.py Thu Mar 06 15:55:33 2014 +0100
+++ b/wsgi/request.py Thu Nov 12 10:52:28 2015 +0100
@@ -27,13 +27,12 @@
import tempfile
-from StringIO import StringIO
-from urllib import quote
-from urlparse import parse_qs
-from warnings import warn
+from io import BytesIO
+
+from six.moves.urllib.parse import parse_qs
from cubicweb.multipart import (
- copy_file, parse_form_data, MultipartError, parse_options_header)
+ copy_file, parse_form_data, parse_options_header)
from cubicweb.web import RequestError
from cubicweb.web.request import CubicWebRequestBase
from cubicweb.wsgi import pformat, normalize_header
@@ -59,7 +58,7 @@
length = 0
# wsgi.input is not seekable, so copy the request contents to a temporary file
if length < 100000:
- self.content = StringIO()
+ self.content = BytesIO()
else:
self.content = tempfile.TemporaryFile()
copy_file(environ['wsgi.input'], self.content, maxread=length)
@@ -82,7 +81,7 @@
headers= headers_in)
self.content = environ['wsgi.input']
if files is not None:
- for key, part in files.iteritems():
+ for key, part in files.items():
self.form[key] = (part.filename, part.file)
def __repr__(self):
@@ -149,15 +148,10 @@
if params is None:
return
encoding = self.encoding
- for param, val in params.iteritems():
+ for param, val in params.items():
if isinstance(val, (tuple, list)):
- val = [
- unicode(x, encoding) if isinstance(x, str) else x
- for x in val]
if len(val) == 1:
val = val[0]
- elif isinstance(val, str):
- val = unicode(val, encoding)
if param in self.no_script_form_params and val:
val = self.no_script_form_param(param, val)
if param == '_cwmsgid':
--- a/wsgi/test/unittest_wsgi.py Thu Mar 06 15:55:33 2014 +0100
+++ b/wsgi/test/unittest_wsgi.py Thu Nov 12 10:52:28 2015 +0100
@@ -1,7 +1,7 @@
# encoding=utf-8
import webtest.app
-from StringIO import StringIO
+from io import BytesIO
from cubicweb.devtools.webtest import CubicWebTestTC
@@ -21,11 +21,11 @@
r = webtest.app.TestRequest.blank('/', {
'CONTENT_LENGTH': 12,
'CONTENT_TYPE': 'text/plain',
- 'wsgi.input': StringIO('some content')})
+ 'wsgi.input': BytesIO(b'some content')})
req = CubicWebWsgiRequest(r.environ, self.vreg)
- self.assertEqual('some content', req.content.read())
+ self.assertEqual(b'some content', req.content.read())
def test_http_scheme(self):
r = webtest.app.TestRequest.blank('/', {
@@ -52,11 +52,11 @@
self.assertTrue(req.https)
def test_big_content(self):
- content = 'x'*100001
+ content = b'x'*100001
r = webtest.app.TestRequest.blank('/', {
'CONTENT_LENGTH': len(content),
'CONTENT_TYPE': 'text/plain',
- 'wsgi.input': StringIO(content)})
+ 'wsgi.input': BytesIO(content)})
req = CubicWebWsgiRequest(r.environ, self.vreg)
@@ -94,14 +94,14 @@
def test_post_files(self):
content_type, params = self.webapp.encode_multipart(
- (), (('filefield', 'aname', 'acontent'),))
+ (), (('filefield', 'aname', b'acontent'),))
r = webtest.app.TestRequest.blank(
'/', POST=params, content_type=content_type)
req = CubicWebWsgiRequest(r.environ, self.vreg)
self.assertIn('filefield', req.form)
fieldvalue = req.form['filefield']
self.assertEqual(u'aname', fieldvalue[0])
- self.assertEqual('acontent', fieldvalue[1].read())
+ self.assertEqual(b'acontent', fieldvalue[1].read())
def test_post_unicode_urlencoded(self):
params = 'arg=%C3%A9'
@@ -115,3 +115,8 @@
super(WSGIAppTC, cls).init_config(config)
config.https_uiprops = None
config.https_datadir_url = None
+
+
+if __name__ == '__main__':
+ import unittest
+ unittest.main()