# HG changeset patch # User Sylvain Thénault # Date 1288798708 -3600 # Node ID 90f2f20367bccb77b93b1b42dd59366170c8b76a # Parent f4d1d5d9ccbb5fb186369d67a8578c972ea64ebc# Parent 1719137de7da49118927f83dd74bb38d40efe50b oldstable is now 3.9 diff -r f4d1d5d9ccbb -r 90f2f20367bc .hgtags --- a/.hgtags Tue Jul 27 12:36:03 2010 +0200 +++ b/.hgtags Wed Nov 03 16:38:28 2010 +0100 @@ -135,5 +135,25 @@ 5d05b08adeab1ea301e49ed8537e35ede6db92f6 cubicweb-debian-version-3.8.5-1 1a24c62aefc5e57f61be3d04affd415288e81904 cubicweb-version-3.8.6 607a90073911b6bb941a49b5ec0b0d2a9cd479af cubicweb-debian-version-3.8.6-1 +d9936c39d478b6701a4adef17bc28888ffa011c6 cubicweb-version-3.9.0 +eda4940ffef8b7d36127e68de63a52388374a489 cubicweb-debian-version-3.9.0-1 a1a334d934390043a4293a4ee42bdceb1343246e cubicweb-version-3.8.7 1cccf88d6dfe42986e1091de4c364b7b5814c54f cubicweb-debian-version-3.8.7-1 +4d75f743ed49dd7baf8bde7b0e475244933fa08e cubicweb-version-3.9.1 +9bd75af3dca36d7be5d25fc5ab1b89b34c811456 cubicweb-debian-version-3.9.1-1 +e51796b9caf389c224c6f66dcb8aa75bf1b82eff cubicweb-version-3.9.2 +8a23821dc1383e14a7e92a931b91bc6eed4d0af7 cubicweb-debian-version-3.9.2-1 +900772fd9caaf068eb2fdd4544b03efec91901e6 cubicweb-version-3.9.3 +ab1f9686ff3e0843b570b98f89fb5ccc8d7dec8c cubicweb-debian-version-3.9.3-1 +6cebb361dcb27ded654426b4c82f6401c862e034 cubicweb-version-3.9.4 +8d32d82134dc1d8eb0ce230191f34fd49084a168 cubicweb-debian-version-3.9.4-1 +0a1fce8ddc672ca9ee7328ed4f88c1aa6e48d286 cubicweb-version-3.9.5 +12038ca95f0fff2205f7ee029f5602d192118aec cubicweb-debian-version-3.9.5-1 +d37428222a6325583be958d7c7fe7c595115663d cubicweb-version-3.9.6 +7d2cab567735a17cab391c1a7f1bbe39118308a2 cubicweb-debian-version-3.9.6-1 +de588e756f4fbe9c53c72159c6b96580a36d3fa6 cubicweb-version-3.9.7 +1c01f9dffd64d507863c9f8f68e3585b7aa24374 cubicweb-debian-version-3.9.7-1 +eed788018b595d46a55805bd8d2054c401812b2b cubicweb-version-3.9.8 +e4dba8ae963701a36be94ae58c790bc97ba029bb cubicweb-debian-version-3.9.8-1 +df0b2de62cec10c84a2fff5233db05852cbffe93 cubicweb-version-3.9.9 +1ba51b00fc44faa0d6d57448000aaa1fd5c6ab57 cubicweb-debian-version-3.9.9-1 diff -r f4d1d5d9ccbb -r 90f2f20367bc MANIFEST.in --- a/MANIFEST.in Tue Jul 27 12:36:03 2010 +0200 +++ b/MANIFEST.in Wed Nov 03 16:38:28 2010 +0100 @@ -5,13 +5,14 @@ include bin/cubicweb-* include man/cubicweb-ctl.1 -recursive-include doc README makefile *.conf *.py *.rst *.txt *.html *.png *.svg *.zargo *.dia +recursive-include doc README makefile *.conf *.css *.py *.rst *.txt *.html *.png *.svg *.zargo *.dia recursive-include misc *.py *.png *.display include web/views/*.pt recursive-include web/data external_resources *.js *.css *.py *.png *.gif *.ico *.ttf recursive-include web/wdoc *.rst *.png *.xml ChangeLog* +recursive-include devtools/data *.js *.css recursive-include i18n *.pot *.po recursive-include schemas *.py *.sql @@ -21,10 +22,15 @@ recursive-include sobjects/test/data bootstrap_cubes *.py recursive-include hooks/test/data bootstrap_cubes *.py recursive-include server/test/data bootstrap_cubes *.py source* -recursive-include web/test/data bootstrap_cubes *.py -recursive-include devtools/test/data bootstrap_cubes *.py *.txt +recursive-include devtools/test/data bootstrap_cubes *.py *.txt *.js +recursive-include web/test/data bootstrap_cubes pouet.css *.py + +recursive-include web/test/jstests *.js *.html *.css *.json +recursive-include web/test/windmill *.py recursive-include skeleton *.py *.css *.js *.po compat *.in *.tmpl +prune doc/book/en/.static +prune doc/book/fr/.static prune 
misc/cwfs prune goa diff -r f4d1d5d9ccbb -r 90f2f20367bc __init__.py --- a/__init__.py Tue Jul 27 12:36:03 2010 +0200 +++ b/__init__.py Wed Nov 03 16:38:28 2010 +0100 @@ -17,8 +17,8 @@ # with CubicWeb. If not, see . """CubicWeb is a generic framework to quickly build applications which describes relations between entitites. +""" -""" __docformat__ = "restructuredtext en" # ignore the pygments UserWarnings diff -r f4d1d5d9ccbb -r 90f2f20367bc __pkginfo__.py --- a/__pkginfo__.py Tue Jul 27 12:36:03 2010 +0200 +++ b/__pkginfo__.py Wed Nov 03 16:38:28 2010 +0100 @@ -22,7 +22,7 @@ modname = distname = "cubicweb" -numversion = (3, 8, 7) +numversion = (3, 9, 9) version = '.'.join(str(num) for num in numversion) description = "a repository of entities / relations for knowledge management" @@ -40,10 +40,10 @@ ] __depends__ = { - 'logilab-common': '>= 0.50.2', - 'logilab-mtconverter': '>= 0.6.0', + 'logilab-common': '>= 0.51.0', + 'logilab-mtconverter': '>= 0.8.0', 'rql': '>= 0.26.2', - 'yams': '>= 0.28.1', + 'yams': '>= 0.30.1', 'docutils': '>= 0.6', #gettext # for xgettext, msgcat, etc... # web dependancies @@ -52,12 +52,12 @@ 'Twisted': '', # XXX graphviz # server dependencies - 'logilab-database': '>= 1.0.5', + 'logilab-database': '>= 1.3.0', 'pysqlite': '>= 2.5.5', # XXX install pysqlite2 } __recommends__ = { - 'Pyro': '>= 3.9.1', + 'Pyro': '>= 3.9.1, < 4.0.0', 'PIL': '', # for captcha 'pycrypto': '', # for crypto extensions 'fyzz': '>= 0.1.0', # for sparql @@ -77,6 +77,7 @@ join('server', 'test', 'data'), join('hooks', 'test', 'data'), join('web', 'test', 'data'), + join('devtools', 'data'), join('devtools', 'test', 'data'), 'schemas', 'skeleton'] @@ -95,7 +96,13 @@ else: pydir = join('python' + _pyversion, 'site-packages') +# data files that shall be copied into the main package directory +package_data = { + 'cubicweb.web.views':['*.pt'], + } + try: + # data files that shall be copied outside the main package directory data_files = [ # server data [join('share', 'cubicweb', 'schemas'), @@ -118,10 +125,6 @@ [join(_wdocimages_dir, fname) for fname in listdir(_wdocimages_dir)]], [join('share', 'cubicweb', 'cubes', 'shared', 'i18n'), [join(_i18n_dir, fname) for fname in listdir(_i18n_dir)]], - # XXX: drop .pt files - [join('lib', pydir, 'cubicweb', 'web', 'views'), - [join(_views_dir, fname) for fname in listdir(_views_dir) - if fname.endswith('.pt')]], # skeleton ] except OSError: diff -r f4d1d5d9ccbb -r 90f2f20367bc _exceptions.py --- a/_exceptions.py Tue Jul 27 12:36:03 2010 +0200 +++ b/_exceptions.py Wed Nov 03 16:38:28 2010 +0100 @@ -80,6 +80,8 @@ class MultiSourcesError(RepositoryError, InternalError): """usually due to bad multisources configuration or rql query""" +class UniqueTogetherError(RepositoryError): + """raised when a unique_together constraint caused an IntegrityError""" # security exceptions ######################################################### @@ -128,14 +130,20 @@ """ class NoSelectableObject(RegistryException): - """some views with the given vid have been found but no - one is applicable to the result set - """ + """raised when no appobject is selectable for a given context.""" + def __init__(self, args, kwargs, appobjects): + self.args = args + self.kwargs = kwargs + self.appobjects = appobjects + + def __str__(self): + return ('args: %s, kwargs: %s\ncandidates: %s' + % (self.args, self.kwargs.keys(), self.appobjects)) + class UnknownProperty(RegistryException): """property found in database but unknown in registry""" - # query exception 
############################################################# class QueryError(CubicWebRuntimeError): diff -r f4d1d5d9ccbb -r 90f2f20367bc appobject.py --- a/appobject.py Tue Jul 27 12:36:03 2010 +0200 +++ b/appobject.py Wed Nov 03 16:38:28 2010 +0100 @@ -39,6 +39,92 @@ from logilab.common.decorators import classproperty from logilab.common.logging_ext import set_log_methods +from cubicweb.cwconfig import CubicWebConfiguration + +def class_regid(cls): + """returns a unique identifier for an appobject class""" + if 'id' in cls.__dict__: + warn('[3.6] %s.%s: id is deprecated, use __regid__' + % (cls.__module__, cls.__name__), DeprecationWarning) + cls.__regid__ = cls.id + if hasattr(cls, 'id') and not isinstance(cls.id, property): + return cls.id + return cls.__regid__ + +# helpers for debugging selectors +TRACED_OIDS = None + +def _trace_selector(cls, selector, args, ret): + # /!\ lltrace decorates pure function or __call__ method, this + # means argument order may be different + if isinstance(cls, Selector): + selname = str(cls) + vobj = args[0] + else: + selname = selector.__name__ + vobj = cls + if TRACED_OIDS == 'all' or class_regid(vobj) in TRACED_OIDS: + #SELECTOR_LOGGER.warning('selector %s returned %s for %s', selname, ret, cls) + print '%s -> %s for %s(%s)' % (selname, ret, vobj, vobj.__regid__) + +def lltrace(selector): + """use this decorator on your selectors so the becomes traceable with + :class:`traced_selection` + """ + # don't wrap selectors if not in development mode + if CubicWebConfiguration.mode == 'system': # XXX config.debug + return selector + def traced(cls, *args, **kwargs): + ret = selector(cls, *args, **kwargs) + if TRACED_OIDS is not None: + _trace_selector(cls, selector, args, ret) + return ret + traced.__name__ = selector.__name__ + traced.__doc__ = selector.__doc__ + return traced + +class traced_selection(object): + """ + Typical usage is : + + .. sourcecode:: python + + >>> from cubicweb.selectors import traced_selection + >>> with traced_selection(): + ... # some code in which you want to debug selectors + ... # for all objects + + Don't forget the 'from __future__ import with_statement' at the module top-level + if you're using python prior to 2.6. + + This will yield lines like this in the logs:: + + selector one_line_rset returned 0 for + + You can also give to :class:`traced_selection` the identifiers of objects on + which you want to debug selection ('oid1' and 'oid2' in the example above). + + .. sourcecode:: python + + >>> with traced_selection( ('regid1', 'regid2') ): + ... # some code in which you want to debug selectors + ... # for objects with __regid__ 'regid1' and 'regid2' + + A potentially usefull point to set up such a tracing function is + the `cubicweb.vregistry.Registry.select` method body. 
+ """ + + def __init__(self, traced='all'): + self.traced = traced + + def __enter__(self): + global TRACED_OIDS + TRACED_OIDS = self.traced + + def __exit__(self, exctype, exc, traceback): + global TRACED_OIDS + TRACED_OIDS = None + return traceback is None # selector base classes and operations ######################################## @@ -175,6 +261,7 @@ class AndSelector(MultiSelector): """and-chained selectors (formerly known as chainall)""" + @lltrace def __call__(self, cls, *args, **kwargs): score = 0 for selector in self.selectors: @@ -187,6 +274,7 @@ class OrSelector(MultiSelector): """or-chained selectors (formerly known as chainfirst)""" + @lltrace def __call__(self, cls, *args, **kwargs): for selector in self.selectors: partscore = selector(cls, *args, **kwargs) @@ -199,6 +287,7 @@ def __init__(self, selector): self.selector = selector + @lltrace def __call__(self, cls, *args, **kwargs): score = self.selector(cls, *args, **kwargs) return int(not score) diff -r f4d1d5d9ccbb -r 90f2f20367bc bin/clone_deps.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/bin/clone_deps.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,115 @@ +#!/usr/bin/python +import os +import sys +from subprocess import call, Popen, PIPE +try: + from mercurial.dispatch import dispatch as hg_call +except ImportError: + print '-' * 20 + print "mercurial module is not reachable from this Python interpreter" + print "trying from command line ..." + tryhg = os.system('hg --version') + if tryhg: + print 'mercurial seems to unavailable, please install it' + raise + print 'found it, ok' + print '-' * 20 + def hg_call(args): + call(['hg'] + args) +from urllib import urlopen +from os import path as osp, pardir +from os.path import normpath, join, dirname + +BASE_URL = 'http://www.logilab.org/hg/' + +to_clone = ['fyzz', 'yams', 'rql', + 'logilab/common', 'logilab/constraint', 'logilab/database', + 'logilab/devtools', 'logilab/mtconverter', + 'cubes/blog', 'cubes/calendar', 'cubes/card', 'cubes/comment', + 'cubes/datafeed', 'cubes/email', 'cubes/file', 'cubes/folder', + 'cubes/forgotpwd', 'cubes/keyword', 'cubes/link', + 'cubes/mailinglist', 'cubes/nosylist', 'cubes/person', + 'cubes/preview', 'cubes/registration', 'cubes/rememberme', + 'cubes/tag', 'cubes/vcsfile', 'cubes/zone'] + +# a couple of functions to be used to explore available +# repositories and cubes +def list_repos(repos_root): + assert repos_root.startswith('http://') + hgwebdir_repos = (repo.strip() + for repo in urlopen(repos_root + '?style=raw').readlines() + if repo.strip()) + prefix = osp.commonprefix(hgwebdir_repos) + return (repo[len(prefix):].strip('/') + for repo in hgwebdir_repos) + +def list_all_cubes(base_url=BASE_URL): + all_repos = list_repos(base_url) + #search for cubes + for repo in all_repos: + if repo.startswith('cubes'): + to_clone.append(repo) + +def get_latest_debian_tag(path): + proc = Popen(['hg', '-R', path, 'tags'], stdout=PIPE) + out, _err = proc.communicate() + for line in out.splitlines(): + if 'debian-version' in line: + return line.split()[0] + +def main(): + if len(sys.argv) == 1: + base_url = BASE_URL + elif len(sys.argv) == 2: + base_url = sys.argv[1] + else: + print >> sys.stderr, 'usage %s [base_url]' % sys.argv[0] + sys.exit(1) + print len(to_clone), 'repositories will be cloned' + base_dir = normpath(join(dirname(__file__), pardir, pardir)) + os.chdir(base_dir) + not_updated = [] + for repo in to_clone: + url = base_url + repo + if '/' not in repo: + target_path = repo + else: + assert repo.count('/') == 1, repo + 
directory, repo = repo.split('/') + if not osp.isdir(directory): + os.mkdir(directory) + open(join(directory, '__init__.py'), 'w').close() + target_path = osp.join(directory, repo) + if osp.exists(target_path): + print target_path, 'seems already cloned. Skipping it.' + else: + hg_call(['clone', '-U', url, target_path]) + tag = get_latest_debian_tag(target_path) + if tag: + print 'updating to', tag + hg_call(['update', '-R', target_path, tag]) + else: + not_updated.append(target_path) + print """ +CubicWeb dependencies and standard set of cubes have been fetched and +update to the latest stable version. + +You should ensure your PYTHONPATH contains `%(basedir)s`. +You might want to read the environment configuration section of the documentation +at http://docs.cubicweb.org/admin/setup.html#environment-configuration + +You can find more cubes at http://www.cubicweb.org. +Clone them from `%(baseurl)scubes/` into the `%(basedir)s%(sep)scubes%(sep)s` directory. + +To get started you may read http://docs.cubicweb.org/tutorials/base/index.html. +""" % {'basedir': os.getcwd(), 'baseurl': base_url, 'sep': os.sep} + if not_updated: + print >> sys.stderr, 'WARNING: The following repositories were not updated (no debian tag found):' + for path in not_updated: + print >> sys.stderr, '\t-', path + +if __name__ == '__main__': + main() + + + diff -r f4d1d5d9ccbb -r 90f2f20367bc cwconfig.py --- a/cwconfig.py Tue Jul 27 12:36:03 2010 +0200 +++ b/cwconfig.py Wed Nov 03 16:38:28 2010 +0100 @@ -51,7 +51,7 @@ CW_INSTANCES_DATA_DIR = /var/lib/cubicweb/instances/ CW_RUNTIME_DIR = /var/run/cubicweb/ - * 'user': :: +* 'user': :: CW_INSTANCES_DIR = ~/etc/cubicweb.d/ CW_INSTANCES_DATA_DIR = ~/etc/cubicweb.d/ @@ -151,7 +151,7 @@ from cubicweb import (CW_SOFTWARE_ROOT, CW_MIGRATION_MAP, ConfigurationError, Binary) -from cubicweb.toolsutils import env_path, create_dir +from cubicweb.toolsutils import create_dir CONFIGURATIONS = [] @@ -201,7 +201,8 @@ old_prefix = None if not isdir(start_path): prefix = dirname(start_path) - while not isdir(join(prefix, 'share', 'cubicweb')) and prefix != old_prefix: + while (not isdir(join(prefix, 'share', 'cubicweb')) + or prefix.endswith('.egg')) and prefix != old_prefix: old_prefix = prefix prefix = dirname(prefix) if isdir(join(prefix, 'share', 'cubicweb')): @@ -283,6 +284,7 @@ _INSTALL_PREFIX = os.environ['CW_INSTALL_PREFIX'] except KeyError: _INSTALL_PREFIX = _find_prefix() +_USR_INSTALL = _INSTALL_PREFIX == '/usr' class CubicWebNoAppConfiguration(ConfigurationMixIn): """base class for cubicweb configuration without a specific instance directory @@ -296,9 +298,6 @@ # log_format = '%(asctime)s - [%(threadName)s] (%(name)s) %(levelname)s: %(message)s' # nor remove appobjects based on unused interface [???] 
cleanup_interface_sobjects = True - # debug mode - debugmode = False - if (CWDEV and _forced_mode != 'system'): mode = 'user' @@ -307,7 +306,7 @@ mode = _forced_mode or 'system' _CUBES_DIR = join(_INSTALL_PREFIX, 'share', 'cubicweb', 'cubes') - CUBES_DIR = env_path('CW_CUBES_DIR', _CUBES_DIR, 'cubes', checkexists=False) + CUBES_DIR = abspath(os.environ.get('CW_CUBES_DIR', _CUBES_DIR)) CUBES_PATH = os.environ.get('CW_CUBES_PATH', '').split(os.pathsep) options = ( @@ -499,9 +498,26 @@ deps = dict((key, None) for key in deps) warn('[3.8] cube %s should define %s as a dict' % (cube, key), DeprecationWarning) + for depcube in deps: + try: + newname = CW_MIGRATION_MAP[depcube] + except KeyError: + pass + else: + deps[newname] = deps.pop(depcube) return deps @classmethod + def cube_depends_cubicweb_version(cls, cube): + # XXX no backward compat (see _cube_deps above) + try: + pkginfo = cls.cube_pkginfo(cube) + deps = getattr(pkginfo, '__depends__') + return deps.get('cubicweb') + except AttributeError: + return None + + @classmethod def cube_dependencies(cls, cube): """return cubicweb cubes used by the given cube""" return cls._cube_deps(cube, '__depends_cubes__', '__use__') @@ -518,17 +534,17 @@ """ cubes = list(cubes) todo = cubes[:] + if with_recommends: + available = set(cls.available_cubes()) while todo: cube = todo.pop(0) for depcube in cls.cube_dependencies(cube): if depcube not in cubes: - depcube = CW_MIGRATION_MAP.get(depcube, depcube) cubes.append(depcube) todo.append(depcube) if with_recommends: for depcube in cls.cube_recommends(cube): - if depcube not in cubes: - depcube = CW_MIGRATION_MAP.get(depcube, depcube) + if depcube not in cubes and depcube in available: cubes.append(depcube) todo.append(depcube) return cubes @@ -663,12 +679,14 @@ vregpath.append(path + '.py') return vregpath - def __init__(self): + def __init__(self, debugmode=False): register_stored_procedures() ConfigurationMixIn.__init__(self) + self.debugmode = debugmode self.adjust_sys_path() self.load_defaults() - self.translations = {} + # will be properly initialized later by _gettext_init + self.translations = {'en': (unicode, lambda ctx, msgid: unicode(msgid) )} self._site_loaded = set() # don't register ReStructured Text directives by simple import, avoid pb # with eg sphinx. @@ -684,25 +702,23 @@ # overriden in CubicWebConfiguration self.cls_adjust_sys_path() - def init_log(self, logthreshold=None, debug=False, - logfile=None, syslog=False): + def init_log(self, logthreshold=None, logfile=None, syslog=False): """init the log service""" if logthreshold is None: - if debug: + if self.debugmode: logthreshold = 'DEBUG' else: logthreshold = self['log-threshold'] - self.debugmode = debug if sys.platform == 'win32': # no logrotate on win32, so use logging rotation facilities # for now, hard code weekly rotation every sunday, and 52 weeks kept # idea: make this configurable? 
- init_log(debug, syslog, logthreshold, logfile, self.log_format, + init_log(self.debugmode, syslog, logthreshold, logfile, self.log_format, rotation_parameters={'when': 'W6', # every sunday 'interval': 1, 'backupCount': 52}) else: - init_log(debug, syslog, logthreshold, logfile, self.log_format) + init_log(self.debugmode, syslog, logthreshold, logfile, self.log_format) # configure simpleTal logger logging.getLogger('simpleTAL').setLevel(logging.ERROR) @@ -737,7 +753,7 @@ # XXX extrapath argument to load_module_from_file only in lgc > 0.50.2 from logilab.common.modutils import load_module_from_modpath, modpath_from_file module = load_module_from_modpath(modpath_from_file(sitefile, self.extrapath)) - self.info('%s loaded', sitefile) + self.debug('%s loaded', sitefile) return module def eproperty_definitions(self): @@ -774,11 +790,11 @@ if CubicWebNoAppConfiguration.mode == 'user': _INSTANCES_DIR = expanduser('~/etc/cubicweb.d/') - else: #mode = 'system' - if _INSTALL_PREFIX == '/usr': - _INSTANCES_DIR = '/etc/cubicweb.d/' - else: - _INSTANCES_DIR = join(_INSTALL_PREFIX, 'etc', 'cubicweb.d') + #mode == system' + elif _USR_INSTALL: + _INSTANCES_DIR = '/etc/cubicweb.d/' + else: + _INSTANCES_DIR = join(_INSTALL_PREFIX, 'etc', 'cubicweb.d') if os.environ.get('APYCOT_ROOT'): _cubes_init = join(CubicWebNoAppConfiguration.CUBES_DIR, '__init__.py') @@ -831,7 +847,7 @@ @classmethod def instances_dir(cls): """return the control directory""" - return env_path('CW_INSTANCES_DIR', cls._INSTANCES_DIR, 'registry') + return abspath(os.environ.get('CW_INSTANCES_DIR', cls._INSTANCES_DIR)) @classmethod def migration_scripts_dir(cls): @@ -844,12 +860,12 @@ return mdir @classmethod - def config_for(cls, appid, config=None): + def config_for(cls, appid, config=None, debugmode=False): """return a configuration instance for the given instance identifier """ config = config or guess_configuration(cls.instance_home(appid)) configcls = configuration_cls(config) - return configcls(appid) + return configcls(appid, debugmode) @classmethod def possible_configurations(cls, appid): @@ -903,23 +919,34 @@ path = '%s-%s.log' % (basepath, i) i += 1 return path - return '/var/log/cubicweb/%s-%s.log' % (self.appid, self.name) + if _USR_INSTALL: + return '/var/log/cubicweb/%s-%s.log' % (self.appid, self.name) + else: + log_path = os.path.join(_INSTALL_PREFIX, 'var', 'log', 'cubicweb', '%s-%s.log') + return log_path % (self.appid, self.name) + + def default_pid_file(self): """return default path to the pid file of the instance'server""" if self.mode == 'system': - # XXX not under _INSTALL_PREFIX, right? 
- rtdir = env_path('CW_RUNTIME_DIR', '/var/run/cubicweb/', 'run time') + if _USR_INSTALL: + default = '/var/run/cubicweb/' + else: + default = os.path.join(_INSTALL_PREFIX, 'var', 'run', 'cubicweb') else: import tempfile - rtdir = env_path('CW_RUNTIME_DIR', tempfile.gettempdir(), 'run time') + default = tempfile.gettempdir() + # runtime directory created on startup if necessary, don't check it + # exists + rtdir = abspath(os.environ.get('CW_RUNTIME_DIR', default)) return join(rtdir, '%s-%s.pid' % (self.appid, self.name)) # instance methods used to get instance specific resources ############# - def __init__(self, appid): + def __init__(self, appid, debugmode=False): self.appid = appid - CubicWebNoAppConfiguration.__init__(self) + CubicWebNoAppConfiguration.__init__(self, debugmode) self._cubes = None self.load_file_configuration(self.main_config_file()) @@ -937,11 +964,13 @@ @property def appdatahome(self): if self.mode == 'system': - # XXX not under _INSTALL_PREFIX, right? - iddir = '/var/lib/cubicweb/instances/' + if _USR_INSTALL: + iddir = os.path.join('/var','lib', 'cubicweb', 'instances') + else: + iddir = os.path.join(_INSTALL_PREFIX, 'var', 'lib', 'cubicweb', 'instances') else: iddir = self.instances_dir() - iddir = env_path('CW_INSTANCES_DATA_DIR', iddir, 'additional data') + iddir = abspath(os.environ.get('CW_INSTANCES_DATA_DIR', iddir)) return join(iddir, self.appid) def init_cubes(self, cubes): @@ -986,6 +1015,32 @@ """write down current configuration""" self.generate_config(open(self.main_config_file(), 'w')) + def check_writeable_uid_directory(self, path): + """check given directory path exists, belongs to the user running the + server process and is writeable. + + If not, try to fix this, letting exception propagate when not possible. 
+ """ + if not exists(path): + os.makedirs(path) + if self['uid']: + try: + uid = int(self['uid']) + except ValueError: + from pwd import getpwnam + uid = getpwnam(self['uid']).pw_uid + else: + try: + uid = os.getuid() + except AttributeError: # we are on windows + return + fstat = os.stat(path) + if fstat.st_uid != uid: + os.chown(path, uid, os.getgid()) + import stat + if not (fstat.st_mode & stat.S_IWUSR): + os.chmod(path, fstat.st_mode | stat.S_IWUSR) + @cached def instance_md5_version(self): import hashlib @@ -1000,7 +1055,7 @@ super(CubicWebConfiguration, self).load_configuration() if self.apphome and self.set_language: # init gettext - self._set_language() + self._gettext_init() def _load_site_cubicweb(self, sitefile): # overriden to register cube specific options @@ -1009,12 +1064,12 @@ self.register_options(mod.options) self.load_defaults() - def init_log(self, logthreshold=None, debug=False, force=False): + def init_log(self, logthreshold=None, force=False): """init the log service""" if not force and hasattr(self, '_logging_initialized'): return self._logging_initialized = True - CubicWebNoAppConfiguration.init_log(self, logthreshold, debug, + CubicWebNoAppConfiguration.init_log(self, logthreshold, logfile=self.get('log-file')) # read a config file if it exists logconfig = join(self.apphome, 'logging.conf') @@ -1035,7 +1090,7 @@ if lang != 'en': yield lang - def _set_language(self): + def _gettext_init(self): """set language for gettext""" from gettext import translation path = join(self.apphome, 'i18n') @@ -1115,6 +1170,7 @@ def register_stored_procedures(): from logilab.database import FunctionDescr from rql.utils import register_function, iter_funcnode_variables + from rql.nodes import SortTerm, Constant, VariableRef global _EXT_REGISTERED if _EXT_REGISTERED: @@ -1160,6 +1216,34 @@ register_function(TEXT_LIMIT_SIZE) + class FTIRANK(FunctionDescr): + """return ranking of a variable that must be used as some has_text + relation subject in the query's restriction. Usually used to sort result + of full-text search by ranking. + """ + supported_backends = ('postgres',) + rtype = 'Float' + + def st_check_backend(self, backend, funcnode): + """overriden so that on backend not supporting fti ranking, the + function is removed when in an orderby clause, or replaced by a 1.0 + constant. + """ + if not self.supports(backend): + parent = funcnode.parent + while parent is not None and not isinstance(parent, SortTerm): + parent = parent.parent + if isinstance(parent, SortTerm): + parent.parent.remove(parent) + else: + funcnode.parent.replace(funcnode, Constant(1.0, 'Float')) + parent = funcnode + for vref in parent.iget_nodes(VariableRef): + vref.unregister_reference() + + register_function(FTIRANK) + + class FSPATH(FunctionDescr): """return path of some bytes attribute stored using the Bytes File-System Storage (bfss) diff -r f4d1d5d9ccbb -r 90f2f20367bc cwctl.py --- a/cwctl.py Tue Jul 27 12:36:03 2010 +0200 +++ b/cwctl.py Wed Nov 03 16:38:28 2010 +0100 @@ -17,9 +17,8 @@ # with CubicWeb. If not, see . """the cubicweb-ctl tool, based on logilab.common.clcommands to provide a pluggable commands system. 
- +""" -""" __docformat__ = "restructuredtext en" # *ctl module should limit the number of import to be imported as quickly as @@ -36,14 +35,18 @@ def getpgid(): """win32 getpgid implementation""" + from os.path import exists, join, isfile, isdir, dirname, abspath -from logilab.common.clcommands import register_commands, pop_arg +from logilab.common.clcommands import CommandLine from logilab.common.shellutils import ASK from cubicweb import ConfigurationError, ExecutionError, BadCommandUsage from cubicweb.cwconfig import CubicWebConfiguration as cwcfg, CWDEV, CONFIGURATIONS -from cubicweb.toolsutils import Command, main_run, rm, create_dir, underline_title +from cubicweb.toolsutils import Command, rm, create_dir, underline_title +from cubicweb.__pkginfo__ import version + +CWCTL = CommandLine('cubicweb-ctl', 'The CubicWeb swiss-knife.', version=version) def wait_process_end(pid, maxtry=10, waittime=1): """wait for a process to actually die""" @@ -61,7 +64,10 @@ raise ExecutionError('can\'t kill process %s' % pid) def list_instances(regdir): - return sorted(idir for idir in listdir(regdir) if isdir(join(regdir, idir))) + if isdir(regdir): + return sorted(idir for idir in listdir(regdir) if isdir(join(regdir, idir))) + else: + return [] def detect_available_modes(templdir): modes = [] @@ -277,15 +283,15 @@ print 'Warnings:\n', '\n'.join('* '+txt for txt in cfgpb.warnings) if cfgpb.errors: print 'Errors:' - for op, cube, version in cfgpb.errors: + for op, cube, version, src in cfgpb.errors: if op == 'add': print '* cube', cube, if version: print ' version', version, - print 'is not installed, but required by %s' % ' '.join(cfgpb.reverse_constraints[cube]) + print 'is not installed, but required by %s' % src else: - print '* cube %s version %s is installed, but version %s is required by (%s)' % ( - cube, cfgpb.cubes[cube], version, ', '.join(cfgpb.reverse_constraints[cube])) + print '* cube %s version %s is installed, but version %s is required by %s' % ( + cube, cfgpb.cubes[cube], version, src) class CreateInstanceCommand(Command): """Create an instance from a cube. 
This is an unified @@ -302,6 +308,7 @@ """ name = 'create' arguments = ' ' + min_args = max_args = 2 options = ( ("config-level", {'short': 'l', 'type' : 'int', 'metavar': '', @@ -326,8 +333,8 @@ """run the command with its specific arguments""" from logilab.common.textutils import splitstrip configname = self.config.config - cubes = splitstrip(pop_arg(args, 1)) - appid = pop_arg(args) + cubes, appid = args + cubes = splitstrip(cubes) # get the configuration and helper config = cwcfg.config_for(appid, configname) config.set_language = False @@ -416,12 +423,12 @@ """ name = 'delete' arguments = '' - + min_args = max_args = 1 options = () def run(self, args): """run the command with its specific arguments""" - appid = pop_arg(args, msg="No instance specified !") + appid = args[0] configs = [cwcfg.config_for(appid, configname) for configname in cwcfg.possible_configurations(appid)] if not configs: @@ -477,23 +484,23 @@ def start_instance(self, appid): """start the instance's server""" - debug = self['debug'] - force = self['force'] - loglevel = self['loglevel'] - config = cwcfg.config_for(appid) - if loglevel is not None: - loglevel = 'LOG_%s' % loglevel.upper() - config.global_set_option('log-threshold', loglevel) - config.init_log(loglevel, debug=debug, force=True) + config = cwcfg.config_for(appid, debugmode=self['debug']) + init_cmdline_log_threshold(config, self['loglevel']) if self['profile']: config.global_set_option('profile', self.config.profile) helper = self.config_helper(config, cmdname='start') pidf = config['pid-file'] - if exists(pidf) and not force: + if exists(pidf) and not self['force']: msg = "%s seems to be running. Remove %s by hand if necessary or use \ the --force option." raise ExecutionError(msg % (appid, pidf)) - helper.start_server(config, debug) + helper.start_server(config) + + +def init_cmdline_log_threshold(config, loglevel): + if loglevel is not None: + config.global_set_option('log-threshold', loglevel.upper()) + config.init_log(config['log-threshold'], force=True) class StopInstanceCommand(InstanceCommand): @@ -570,7 +577,7 @@ print '*'*72 if not ASK.confirm('%s instance %r ?' % (self.name, appid)): continue - StopInstanceCommand().stop_instance(appid) + StopInstanceCommand(self.logger).stop_instance(appid) forkcmd = [w for w in sys.argv if not w in args] forkcmd[1] = 'start' forkcmd = ' '.join(forkcmd) @@ -580,7 +587,7 @@ sys.exit(status) def restart_instance(self, appid): - StopInstanceCommand().stop_instance(appid) + StopInstanceCommand(self.logger).stop_instance(appid) self.start_instance(appid) @@ -739,7 +746,7 @@ print '-> migration needed from %s to %s for %s' % (fromversion, toversion, cube) # only stop once we're sure we have something to do if not (CWDEV or self.config.nostartstop): - StopInstanceCommand().stop_instance(appid) + StopInstanceCommand(self.logger).stop_instance(appid) # run cubicweb/componants migration scripts mih.migrate(vcconf, reversed(toupgrade), self.config) # rewrite main configuration file @@ -788,11 +795,16 @@ repository internals (session, etc...) so most migration commands won't be available. + Arguments after bare "--" string will not be processed by the shell command + You can use it to pass extra arguments to your script and expect for + them in '__args__' afterwards. + the identifier of the instance to connect. 
""" name = 'shell' - arguments = ' [batch command file]' + arguments = ' [batch command file(s)] [-- + + ''' + % data] + if base_url is not None: + html.append('') + html.append('') + html.append('') + + for dep in depends: + html.append(' ' % file_path(dep)) + + html.append(' ') + html.append(' '% (file_path(test_file),)) + html.append(''' + +
+</head>
+<body>
+    <div id="main">
+    </div>
+    <h1 id="qunit-header">QUnit example</h1>
+    <h2 id="qunit-banner"></h2>
+    <h2 id="qunit-userAgent"></h2>
+    <ol id="qunit-tests">
    + +''') + return u'\n'.join(html) + + + +if __name__ == '__main__': + unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc devtools/repotest.py --- a/devtools/repotest.py Tue Jul 27 12:36:03 2010 +0200 +++ b/devtools/repotest.py Wed Nov 03 16:38:28 2010 +0100 @@ -18,8 +18,8 @@ """some utilities to ease repository testing This module contains functions to initialize a new repository. +""" -""" __docformat__ = "restructuredtext en" from pprint import pprint @@ -41,7 +41,7 @@ plan = self._prepare_plan(rql, kwargs) self.planner.build_plan(plan) try: - self.assertEquals(len(plan.steps), len(expected), + self.assertEqual(len(plan.steps), len(expected), 'expected %s steps, got %s' % (len(expected), len(plan.steps))) # step order is important for i, step in enumerate(plan.steps): @@ -52,20 +52,20 @@ def compare_steps(self, step, expected): try: - self.assertEquals(step[0], expected[0], 'expected step type %s, got %s' % (expected[0], step[0])) + self.assertEqual(step[0], expected[0], 'expected step type %s, got %s' % (expected[0], step[0])) if len(step) > 2 and isinstance(step[1], list) and isinstance(expected[1], list): queries, equeries = step[1], expected[1] - self.assertEquals(len(queries), len(equeries), + self.assertEqual(len(queries), len(equeries), 'expected %s queries, got %s' % (len(equeries), len(queries))) for i, (rql, sol) in enumerate(queries): - self.assertEquals(rql, equeries[i][0]) - self.assertEquals(sorted(sol), sorted(equeries[i][1])) + self.assertEqual(rql, equeries[i][0]) + self.assertEqual(sorted(sol), sorted(equeries[i][1])) idx = 2 else: idx = 1 - self.assertEquals(step[idx:-1], expected[idx:-1], + self.assertEqual(step[idx:-1], expected[idx:-1], 'expected step characteristic \n%s\n, got\n%s' % (expected[1:-1], step[1:-1])) - self.assertEquals(len(step[-1]), len(expected[-1]), + self.assertEqual(len(step[-1]), len(expected[-1]), 'got %s child steps, expected %s' % (len(step[-1]), len(expected[-1]))) except AssertionError: print 'error on step ', @@ -134,24 +134,35 @@ schema._eid_index[rdef.eid] = rdef -from logilab.common.testlib import TestCase +from logilab.common.testlib import TestCase, mock_object +from logilab.database import get_db_helper + from rql import RQLHelper + from cubicweb.devtools.fake import FakeRepo, FakeSession from cubicweb.server import set_debug from cubicweb.server.querier import QuerierHelper from cubicweb.server.session import Session -from cubicweb.server.sources.rql2sql import remove_unused_solutions +from cubicweb.server.sources.rql2sql import SQLGenerator, remove_unused_solutions class RQLGeneratorTC(TestCase): - schema = None # set this in concret test + schema = backend = None # set this in concret test def setUp(self): self.repo = FakeRepo(self.schema) + self.repo.system_source = mock_object(dbdriver=self.backend) self.rqlhelper = RQLHelper(self.schema, special_relations={'eid': 'uid', - 'has_text': 'fti'}) + 'has_text': 'fti'}, + backend=self.backend) self.qhelper = QuerierHelper(self.repo, self.schema) ExecutionPlan._check_permissions = _dummy_check_permissions rqlannotation._select_principal = _select_principal + if self.backend is not None: + try: + dbhelper = get_db_helper(self.backend) + except ImportError, ex: + self.skipTest(str(ex)) + self.o = SQLGenerator(self.schema, dbhelper) def tearDown(self): ExecutionPlan._check_permissions = _orig_check_permissions @@ -270,6 +281,7 @@ self.system = self.sources[-1] do_monkey_patch() self._dumb_sessions = [] # by hi-jacked parent setup + self.repo.vreg.rqlhelper.backend = 'postgres' # so 
FTIRANK is considered def add_source(self, sourcecls, uri): self.sources.append(sourcecls(self.repo, self.o.schema, diff -r f4d1d5d9ccbb -r 90f2f20367bc devtools/test/data/dbfill.conf --- a/devtools/test/data/dbfill.conf Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,22 +0,0 @@ -[BASE] -APPLICATION_SCHEMA = /home/adim/cvs_work/soft_prive/ginco/applications/crm/schema -APPLICATION_HOME = /home/adim/etc/erudi.d/crmadim # ??? -FAKEDB_NAME = crmtest -ENCODING = UTF-8 -HOST = crater -USER = adim -PASSWORD = adim - - -[ENTITIES] -default = 20 #means default is 20 entities -Person = 10 # means 10 Persons -Company = 5# means 5 companies - - -[RELATIONS] -Person works_for Company = 4 -Division subsidiary_of Company = 3 - -[DEFAULT_VALUES] -Person.firstname = data/firstnames.txt diff -r f4d1d5d9ccbb -r 90f2f20367bc devtools/test/data/js_examples/dep_1.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/dep_1.js Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,1 @@ +a = 4; diff -r f4d1d5d9ccbb -r 90f2f20367bc devtools/test/data/js_examples/deps_2.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/deps_2.js Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,1 @@ +b = a +2; diff -r f4d1d5d9ccbb -r 90f2f20367bc devtools/test/data/js_examples/test_simple_failure.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/test_simple_failure.js Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,18 @@ +$(document).ready(function() { + + module("air"); + + test("test 1", function() { + equals(2, 4); + }); + + test("test 2", function() { + equals('', '45'); + equals('1024', '32'); + }); + + module("able"); + test("test 3", function() { + same(1, 1); + }); +}); diff -r f4d1d5d9ccbb -r 90f2f20367bc devtools/test/data/js_examples/test_simple_success.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/test_simple_success.js Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,17 @@ +$(document).ready(function() { + + module("air"); + + test("test 1", function() { + equals(2, 2); + }); + + test("test 2", function() { + equals('45', '45'); + }); + + module("able"); + test("test 3", function() { + same(1, 1); + }); +}); diff -r f4d1d5d9ccbb -r 90f2f20367bc devtools/test/data/js_examples/test_with_dep.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/test_with_dep.js Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,9 @@ +$(document).ready(function() { + + module("air"); + + test("test 1", function() { + equals(a, 4); + }); + +}); diff -r f4d1d5d9ccbb -r 90f2f20367bc devtools/test/data/js_examples/test_with_ordered_deps.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/test_with_ordered_deps.js Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,9 @@ +$(document).ready(function() { + + module("air"); + + test("test 1", function() { + equals(b, 6); + }); + +}); diff -r f4d1d5d9ccbb -r 90f2f20367bc devtools/test/data/js_examples/utils.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/utils.js Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,29 @@ +function datetuple(d) { + return [d.getFullYear(), d.getMonth()+1, d.getDate(), + d.getHours(), d.getMinutes()]; +} + +function pprint(obj) { + print('{'); + for(k in obj) { + print(' ' + k + ' = ' + obj[k]); + } + print('}'); +} + +function arrayrepr(array) { + return '[' + array.join(', ') + ']'; +} + +function assertArrayEquals(array1, array2) { + if (array1.length != 
array2.length) { + throw new crosscheck.AssertionFailure(array1.join(', ') + ' != ' + array2.join(', ')); + } + for (var i=0; i. -"""only for unit tests ! - -""" +"""only for unit tests !""" from cubicweb.view import EntityView -from cubicweb.selectors import implements +from cubicweb.selectors import is_instance HTML_PAGE = u""" @@ -31,7 +29,7 @@ class SimpleView(EntityView): __regid__ = 'simple' - __select__ = implements('Bug',) + __select__ = is_instance('Bug',) def call(self, **kwargs): self.cell_call(0, 0) @@ -41,7 +39,7 @@ class RaisingView(EntityView): __regid__ = 'raising' - __select__ = implements('Bug',) + __select__ = is_instance('Bug',) def cell_call(self, row, col): raise ValueError() diff -r f4d1d5d9ccbb -r 90f2f20367bc devtools/test/unittest_dbfill.py --- a/devtools/test/unittest_dbfill.py Tue Jul 27 12:36:03 2010 +0200 +++ b/devtools/test/unittest_dbfill.py Wed Nov 03 16:38:28 2010 +0100 @@ -16,9 +16,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""unit tests for database value generator - -""" +"""unit tests for database value generator""" import os.path as osp import re @@ -56,7 +54,7 @@ return [f.strip() for f in file(osp.join(DATADIR, 'firstnames.txt'))] def setUp(self): - config = ApptestConfiguration('data') + config = ApptestConfiguration('data', apphome=DATADIR) config.bootstrap_cubes() schema = config.load_schema() e_schema = schema.eschema('Person') @@ -68,7 +66,7 @@ def test_string(self): """test string generation""" surname = self.person_valgen.generate_attribute_value({}, 'surname', 12) - self.assertEquals(surname, u'é&surname12') + self.assertEqual(surname, u'é&surname12') def test_domain_value(self): """test value generation from a given domain value""" @@ -100,21 +98,21 @@ def test_phone(self): """tests make_tel utility""" - self.assertEquals(make_tel(22030405), '22 03 04 05') + self.assertEqual(make_tel(22030405), '22 03 04 05') def test_customized_generation(self): - self.assertEquals(self.bug_valgen.generate_attribute_value({}, 'severity', 12), + self.assertEqual(self.bug_valgen.generate_attribute_value({}, 'severity', 12), u'dangerous') - self.assertEquals(self.bug_valgen.generate_attribute_value({}, 'description', 12), + self.assertEqual(self.bug_valgen.generate_attribute_value({}, 'description', 12), u'yo') - self.assertEquals(self.person_valgen.generate_attribute_value({}, 'description', 12), + self.assertEqual(self.person_valgen.generate_attribute_value({}, 'description', 12), u'yo') class ConstraintInsertionTC(TestCase): def test_writeme(self): - self.skip('Test automatic insertion / Schema Constraints') + self.skipTest('Test automatic insertion / Schema Constraints') if __name__ == '__main__': diff -r f4d1d5d9ccbb -r 90f2f20367bc devtools/test/unittest_httptest.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/unittest_httptest.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,67 @@ +# copyright 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unittest for cubicweb.devtools.httptest module""" + +import httplib + +from cubicweb.devtools.httptest import CubicWebServerTC, CubicWebServerConfig + + +class TwistedCWAnonTC(CubicWebServerTC): + + def test_response(self): + try: + response = self.web_get() + except httplib.NotConnected, ex: + self.fail("Can't connection to test server: %s" % ex) + + def test_response_anon(self): + response = self.web_get() + self.assertEqual(response.status, httplib.OK) + + def test_base_url(self): + if self.config['base-url'] not in self.web_get().read(): + self.fail('no mention of base url in retrieved page') + + +class TwistedCWIdentTC(CubicWebServerTC): + anonymous_logged = False + + def test_response_denied(self): + response = self.web_get() + self.assertEqual(response.status, httplib.FORBIDDEN) + + def test_login(self): + response = self.web_get() + if response.status != httplib.FORBIDDEN: + self.skipTest('Already authenticated') + # login + self.web_login(self.admlogin, self.admpassword) + response = self.web_get() + self.assertEqual(response.status, httplib.OK, response.body) + # logout + self.web_logout() + response = self.web_get() + self.assertEqual(response.status, httplib.FORBIDDEN, response.body) + + + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc devtools/test/unittest_qunit.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/unittest_qunit.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,31 @@ +from logilab.common.testlib import unittest_main +from cubicweb.devtools.qunit import make_qunit_html, QUnitTestCase + +from os import path as osp + +JSTESTDIR = osp.abspath(osp.join(osp.dirname(__file__), 'data', 'js_examples')) + + +def js(name): + return osp.join(JSTESTDIR, name) + +class QUnitTestCaseTC(QUnitTestCase): + + all_js_tests = ( + (js('test_simple_success.js'),), + (js('test_with_dep.js'), (js('dep_1.js'),)), + (js('test_with_ordered_deps.js'), (js('dep_1.js'), js('deps_2.js'),)), + ) + + + def test_simple_failure(self): + js_tests = list(self._test_qunit(js('test_simple_failure.js'))) + self.assertEqual(len(js_tests), 3) + test_1, test_2, test_3 = js_tests + self.assertRaises(self.failureException, test_1[0], *test_1[1:]) + self.assertRaises(self.failureException, test_2[0], *test_2[1:]) + test_3[0](*test_3[1:]) + + +if __name__ == '__main__': + unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc devtools/test/unittest_testlib.py --- a/devtools/test/unittest_testlib.py Tue Jul 27 12:36:03 2010 +0200 +++ b/devtools/test/unittest_testlib.py Wed Nov 03 16:38:28 2010 +0100 @@ -20,9 +20,8 @@ """ from cStringIO import StringIO -from unittest import TestSuite -from logilab.common.testlib import (TestCase, unittest_main, +from logilab.common.testlib import (TestCase, unittest_main, TestSuite, SkipAwareTextTestRunner) from cubicweb.devtools import htmlparser @@ -47,9 +46,9 @@ tests = [MyWebTest('test_error_view'), MyWebTest('test_correct_view')] result = self.runner.run(TestSuite(tests)) - self.assertEquals(result.testsRun, 2) - self.assertEquals(len(result.errors), 0) - self.assertEquals(len(result.failures), 1) + 
self.assertEqual(result.testsRun, 2) + self.assertEqual(len(result.errors), 0) + self.assertEqual(len(result.failures), 1) clean_repo_test_cls(MyWebTest) @@ -104,7 +103,7 @@ def test_source1(self): """make sure source is stored correctly""" - self.assertEquals(self.page_info.source, HTML_PAGE2) + self.assertEqual(self.page_info.source, HTML_PAGE2) def test_source2(self): """make sure source is stored correctly - raise exception""" @@ -114,47 +113,47 @@ def test_has_title_no_level(self): """tests h? tags information""" - self.assertEquals(self.page_info.has_title('Test'), True) - self.assertEquals(self.page_info.has_title('Test '), False) - self.assertEquals(self.page_info.has_title('Tes'), False) - self.assertEquals(self.page_info.has_title('Hello world !'), True) + self.assertEqual(self.page_info.has_title('Test'), True) + self.assertEqual(self.page_info.has_title('Test '), False) + self.assertEqual(self.page_info.has_title('Tes'), False) + self.assertEqual(self.page_info.has_title('Hello world !'), True) def test_has_title_level(self): """tests h? tags information""" - self.assertEquals(self.page_info.has_title('Test', level = 1), True) - self.assertEquals(self.page_info.has_title('Test', level = 2), False) - self.assertEquals(self.page_info.has_title('Test', level = 3), False) - self.assertEquals(self.page_info.has_title('Test', level = 4), False) + self.assertEqual(self.page_info.has_title('Test', level = 1), True) + self.assertEqual(self.page_info.has_title('Test', level = 2), False) + self.assertEqual(self.page_info.has_title('Test', level = 3), False) + self.assertEqual(self.page_info.has_title('Test', level = 4), False) self.assertRaises(IndexError, self.page_info.has_title, 'Test', level = 5) def test_has_title_regexp_no_level(self): """tests has_title_regexp() with no particular level specified""" - self.assertEquals(self.page_info.has_title_regexp('h[23] title'), True) + self.assertEqual(self.page_info.has_title_regexp('h[23] title'), True) def test_has_title_regexp_level(self): """tests has_title_regexp() with a particular level specified""" - self.assertEquals(self.page_info.has_title_regexp('h[23] title', 2), True) - self.assertEquals(self.page_info.has_title_regexp('h[23] title', 3), True) - self.assertEquals(self.page_info.has_title_regexp('h[23] title', 4), False) + self.assertEqual(self.page_info.has_title_regexp('h[23] title', 2), True) + self.assertEqual(self.page_info.has_title_regexp('h[23] title', 3), True) + self.assertEqual(self.page_info.has_title_regexp('h[23] title', 4), False) def test_appears(self): """tests PageInfo.appears()""" - self.assertEquals(self.page_info.appears('CW'), True) - self.assertEquals(self.page_info.appears('Logilab'), True) - self.assertEquals(self.page_info.appears('Logilab introduces'), True) - self.assertEquals(self.page_info.appears('H2 title'), False) + self.assertEqual(self.page_info.appears('CW'), True) + self.assertEqual(self.page_info.appears('Logilab'), True) + self.assertEqual(self.page_info.appears('Logilab introduces'), True) + self.assertEqual(self.page_info.appears('H2 title'), False) def test_has_link(self): """tests has_link()""" - self.assertEquals(self.page_info.has_link('Logilab'), True) - self.assertEquals(self.page_info.has_link('logilab'), False) - self.assertEquals(self.page_info.has_link('Logilab', 'http://www.logilab.org'), True) - self.assertEquals(self.page_info.has_link('Logilab', 'http://www.google.com'), False) + self.assertEqual(self.page_info.has_link('Logilab'), True) + 
self.assertEqual(self.page_info.has_link('logilab'), False) + self.assertEqual(self.page_info.has_link('Logilab', 'http://www.logilab.org'), True) + self.assertEqual(self.page_info.has_link('Logilab', 'http://www.google.com'), False) def test_has_link_regexp(self): """test has_link_regexp()""" - self.assertEquals(self.page_info.has_link_regexp('L[oi]gilab'), True) - self.assertEquals(self.page_info.has_link_regexp('L[ai]gilab'), False) + self.assertEqual(self.page_info.has_link_regexp('L[oi]gilab'), True) + self.assertEqual(self.page_info.has_link_regexp('L[ai]gilab'), False) if __name__ == '__main__': diff -r f4d1d5d9ccbb -r 90f2f20367bc devtools/testlib.py --- a/devtools/testlib.py Tue Jul 27 12:36:03 2010 +0200 +++ b/devtools/testlib.py Wed Nov 03 16:38:28 2010 +0100 @@ -24,6 +24,8 @@ import os import sys import re +import urlparse +from os.path import dirname, join from urllib import unquote from math import log from contextlib import contextmanager @@ -31,7 +33,7 @@ import yams.schema -from logilab.common.testlib import TestCase, InnerTest +from logilab.common.testlib import TestCase, InnerTest, Tags from logilab.common.pytest import nocoverage, pause_tracing, resume_tracing from logilab.common.debugger import Debugger from logilab.common.umessage import message_from_string @@ -44,8 +46,9 @@ from cubicweb.sobjects import notification from cubicweb.web import Redirect, application from cubicweb.server.session import security_enabled +from cubicweb.server.hook import SendMailOp from cubicweb.devtools import SYSTEM_ENTITIES, SYSTEM_RELATIONS, VIEW_VALIDATORS -from cubicweb.devtools import fake, htmlparser +from cubicweb.devtools import BASE_URL, fake, htmlparser from cubicweb.utils import json # low-level utilities ########################################################## @@ -69,7 +72,6 @@ after = before return center - before <= line_no <= center + after - def unprotected_entities(schema, strict=False): """returned a set of each non final entity type, excluding "system" entities (eg CWGroup, CWUser...) @@ -80,7 +82,6 @@ protected_entities = yams.schema.BASE_TYPES.union(SYSTEM_ENTITIES) return set(schema.entities()) - protected_entities - def refresh_repo(repo, resetschema=False, resetvreg=False): for pool in repo.pools: pool.close(True) @@ -143,6 +144,30 @@ cwconfig.SMTP = MockSMTP +class TestCaseConnectionProxy(object): + """thin wrapper around `cubicweb.dbapi.Connection` context-manager + used in CubicWebTC (cf. `cubicweb.devtools.testlib.CubicWebTC.login` method) + + It just proxies to the default connection context manager but + restores the original connection on exit. + """ + def __init__(self, testcase, cnx): + self.testcase = testcase + self.cnx = cnx + + def __getattr__(self, attrname): + return getattr(self.cnx, attrname) + + def __enter__(self): + return self.cnx.__enter__() + + def __exit__(self, exctype, exc, tb): + try: + return self.cnx.__exit__(exctype, exc, tb) + finally: + self.cnx.close() + self.testcase.restore_connection() + # base class for cubicweb tests requiring a full cw environments ############### class CubicWebTC(TestCase): @@ -163,22 +188,30 @@ appid = 'data' configcls = devtools.ApptestConfiguration reset_schema = reset_vreg = False # reset schema / vreg between tests + tags = TestCase.tags | Tags('cubicweb', 'cw_repo') @classproperty def config(cls): - """return the configuration object. Configuration is cached on the test - class. + """return the configuration object + + Configuration is cached on the test class. 
""" try: return cls.__dict__['_config'] except KeyError: - config = cls._config = cls.configcls(cls.appid) + home = join(dirname(sys.modules[cls.__module__].__file__), cls.appid) + config = cls._config = cls.configcls(cls.appid, apphome=home) config.mode = 'test' return config @classmethod def init_config(cls, config): - """configuration initialization hooks. You may want to override this.""" + """configuration initialization hooks. + + You may only want to override here the configuraton logic. + + Otherwise, consider to use a different :class:`ApptestConfiguration` + defined in the `configcls` class attribute""" source = config.sources()['system'] cls.admlogin = unicode(source['db-user']) cls.admpassword = source['db-password'] @@ -200,8 +233,9 @@ config.global_set_option('default-dest-addrs', send_to) config.global_set_option('sender-name', 'cubicweb-test') config.global_set_option('sender-addr', 'cubicweb-test@logilab.fr') + # default_base_url on config class isn't enough for TestServerConfiguration + config.global_set_option('base-url', config.default_base_url()) # web resources - config.global_set_option('base-url', devtools.BASE_URL) try: config.global_set_option('embed-allowed', re.compile('.*')) except: # not in server only configuration @@ -266,10 +300,13 @@ # default test setup and teardown ######################################### def setUp(self): + # monkey patch send mail operation so emails are sent synchronously + self._old_mail_commit_event = SendMailOp.commit_event + SendMailOp.commit_event = SendMailOp.sendmails pause_tracing() previous_failure = self.__class__.__dict__.get('_repo_init_failed') if previous_failure is not None: - self.skip('repository is not initialised: %r' % previous_failure) + self.skipTest('repository is not initialised: %r' % previous_failure) try: self._init_repo() except Exception, ex: @@ -287,6 +324,7 @@ for cnx in self._cnxs: if not cnx._closed: cnx.close() + SendMailOp.commit_event = self._old_mail_commit_event def setup_database(self): """add your database setup code by overriding this method""" @@ -313,7 +351,7 @@ req.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)' % ','.join(repr(g) for g in groups), {'x': user.eid}) - user.clear_related_cache('in_group', 'subject') + user.cw_clear_relation_cache('in_group', 'subject') if commit: req.cnx.commit() return user @@ -322,14 +360,18 @@ """return a connection for the given login/password""" if login == self.admlogin: self.restore_connection() - else: - if not kwargs: - kwargs['password'] = str(login) - self.cnx = repo_connect(self.repo, unicode(login), **kwargs) - self.websession = DBAPISession(self.cnx) - self._cnxs.append(self.cnx) + # definitly don't want autoclose when used as a context manager + return self.cnx + autoclose = kwargs.pop('autoclose', True) + if not kwargs: + kwargs['password'] = str(login) + self.cnx = repo_connect(self.repo, unicode(login), **kwargs) + self.websession = DBAPISession(self.cnx) + self._cnxs.append(self.cnx) if login == self.vreg.config.anonymous_user()[0]: self.cnx.anonymous_connection = True + if autoclose: + return TestCaseConnectionProxy(self, self.cnx) return self.cnx def restore_connection(self): @@ -499,9 +541,11 @@ return publisher requestcls = fake.FakeRequest - def request(self, *args, **kwargs): + def request(self, rollbackfirst=False, **kwargs): """return a web ui request""" req = self.requestcls(self.vreg, form=kwargs) + if rollbackfirst: + self.websession.cnx.rollback() req.set_session(self.websession) return req @@ -527,6 +571,30 @@ raise 
return result + def req_from_url(self, url): + """parses `url` and builds the corresponding CW-web request + + req.form will be setup using the url's query string + """ + req = self.request() + if isinstance(url, unicode): + url = url.encode(req.encoding) # req.setup_params() expects encoded strings + querystring = urlparse.urlparse(url)[-2] + params = urlparse.parse_qs(querystring) + req.setup_params(params) + return req + + def url_publish(self, url): + """takes `url`, uses application's app_resolver to find the + appropriate controller, and publishes the result. + + This should pretty much correspond to what occurs in a real CW server + except the apache-rewriter component is not called. + """ + req = self.req_from_url(url) + ctrlid, rset = self.app.url_resolver.process(req, req.relative_path(False)) + return self.ctrl_publish(req, ctrlid) + def expect_redirect(self, callback, req): """call the given callback with req as argument, expecting to get a Redirect exception @@ -573,18 +641,18 @@ sh = self.app.session_handler path, params = self.expect_redirect(lambda x: self.app.connect(x), req) session = req.session - self.assertEquals(len(self.open_sessions), nbsessions, self.open_sessions) - self.assertEquals(session.login, origsession.login) - self.assertEquals(session.anonymous_session, False) - self.assertEquals(path, 'view') - self.assertEquals(params, {'__message': 'welcome %s !' % req.user.login}) + self.assertEqual(len(self.open_sessions), nbsessions, self.open_sessions) + self.assertEqual(session.login, origsession.login) + self.assertEqual(session.anonymous_session, False) + self.assertEqual(path, 'view') + self.assertEqual(params, {'__message': 'welcome %s !' % req.user.login}) def assertAuthFailure(self, req, nbsessions=0): self.app.connect(req) self.assertIsInstance(req.session, DBAPISession) - self.assertEquals(req.session.cnx, None) - self.assertEquals(req.cnx, None) - self.assertEquals(len(self.open_sessions), nbsessions) + self.assertEqual(req.session.cnx, None) + self.assertEqual(req.cnx, None) + self.assertEqual(len(self.open_sessions), nbsessions) clear_cache(req, 'get_authorization') # content validation ####################################################### @@ -620,7 +688,7 @@ **kwargs): """This method tests the view `vid` on `rset` using `template` - If no error occured while rendering the view, the HTML is analyzed + If no error occurred while rendering the view, the HTML is analyzed and parsed. :returns: an instance of `cubicweb.devtools.htmlparser.PageInfo` @@ -633,10 +701,10 @@ view = viewsreg.select(vid, req, **kwargs) # set explicit test description if rset is not None: - self.set_description("testing %s, mod=%s (%s)" % ( + self.set_description("testing vid=%s defined in %s with (%s)" % ( vid, view.__module__, rset.printable_rql())) else: - self.set_description("testing %s, mod=%s (no rset)" % ( + self.set_description("testing vid=%s defined in %s without rset" % ( vid, view.__module__)) if template is None: # raw view testing, no template viewfunc = view.render @@ -652,7 +720,7 @@ def _test_view(self, viewfunc, view, template='main-template', kwargs={}): """this method does the actual call to the view - If no error occured while rendering the view, the HTML is analyzed + If no error occurred while rendering the view, the HTML is analyzed and parsed. 
:returns: an instance of `cubicweb.devtools.htmlparser.PageInfo` @@ -704,7 +772,7 @@ validatorclass = self.content_type_validators.get(view.content_type, default_validator) if validatorclass is None: - return None + return output.strip() validator = validatorclass() if isinstance(validator, htmlparser.DTDValidator): # XXX remove used in progress widget, unknown in html dtd @@ -786,6 +854,8 @@ """base class for test with auto-populating of the database""" __abstract__ = True + tags = CubicWebTC.tags | Tags('autopopulated') + pdbclass = CubicWebDebugger # this is a hook to be able to define a list of rql queries # that are application dependent and cannot be guessed automatically @@ -842,6 +912,7 @@ except ValidationError, ex: # failed to satisfy some constraint print 'error in automatic db population', ex + self.session.commit_state = None # reset uncommitable flag self.post_populate(cu) self.commit() @@ -911,6 +982,9 @@ class AutomaticWebTest(AutoPopulateTest): """import this if you wan automatic tests to be ran""" + + tags = AutoPopulateTest.tags | Tags('web', 'generated') + def setUp(self): AutoPopulateTest.setUp(self) # access to self.app for proper initialization of the authentication diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/README --- a/doc/book/README Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/README Wed Nov 03 16:38:28 2010 +0100 @@ -46,9 +46,40 @@ .. [foot note] the foot note content +Boxes +===== -XXX -* lien vers cw.cwconfig.CW_CUBES_PATH par ex. +- warning box: + .. warning:: + + Warning content +- note box: + .. note:: + + Note content + -automodule, autofunction, automethod, autofunction +Cross references +================ + +To arbitrary section +-------------------- + +:ref:`identifier` ou :ref:`label ` + +Label required of referencing node which as no title, else the node's title will be used. + + +To API objects +-------------- +See the autodoc sphinx extension documentation. Quick overview: + +* ref to a class: :class:`cubicweb.devtools.testlib.AutomaticWebTest` + +* if you can to see only the class name in the generated documentation, add a ~: + :class:`~cubicweb.devtools.testlib.AutomaticWebTest` + +* you can also use :mod: (module), :exc: (exception), :func: (function), :meth: (method)... + +* syntax explained above to specify label explicitly may also be used diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/_maybe_to_integrate/treemixin.rst --- a/doc/book/_maybe_to_integrate/treemixin.rst Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,100 +0,0 @@ - -Class `TreeMixIn` ------------------ - -This class provides a tree interface. This mixin has to be inherited -explicitly and configured using the tree_attribute, parent_target and -children_target class attribute to benefit from this default implementation. - -This class provides the following methods: - - * `different_type_children(entities=True)`, returns children entities - of different type as this entity. According to the `entities` parameter, - returns entity objects (if entity=True) or the equivalent result set. - - * `same_type_children(entities=True)`, returns children entities of - the same type as this entity. According to the `entities` parameter, - return entity objects (if entity=True) or the equivalent result set. - - * `iterchildren( _done=None)`, iters on the children of the entity. - - * `prefixiter( _done=None)` - - * `path()`, returns the list of eids from the root object to this object. - - * `iterparents()`, iters on the parents of the entity. 
- - * `notification_references(view)`, used to control References field - of email send on notification for this entity. `view` is the notification view. - Should return a list of eids which can be used to generate message ids - of previously sent email. - -`TreeMixIn` implements also the ITree interface (``cubicweb.interfaces``): - - * `parent()`, returns the parent entity if any, else None (e.g. if we are on the - root) - - * `children(entities=True, sametype=False)`, returns children entities - according to the `entities` parameter, return entity objects or the - equivalent result set. - - * `children_rql()`, returns the RQL query corresponding to the children - of the entity. - - * `is_leaf()`, returns True if the entity does not have any children. - - * `is_root()`, returns True if the entity does not have any parent. - - * `root()`, returns the root object of the tree representation of - the entity and its related entities. - -Example of use -`````````````` - -Imagine you defined three types of entities in your schema, and they -relates to each others as follows in ``schema.py``:: - - class Entity1(EntityType): - title = String() - is_related_to = SubjectRelation('Entity2', 'subject') - - class Entity2(EntityType): - title = String() - belongs_to = SubjectRelation('Entity3', 'subject') - - class Entity3(EntityType): - name = String() - -You would like to create a view that applies to both entity types -`Entity1` and `Entity2` and which lists the entities they are related to. -That means when you view `Entity1` you want to list all `Entity2`, and -when you view `Entity2` you want to list all `Entity3`. - -In ``entities.py``:: - - class Entity1(TreeMixIn, AnyEntity): - id = 'Entity1' - __implements__ = AnyEntity.__implements__ + (ITree,) - __rtags__ = {('is_related_to', 'Entity2', 'object'): 'link'} - tree_attribute = 'is_related_to' - - def children(self, entities=True): - return self.different_type_children(entities) - - class Entity2(TreeMixIn, AnyEntity): - id = 'Entity2' - __implements__ = AnyEntity.__implements__ + (ITree,) - __rtags__ = {('belongs_to', 'Entity3', 'object'): 'link'} - tree_attribute = 'belongs_to' - - def children(self, entities=True): - return self.different_type_children(entities) - -Once this is done, you can define your common view as follows:: - - class E1E2CommonView(baseviews.PrimaryView): - accepts = ('Entity11, 'Entity2') - - def render_entity_relations(self, entity, siderelations): - self.wview('list', entity.children(entities=False)) - diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/admin/setup.rst --- a/doc/book/en/admin/setup.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/admin/setup.rst Wed Nov 03 16:38:28 2010 +0100 @@ -59,8 +59,27 @@ .. _`CubicWeb.org Forge`: http://www.cubicweb.org/project/ +.. _PipInstallation: + +Installation with pip +````````````````````` + +|cubicweb| and its cubes have been pip_ installable since version 3.8. Search +for them on pypi_:: + + pip install cubicweb + +.. _pip: http://pypi.python.org/pypi/pip +.. _pypi: http://pypi.python.org/pypi?%3Aaction=search&term=cubicweb + .. _SourceInstallation: +.. warning:: + + This method may still have hiccups. If it does not work for you, + please consider installing from version control system + (:ref:`MercurialInstallation`). + Install from source ``````````````````` @@ -70,31 +89,46 @@ .. _`ftp site`: http://ftp.logilab.org/pub/cubicweb/ -Make sure you have installed the dependencies (see appendixes for the list). 
+Make sure you also have all the :ref:`InstallDependencies`. -|cubicweb| should soon be pip_ installable, stay tuned (expected in 3.8). - -.. _pip: http://pypi.python.org/pypi/pip - +.. _MercurialInstallation: Install from version control system ``````````````````````````````````` -You can keep up to date with on-going development by using Mercurial and its -forest extension:: +You can keep up to date with on-going development by using Mercurial:: - hg fclone http://www.logilab.org/hg/forests/cubicweb + hg clone http://www.logilab.org/hg/forests/cubicweb See :ref:`MercurialPresentation` for more details about Mercurial. +A practical way to get many of CubicWeb's dependencies and a nice set +of base cubes is to run the `clone_deps.py` script located in +`cubicweb/bin/`:: + + python cubicweb/bin/clone_deps.py + +(Windows users should replace slashes with antislashes). + +This script will clone a set of mercurial repositories into in the +directory containing the CubicWeb repository, and update them to the +latest published version tag (if any). + When cloning a repository, you might be set in a development branch (the 'default' branch). You should check that the branches of the repositories are set to 'stable' (using `hg up stable` for each one) if you do not intend to develop the framework itself. -Do not forget to update the forest itself (using `cd path/to/forest ; hg up`). +Even better, `hg tags` will display a list of tags in reverse +chronological order. One reasonnable way to get to a working version +is to pick the latest published version (as done by the `clone_deps` +script). These look like `cubicweb-debian-version-3.9.7-1`. Typing:: -Make sure you have installed the dependencies (see appendixes for the list). + hg update cubicweb-debian-version-3.9.7-1 + +will update the repository files to this version. + +Make sure you also have all the :ref:`InstallDependencies`. .. _WindowsInstallation: @@ -102,6 +136,10 @@ Windows installation ```````````````````` +Your best option is probably the :ref:`PipInstallation`. If it does not work or +if you want more control over the process, continue with the following +instructions. + Base elements ~~~~~~~~~~~~~ @@ -110,14 +148,15 @@ done. We assume everything goes into `C:\\` in this document. Adjusting the installation drive should be straightforward. -You should start by downloading and installing the Python(x,y) distribution. It -contains python 2.5 plus numerous useful third-party modules and applications:: +You should start by downloading and installing Python version >= 2.5 and < 3. - http://www.pythonxy.com/download_fr.php +An alternative option would be installing the Python(x,y) +distribution. Python(x,y) is not a requirement, but it makes things easier for +Windows user by wrapping in a single installer python 2.5 plus numerous useful +third-party modules and applications (including Eclipse + pydev, which is an +arguably good IDE for Python under Windows). Download it from this page:: -At the time of this writting, one gets version 2.1.15. Among the many things -provided, one finds Eclipse + pydev (an arguably good IDE for python under -windows). + http://code.google.com/p/pythonxy/wiki/Downloads Then you must grab Twisted. 
There is a windows installer directly available from this page:: @@ -166,11 +205,14 @@ http://www.graphviz.org/Download_windows.php -Simplejson will be provided within the forest, but a win32 compiled version will -run much faster:: +Simplejson is needed when installing with Python 2.5, but included in the +standard library for Python >= 2.6. Get it from there:: http://www.osuch.org/python-simplejson%3Awin32 +Make sure you also have all the :ref:`InstallDependencies` that are not specific +to Windows. + Tools ~~~~~ @@ -189,32 +231,13 @@ http://www.vectrace.com/mercurialeclipse/ -Setting up the sources -~~~~~~~~~~~~~~~~~~~~~~ - -You need to enable the mercurial forest extension. To do this, edit the file:: - - C:\Program Files\TortoiseHg\Mercurial.ini - -In the [extensions] section, add the following line:: - - forest=C:\Program Files\TortoiseHg\ext\forest\forest.py +Getting the sources +~~~~~~~~~~~~~~~~~~~ -Now, you need to clone the cubicweb repository. We assume that you use -Eclipse. From the IDE, choose File -> Import. In the box, select `Mercurial/Clone -repository using MercurialEclipse`. - -In the import main panel you just have to: - -* fill the URL field with http://www.logilab.org/hg/forests/cubicwin32 - -* check the 'Repository is a forest' box. - -Then, click on 'Finish'. It might take some time to get it all. Note that the -`cubicwin32` forest contains additional python packages such as yapps, vobject, -simplejson and twisted-web2 which are not provided with Python(x,y). This is -provided for convenience, as we do not ensure the up-to-dateness of these -packages, especially with respect to security fixes. +You can either download the latest release (see +:ref:`SourceInstallation`) or get the development version using +Mercurial (see :ref:`MercurialInstallation` and below), which is more +convenient. Environment variables ~~~~~~~~~~~~~~~~~~~~~ @@ -250,14 +273,14 @@ This currently assumes that the instances configurations is located at C:\\etc\\cubicweb.d. -For a cube 'my_cube', you will then find -C:\\etc\\cubicweb.d\\my_cube\\win32svc.py that has to be used thusly:: +For a cube 'my_instance', you will then find +C:\\etc\\cubicweb.d\\my_instance\\win32svc.py that has to be used as follows:: win32svc install This should just register your instance as a windows service. A simple:: - net start cubicweb-my_cube + net start cubicweb-my_instance should start the service. @@ -280,9 +303,17 @@ Databases configuration ----------------------- -Whatever the backend used, database connection information are stored in the -instance's :file:`sources` file. Currently cubicweb has been tested using -Postgresql (recommanded), MySQL, SQLServer and SQLite. +Each instance can be configured with its own database connection information, +that will be stored in the instance's :file:`sources` file. The database to use +will be chosen when creating the instance. Currently cubicweb has been tested +using Postgresql (recommended), MySQL, SQLServer and SQLite. + +Other possible sources of data include CubicWeb, Subversion, LDAP and Mercurial, +but at least one relational database is required for CubicWeb to work. You do +not need to install a backend that you do not intend to use for one of your +instances. SQLite is not fit for production use, but it works well for testing +and ships with Python, which saves installation time when you want to get +started quickly. .. _PostgresqlConfiguration: @@ -394,7 +425,7 @@ max_allowed_packet = 128M .. 
Note:: - It is unclear whether mysql supports indexed string of arbitrary lenght or + It is unclear whether mysql supports indexed string of arbitrary length or not. @@ -403,9 +434,10 @@ SQLServer configuration ``````````````````````` -As of this writing, sqlserver support is in progress. You should be able to -connect, create a database and go quite far, but some of the generated SQL is -still currently not accepted by the backend. +As of this writing, support for SQLServer 2005 is functional but incomplete. You +should be able to connect, create a database and go quite far, but some of the +SQL generated from RQL queries is still currently not accepted by the +backend. Porting to SQLServer 2008 is also an item on the backlog. The `source` configuration file may look like this (specific parts only are shown):: @@ -440,14 +472,13 @@ ------------------ If you want to use Pyro to access your instance remotly, or to have multi-source -or distributed configuration, it is required to have a name server Pyro running -on your network. By by default it is detected by a broadcast request, but you can +or distributed configuration, it is required to have a Pyro name server running +on your network. By default it is detected by a broadcast request, but you can specify a location in the instance's configuration file. To do so, you need to : -* launch the server manually before starting cubicweb as a server with `pyro-nsd - start` +* launch the pyro name server with `pyro-nsd start` before starting cubicweb * under debian, edit the file :file:`/etc/default/pyro-nsd` so that the name server pyro will be launched automatically when the machine fire up diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/annexes/depends.rst --- a/doc/book/en/annexes/depends.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/annexes/depends.rst Wed Nov 03 16:38:28 2010 +0100 @@ -1,9 +1,9 @@ .. -*- coding: utf-8 -*- -.. _dependencies: +.. 
_InstallDependencies: -Dependencies -============ +Installation dependencies +========================= When you run CubicWeb from source, either by downloading the tarball or cloning the mercurial forest, here is the list of tools and libraries you need @@ -27,6 +27,9 @@ * logilab-common - http://www.logilab.org/project/logilab-common - http://pypi.python.org/pypi/logilab-common/ - included in the forest +* logilab-database - http://www.logilab.org/project/logilab-database - + http://pypi.python.org/pypi/logilab-database/ - included in the forest + * logilab-constraint - http://www.logilab.org/project/logilab-constraint - http://pypi.python.org/pypi/constraint/ - included in the forest @@ -44,7 +47,7 @@ To use network communication between cubicweb instances / clients: -* Pyro - http://pyro.sourceforge.net/ - http://pypi.python.org/pypi/Pyro +* Pyro - http://www.xs4all.nl/~irmen/pyro3/ - http://pypi.python.org/pypi/Pyro If you're using a Postgres database (recommended): @@ -52,9 +55,7 @@ * plpythonu extension * tsearch2 extension (for postgres < 8.3, in postgres-contrib) -Other optional packages : - -: +Other optional packages: * fyzz - http://www.logilab.org/project/fyzz - http://pypi.python.org/pypi/fyzz - included in the forest, *to activate Sparql querying* diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/annexes/docstrings-conventions.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/en/annexes/docstrings-conventions.rst Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,106 @@ +Javascript docstrings +===================== + +Whereas in Python source code we only need to include a module docstrings +using the directive `.. automodule:: mypythonmodule`, we will have to +explicitely define Javascript modules and functions in the doctrings since +there is no native directive to include Javascript files. + +Rest generation +--------------- + +`pyjsrest` is a small utility parsing Javascript doctrings and generating the +corresponding Restructured file used by Sphinx to generate HTML documentation. +This script will have the following structure:: + + =========== + filename.js + =========== + .. module:: filename.js + +We use the `.. module::` directive to register a javascript library +as a Python module for Sphinx. This provides an entry in the module index. + +The contents of the docstring found in the javascript file will be added as is +following the module declaration. No treatment will be done on the doctring. +All the documentation structure will be in the docstrings and will comply +with the following rules. + +Docstring structure +------------------- + +Basically we document javascript with RestructuredText docstring +following the same convention as documenting Python code. + +The doctring in Javascript files must be contained in standard +Javascript comment signs, starting with `/**` and ending with `*/`, +such as:: + + /** + * My comment starts here. + * This is the second line prefixed with a `*`. + * ... + * ... + * All the follwing line will be prefixed with a `*` followed by a space. + * ... + * ... + */ + + +Comments line prefixed by `//` will be ignored. They are reserved for source +code comments dedicated to developers. + + +Javscript functions docstring +----------------------------- + +By default, the `function` directive describes a module-level function. + +`function` directive +~~~~~~~~~~~~~~~~~~~~ + +Its purpose is to define the function prototype such as:: + + .. 
function:: loadxhtml(url, data, reqtype, mode) + +If any namespace is used, we should add it in the prototype for now, +until we define an appropriate directive:: + + .. function:: jQuery.fn.loadxhtml(url, data, reqtype, mode) + +Function parameters +~~~~~~~~~~~~~~~~~~~ + +We will define function parameters as a bulleted list, where the +parameter name will be backquoted and followed by its description. + +Example of a javascript function docstring:: + + .. function:: loadxhtml(url, data, reqtype, mode) + + cubicweb loadxhtml plugin to make jquery handle xhtml response + + fetches `url` and replaces this's content with the result + + Its arguments are: + + * `url` + + * `mode`, how the replacement should be done (default is 'replace') + Possible values are : + - 'replace' to replace the node's content with the generated HTML + - 'swap' to replace the node itself with the generated HTML + - 'append' to append the generated HTML to the node's content + + +Optional parameter specification +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Javascript functions handle arguments not listed in the function signature. +In the javascript code, they will be flagged using `/* ... */`. In the docstring, +we flag those optional arguments the same way we would define it in +Python:: + + .. function:: asyncRemoteExec(fname, arg1=None, arg2=None) + + diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/annexes/faq.rst --- a/doc/book/en/annexes/faq.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/annexes/faq.rst Wed Nov 03 16:38:28 2010 +0100 @@ -115,7 +115,7 @@ from cubicweb import dbapi - cnx = dbapi.connection(database='instance-id', user='admin', password='admin') + cnx = dbapi.connect(database='instance-id', user='admin', password='admin') cur = cnx.cursor() for name in ('Personal', 'Professional', 'Computers'): cur.execute('INSERT Blog B: B name %s', name) @@ -302,10 +302,10 @@ import pwd import sys - from logilab.common.db import get_connection + from logilab.database import get_connection def getlogin(): - """avoid usinng os.getlogin() because of strange tty / stdin problems + """avoid using os.getlogin() because of strange tty/stdin problems (man 3 getlogin) Another solution would be to use $LOGNAME, $USER or $USERNAME """ @@ -402,6 +402,31 @@ mydb=> update cw_cwuser set cw_upassword='qHO8282QN5Utg' where cw_login='joe'; UPDATE 1 +if you're running over SQL Server, you need to use the CONVERT +function to convert the string to varbinary(255). The SQL query is +therefore:: + + update cw_cwuser set cw_upassword=CONVERT(varbinary(255), 'qHO8282QN5Utg') where cw_login='joe'; + +Be careful, the encryption algorithm is different on Windows and on +Unix. You cannot therefore use a hash generated on Unix to fill in a +Windows database, nor the other way round. + + +You can prefer use a migration script similar to this shell invocation instead:: + + $ cubicweb-ctl shell + >>> from cubicweb.server.utils import crypt_password + >>> crypted = crypt_password('joepass') + >>> rset = rql('Any U WHERE U is CWUser, U login "joe"') + >>> joe = rset.get_entity(0,0) + >>> joe.set_attributes(upassword=crypted) + +The more experimented people would use RQL request directly:: + + >>> rql('SET X upassword %(a)s WHERE X is CWUser, X login "joe"', + ... {'a': crypted}) + I've just created a user in a group and it doesn't work ! 
--------------------------------------------------------- diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/annexes/index.rst --- a/doc/book/en/annexes/index.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/annexes/index.rst Wed Nov 03 16:38:28 2010 +0100 @@ -17,3 +17,5 @@ rql/index mercurial depends + javascript-api + docstrings-conventions diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/annexes/rql/Graph-ex.gif Binary file doc/book/en/annexes/rql/Graph-ex.gif has changed diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/annexes/rql/debugging.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/en/annexes/rql/debugging.rst Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,56 @@ +.. -*- coding: utf-8 -*- + +.. _DEBUGGING: + +Debugging RQL +------------- + +Available levels +~~~~~~~~~~~~~~~~ + +:DBG_NONE: + no debug information (current mode) + +:DBG_RQL: + rql execution information + +:DBG_SQL: + executed sql + +:DBG_REPO: + repository events + +:DBG_MS: + multi-sources + +:DBG_MORE: + more verbosity + +:DBG_ALL: + all level enabled + + +Enable verbose output +~~~~~~~~~~~~~~~~~~~~~ + +It may be interested to enable a verboser output to debug your RQL statements: + +.. sourcecode:: python + + from cubicweb import server + server.set_debug(server.DBG_RQL|server.DBG_SQL|server.DBG_ALL) + + +Detect largest RQL queries +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +See `Profiling and performance` chapter (see :ref:`PROFILING`). + + +API +~~~ + +.. autofunction:: cubicweb.server.set_debug + +.. autoclass:: cubicweb.server.debugged + diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/annexes/rql/index.rst --- a/doc/book/en/annexes/rql/index.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/annexes/rql/index.rst Wed Nov 03 16:38:28 2010 +0100 @@ -4,8 +4,9 @@ This chapter describes the Relation Query Language syntax and its implementation in CubicWeb. .. toctree:: - :maxdepth: 1 + :maxdepth: 2 intro language + debugging implementation diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/annexes/rql/intro.rst --- a/doc/book/en/annexes/rql/intro.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/annexes/rql/intro.rst Wed Nov 03 16:38:28 2010 +0100 @@ -7,8 +7,13 @@ Goals of RQL ~~~~~~~~~~~~ -The goal is to have a language making relations browsing easy. As -such, attributes will be regarded as cases of special relations (in +The goal is to have a semantic language in order to: + +- query relations in a clear syntax +- empowers access to data repository manipulation +- making attributes/relations browsing easy + +As such, attributes will be regarded as cases of special relations (in terms of usage, the user should see no syntactic difference between an attribute and a relation). @@ -40,6 +45,13 @@ conversion and basic types manipulation, which we may want to look at one time or another. Finally, the syntax is a little esoteric. +Datalog +``````` + +Datalog_ is a prolog derived query langage which applies to relational +databases. It is more expressive than RQL in that it accepts either +extensional_ and intensional_ predicates (or relations). As of now, +RQL only deals with intensional relations. The different types of queries ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -59,7 +71,91 @@ Remove entities or relations existing in the database. +RQL relation expressions +~~~~~~~~~~~~~~~~~~~~~~~~ + +RQL expressions apply to a live database defined by a +:ref:`datamodel_definition`. Apart from the main type, or head, of the +expression (search, insert, etc.) 
the most common constituent of an +RQL expression is a (set of) relation expression(s). + +An RQL relation expression contains three components: + +* the subject, which is an entity type +* the predicate, which is a relation definition (an arc of the schema) +* the object, which is either an attribute or a relation to another entity + +.. image:: Graph-ex.gif + :alt: + :align: center + +.. warning:: + + A relation is always expressed in the order: ``subject``, + ``predicate``, ``object``. + + It is important to determine if the entity type is subject or object + to construct a valid expression. Inverting the subject/object is an + error since the relation cannot be found in the schema. + + If one does not have access to the code, one can find the order by + looking at the schema image in manager views (the subject is located + at the beginning of the arrow). + +An example of two related relation expressions:: + + P works_for C, P name N + +RQL variables represent typed entities. The type of entities is +either automatically inferred (by looking at the possible relation +definitions, see :ref:`RelationDefinition`) or explicitely constrained +using the ``is`` meta relation. + +In the example above, we barely need to look at the schema. If +variable names (in the RQL expression) and relation type names (in the +schema) are expresssively designed, the human reader can infer as much +as the |cubicweb| querier. + +The ``P`` variable is used twice but it always represent the same set +of entities. Hence ``P works_for C`` and ``P name N`` must be +compatible in the sense that all the Ps (which *can* refer to +different entity types) must accept the ``works_for`` and ``name`` +relation types. This does restrict the set of possible values of P. + +Adding another relation expression:: + + P works_for C, P name N, C name "logilab" + +This further restricts the possible values of P through an indirect +constraint on the possible values of ``C``. The RQL-level unification_ +happening there is translated to one (or several) joins_ at the +database level. + +.. note:: + + In |cubicweb|, the term `relation` is often found without ambiguity + instead of `predicate`. This predicate is also known as the + `property` of the triple in `RDF concepts`_ -.. _Versa: http://uche.ogbuji.net/tech/rdf/versa/ +RQL Operators +~~~~~~~~~~~~~ + +An RQL expression's head can be completed using various operators such +as ``ORDERBY``, ``GROUPBY``, ``HAVING``, ``LIMIT`` etc. + +RQL relation expressions can be grouped with ``UNION`` or +``WITH``. Predicate oriented keywords such as ``EXISTS``, ``OR``, +``NOT`` are available. + +The complete zoo of RQL operators is described extensively in the +following chapter (:ref:`RQL`). + +.. _RDF concepts: http://www.w3.org/TR/rdf-concepts/ +.. _Versa: http://wiki.xml3k.org/Versa .. _SPARQL: http://www.w3.org/TR/rdf-sparql-query/ +.. _unification: http://en.wikipedia.org/wiki/Unification_(computing) +.. _joins: http://en.wikipedia.org/wiki/Join_(SQL) +.. _Datalog: http://en.wikipedia.org/wiki/Datalog +.. _intensional: http://en.wikipedia.org/wiki/Intensional_definition +.. 
_extensional: http://en.wikipedia.org/wiki/Extension_(predicate_logic) diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/annexes/rql/language.rst --- a/doc/book/en/annexes/rql/language.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/annexes/rql/language.rst Wed Nov 03 16:38:28 2010 +0100 @@ -15,6 +15,7 @@ HAVING, ILIKE, IN, INSERT, LIKE, LIMIT, NOT, NOW, NULL, OFFSET, OR, ORDERBY, SET, TODAY, TRUE, UNION, WHERE, WITH + Variables and Typing ~~~~~~~~~~~~~~~~~~~~ @@ -29,10 +30,11 @@ There is a special type **Any**, referring to a non specific type. We can restrict the possible types for a variable using the -special relation **is**. +special relation **is** in the constraints. + The possible type(s) for each variable is derived from the schema -according to the constraints expressed above and thanks to the relations between -each variable. +according to the constraints expressed above and thanks to the relations +between each variable. Built-in types `````````````` @@ -63,7 +65,7 @@ of logical operators (see :ref:`PriorityOperators`). Mathematical Operators -``````````````````````` +`````````````````````` :: +, -, *, / @@ -74,7 +76,13 @@ =, <, <=, >=, >, ~=, IN, LIKE, ILIKE -* The operator `=` is the default operator. +* Syntax to use comparison operator: + + `VARIABLE relation operator VALUE` + +* The operator `=` is the default operator and can be omitted. + +* `relation` name is always attended * The operator `LIKE` equivalent to `~=` can be used with the special character `%` in a string to indicate that the chain @@ -89,7 +97,7 @@ * The operator `IN` provides a list of possible values: :: - Any X WHERE X name IN ( 'chauvat', 'fayolle', 'di mascio', 'thenault') + Any X WHERE X name IN ('chauvat', 'fayolle', 'di mascio', 'thenault') .. XXX nico: "A trick <> 'bar'" wouldn't it be more convenient than "NOT A trick 'bar'" ? @@ -99,17 +107,13 @@ Operators priority `````````````````` -1. '*', '/' - -2. '+', '-' - -3. 'not' - -4 'and' - -5 'or' - -6 ',' +#. "(", ")" +#. '*', '/' +#. '+', '-' +#. 'NOT' +#. 'AND' +#. 'OR' +#. ',' Search Query @@ -141,25 +145,53 @@ `````````````````` - For grouped queries (e.g. with a GROUPBY clause), all - selected variables should be grouped. - -- To group and/or sort by attributes, we can do: "X,L user U, U - login L GROUPBY L, X ORDERBY L" + selected variables should be grouped at the right of the keyword. - If the sorting method (SORT_METHOD) is not specified, then the sorting is - ascendant. + ascendant (`ASC`). + +- Aggregate Functions: COUNT, MIN, MAX, AVG, SUM, GROUP_CONCAT + +Having +`````` + +The HAVING clause, as in SQL, has been originally introduced to restrict a query +according to value returned by an aggregate function, e.g.:: + + Any X GROUPBY X WHERE X relation Y HAVING COUNT(Y) > 10 + +It may however be used for something else... -- Aggregate Functions: COUNT, MIN, MAX, AVG, SUM +In the WHERE clause, we are limited to 3-expression, such thing can't be +expressed directly as in the SQL's way. But this can be expressed using HAVING +comparison expression. + +For instance, let's say you want to get people whose uppercased first name equals +to another person uppercased first name:: + + Person X WHERE X firstname XFN, Y firstname YFN HAVING X > Y, UPPER(XFN) = UPPER(YFN) +This open some new possibilities. 
Another example:: + + Person X WHERE X birthday XB HAVING YEAR(XB) = 2000 + +That lets you use transformation functions not only in selection but for +restriction as well and to by-pass limitation of the WHERE clause, which was the +major flaw in the RQL language. + +Notice that while we would like this to work without the HAVING clause, this +can't be currently be done because it introduces an ambiguity in RQL's grammar +that can't be handled by Yapps_, the parser's generator we're using. Negation ```````` -* A query such as `Document X WHERE NOT X owned_by U` means "the - documents have no relation `owned_by`". -* But the query `Document X WHERE NOT X owned_by U, U login "syt"` - means "the documents have no relation `owned_by` with the user - syt". They may have a relation "owned_by" with another user. +* A query such as `Document X WHERE NOT X owned_by U` means "the documents have + no relation `owned_by`". + +* But the query `Document X WHERE NOT X owned_by U, U login "syt"` means "the + documents have no relation `owned_by` with the user syt". They may have a + relation "owned_by" with another user. Identity ```````` @@ -170,9 +202,8 @@ Any A WHERE A comments B, A identity B -return all objects that comment themselves. The relation -`identity` is especially useful when defining the rules for securities -with `RQLExpressions`. +return all objects that comment themselves. The relation `identity` is +especially useful when defining the rules for securities with `RQLExpressions`. Limit / offset @@ -181,13 +212,6 @@ Any P ORDERBY N LIMIT 5 OFFSET 10 WHERE P is Person, P firstname N -Function calls -`````````````` -:: - - Any UPPER(N) WHERE P firstname N - -Functions on string: UPPER, LOWER Exists `````` @@ -199,8 +223,14 @@ OR EXISTS(T tags X, T name "priority") -Optional relations (Left outer join) -```````````````````````````````````` +Optional relations +`````````````````` + +It is a similar concept that the `Left outer join`_: + + the result of a left outer join (or simply left join) for table A and B + always contains all records of the "left" table (A), even if the + join-condition does not find any matching record in the "right" table (B). * They allow you to select entities related or not to another. @@ -218,12 +248,6 @@ Any T,P,V WHERE T is Ticket, T concerns P, T done_in V? -Having -`````` -:: - - Any X GROUPBY X WHERE X knows Y HAVING COUNT(Y) > 10 - Subqueries `````````` :: @@ -234,16 +258,29 @@ DISTINCT Any W, REF WITH W, REF BEING ( - (Any W, REF WHERE W is Workcase, W ref REF, + (Any W, REF WHERE W is Workcase, W ref REF, W concerned_by D, D name "Logilab") UNION (Any W, REF WHERE W is Workcase, W ref REF, ' W split_into WP, WP name "WP1") ) +Function calls +`````````````` +:: + + Any UPPER(N) WHERE P firstname N + Any LOWER(N) WHERE P firstname N + +Functions available on string: `UPPER`, `LOWER` + +.. XXX retrieve available function automatically + +For a performance issue, you can enrich the RQL dialect by RDMS (Relational database management system) functions. 
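
As a rough illustration (the ``Person`` entity type and its ``firstname`` attribute are only assumed here, as in the surrounding examples), such a function-call query can be issued from Python code through the ``execute()`` call used elsewhere in this changeset:

.. sourcecode:: python

    # sketch only: run one of the function-call queries above;
    # 'Person' / 'firstname' are assumed to exist in the instance's schema
    rset = req.execute('Any UPPER(N) WHERE P is Person, P firstname N')
    for row in rset.rows:
        print row[0]  # the uppercased firstname
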
+ Examples -```````` +~~~~~~~~ - *Search for the object of identifier 53* :: @@ -280,11 +317,11 @@ P is Person, (P interested_by T, T name 'training') OR (P city 'Paris') -- *The name and surname of all people* +- *The surname and firstname of all people* :: Any N, P WHERE - X is Person, X name N, X first_name P + X is Person, X name N, X firstname P Note that the selection of several entities generally force the use of "Any" because the type specification applies otherwise @@ -304,7 +341,7 @@ Insertion query -~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~ `INSERT` V1 (, V2) \ * `:` [ `WHERE` ] @@ -336,6 +373,7 @@ Update and relation creation queries ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + `SET` [ `WHERE` ] @@ -345,7 +383,7 @@ - *Renaming of the person named 'foo' to 'bar' with the first name changed* :: - SET X name 'bar', X first_name 'original' WHERE X is Person, X name 'foo' + SET X name 'bar', X firstname 'original' WHERE X is Person, X name 'foo' - *Insert a relation of type 'know' between objects linked by the relation of type 'friend'* @@ -356,6 +394,7 @@ Deletion query ~~~~~~~~~~~~~~ + `DELETE` ( V) | (V1 relation v2 ),... [ `WHERE` ] @@ -372,6 +411,7 @@ DELETE X friend Y WHERE X is Person, X name 'foo' + Virtual RQL relations ~~~~~~~~~~~~~~~~~~~~~ @@ -381,6 +421,13 @@ * `has_text`: relation to use to query the full text index (only for entities having fulltextindexed attributes). -* `identity`: relation to use to tell that a RQL variable should be +* `identity`: `Identity`_ relation to use to tell that a RQL variable should be the same as another (but you've to use two different rql variables for querying purpose) + +* `is`: relation to enforce possible types for a variable + + + +.. _Yapps: http://theory.stanford.edu/~amitp/yapps/ +.. _Left outer join: http://en.wikipedia.org/wiki/Join_(SQL)#Left_outer_join diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/conf.py --- a/doc/book/en/conf.py Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/conf.py Wed Nov 03 16:38:28 2010 +0100 @@ -32,8 +32,16 @@ # serve to show the default value. import sys, os +from os import path as osp -from cubicweb import __pkginfo__ as cw +path = __file__ +path = osp.dirname(path) #./doc/book/en +path = osp.dirname(path) #./doc/book/ +path = osp.dirname(path) #./doc/ +path = osp.dirname(path) #./ +path = osp.join(path,'__pkginfo__.py') #./__pkginfo__.py +cw = {} +execfile(path,{},cw) # If your extensions are in another directory, add it here. If the directory # is relative to the documentation root, use os.path.abspath to make it @@ -64,9 +72,9 @@ # other places throughout the built documents. # # The short X.Y version. -version = '.'.join(str(n) for n in cw.numversion[:2]) +version = '.'.join(str(n) for n in cw['numversion'][:2]) # The full version, including alpha/beta/rc tags. -release = cw.version +release = cw['version'] # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devrepo/cubes/layout.rst --- a/doc/book/en/devrepo/cubes/layout.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devrepo/cubes/layout.rst Wed Nov 03 16:38:28 2010 +0100 @@ -108,8 +108,24 @@ The :file:`__pkginfo__.py` file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -It contains metadata describing your cube, mostly useful for -packaging. +It contains metadata describing your cube, mostly useful for packaging. 
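
As a rough sketch (the cube names and version bounds below are purely illustrative), the dependency dictionaries discussed in the next paragraph typically look like this in a cube's :file:`__pkginfo__.py`:

.. sourcecode:: python

    # hypothetical excerpt of a cube's __pkginfo__.py
    __depends__ = {'cubicweb': '>= 3.9.0',        # the framework itself
                   'cubicweb-mycube': ''}         # another cube ('cubicweb-<cubename>' form)
    __recommends__ = {'cubicweb-othercube': ''}   # loaded before this cube when installed
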
+ +Two important attributes of this module are __depends__ and __recommends__ +dictionaries that indicates what should be installed (and each version if +necessary) for the cube to work. + +Dependency on other cubes are expected to be of the form 'cubicweb-'. + +When an instance is created, dependencies are automatically installed, while +recommends are not. + +Recommends may be seen as a kind of 'weak dependency'. Eg, the most important +effect of recommending a cube is that, if cube A recommends cube B, the cube B +will be loaded before the cube A (same thing happend when A depends on B). + +Having this behaviour is sometime desired: on schema creation, you may rely on +something defined in the other's schema; on database creation, on something +created by the other's postcreate, and so on. :file:`migration/precreate.py` and :file:`migration/postcreate.py` diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devrepo/datamodel/definition.rst --- a/doc/book/en/devrepo/datamodel/definition.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devrepo/datamodel/definition.rst Wed Nov 03 16:38:28 2010 +0100 @@ -1,5 +1,7 @@ .. -*- coding: utf-8 -*- +.. _datamodel_definition: + Yams *schema* ------------- @@ -11,6 +13,8 @@ .. _`Yams`: http://www.logilab.org/project/yams +.. _datamodel_overview: + Overview ~~~~~~~~ @@ -78,10 +82,16 @@ a set of attributes and relations, and some permissions which define who can add, read, update or delete entities of this type. -The following built-in types are available: ``String``, ``Int``, -``Float``, ``Decimal``, ``Boolean``, ``Date``, ``Datetime``, ``Time``, -``Interval``, ``Byte`` and ``Password``. They can only be used as -attributes of an other entity type. +The following built-in types are available: ``String``, +``Int``, ``Float``, ``Decimal``, ``Boolean``, +``Date``, ``Datetime``, ``Time``, ``Interval``, ``Byte`` and +``Password``. They can only be used as attributes of an other entity +type. + +There is also a `RichString` kindof type: + + .. autoclass:: yams.buildobjs.RichString + You can find more base entity types in :ref:`pre_defined_entity_types`. @@ -293,36 +303,38 @@ For *CubicWeb* in particular: * we associate rights at the entities/relations schema level -* for each entity, we distinguish four kinds of permissions: `read`, - `add`, `update` and `delete` -* for each relation, we distinguish three kinds of permissions: `read`, - `add` and `delete` (it is not possible to `modify` a relation) + * the default groups are: `administrators`, `users` and `guests` -* by default, users belong to the `users` group -* there is a virtual group called `owners` to which we - can associate only `delete` and `update` permissions + +* users belong to the `users` group + +* there is a virtual group called `owners` to which we can associate only + `delete` and `update` permissions - * we can not add users to the `Owners` group, they are - implicitly added to it according to the context of the objects - they own - * the permissions of this group are only checked on `update`/`delete` - actions if all the other groups the user belongs to do not provide - those permissions + * we can not add users to the `owners` group, they are implicitly added to it + according to the context of the objects they own + + * the permissions of this group are only checked on `update`/`delete` actions + if all the other groups the user belongs to do not provide those permissions Setting permissions is done with the attribute `__permissions__` of entities and -relation types. 
The value of this attribute is a dictionary where the keys are the access types -(action), and the values are the authorized groups or expressions. +relation definition. The value of this attribute is a dictionary where the keys +are the access types (action), and the values are the authorized groups or +expressions. For an entity type, the possible actions are `read`, `add`, `update` and `delete`. -For a relation type, the possible actions are `read`, `add`, and `delete`. +For a relation, the possible actions are `read`, `add`, and `delete`. + +For an attribute, the possible actions are `read`, and `update`. For each access type, a tuple indicates the name of the authorized groups and/or one or multiple RQL expressions to satisfy to grant access. The access is provided if the user is in one of the listed groups or if one of the RQL condition is satisfied. + The standard user groups ```````````````````````` @@ -336,66 +348,77 @@ This can only be used for the actions `update` and `delete` of an entity type. -It is also possible to use specific groups if they are defined in the -precreate script of the cube (``migration/precreate.py``). Defining groups in -postcreate script or later makes them unavailable for security -purposes (in this case, an `sync_schema_props_perms` command has to -be issued in a CubicWeb shell). +It is also possible to use specific groups if they are defined in the precreate +script of the cube (``migration/precreate.py``). Defining groups in postcreate +script or later makes them unavailable for security purposes (in this case, an +`sync_schema_props_perms` command has to be issued in a CubicWeb shell). Use of RQL expression for write permissions ``````````````````````````````````````````` -It is possible to define RQL expression to provide update permission -(`add`, `delete` and `update`) on relation and entity types. -RQL expression for entity type permission: +It is possible to define RQL expression to provide update permission (`add`, +`delete` and `update`) on entity type / relation definitions. An rql expression +is a piece of query (corresponds to the WHERE statement of an RQL query), and the +expression will be considered as satisfied if it returns some results. They can +not be used in `read` permission. 
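
For instance, here is a sketch of an entity-level permission declaration using such an expression (the ``Ticket`` entity type is hypothetical; the classes and pre-defined variables used here are detailed just below):

.. sourcecode:: python

    from yams.buildobjs import EntityType
    from cubicweb.schema import ERQLExpression

    class Ticket(EntityType):
        """hypothetical entity type: only a manager or the entity's owner
        may update or delete it"""
        __permissions__ = {'read':   ('managers', 'users', 'guests'),
                           'add':    ('managers', 'users'),
                           'update': ('managers', ERQLExpression('X owned_by U')),
                           'delete': ('managers', ERQLExpression('X owned_by U'))}
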
-* you have to use the class `ERQLExpression` +To use RQL expression in entity type permission: -* the used expression corresponds to the WHERE statement of an RQL query +* you have to use the class :class:`~cubicweb.schema.ERQLExpression` * in this expression, the variables `X` and `U` are pre-defined references - respectively on the current entity (on which the action is verified) and - on the user who send the request + respectively on the current entity (on which the action is verified) and on the + user who send the request + +For RQL expressions on a relation type, the principles are the same except for +the following: -* it is possible to use, in this expression, a special relation - "has__permission" where the subject is the user and the - object is any variable, meaning that the user needs to have - permission to execute the action on the entities related - to this variable +* you have to use the class :class:`~cubicweb.schema.RRQLExpression` instead of + :class:`~cubicweb.schema.ERQLExpression` -For RQL expressions on a relation type, the principles are the same except -for the following: +* in the expression, the variables `S`, `O` and `U` are pre-defined references to + respectively the subject and the object of the current relation (on which the + action is being verified) and the user who executed the query + +To define security for attributes of an entity (non-final relation), you have to +use the class :class:`~cubicweb.schema.ERQLExpression` in which `X` represents +the entity the attribute belongs to. -* you have to use the class `RRQLExpression` in the case of a non-final relation +It is possible to use in those expression a special relation +`has__permission` where the subject is the user (eg 'U') and the object +is any variable representing an entity (usually 'X' in +:class:`~cubicweb.schema.ERQLExpression`, 'S' or 'O' in +:class:`~cubicweb.schema.RRQLExpression`), meaning that the user needs to have +permission to execute the action on the entities represented by this +variable. It's recommanded to use this feature whenever possible since it +simplify greatly complex security definition and upgrade. -* in the expression, the variables `S`, `O` and `U` are pre-defined references - to respectively the subject and the object of the current relation (on - which the action is being verified) and the user who executed the query -* we can also define rights over attributes of an entity (non-final relation), - knowing that: +.. sourcecode:: python - - to define RQL expression, we have to use the class `ERQLExpression` - in which `X` represents the entity the attribute belongs to + class my_relation(RelationDefinition): + __permissions__ = {'read': ('managers', 'users'), + 'add': ('managers', RRQLExpression('U has_update_permission S')), + 'delete': ('managers', RRQLExpression('U has_update_permission S')) + } - - the permissions `add` and `delete` are equivalent. Only `add`/`read` - are actually taken in consideration. +In the above example, user will be allowed to add/delete `my_relation` if he has +the `update` permission on the subject of the relation. .. note:: - Potentially, the `use of an RQL expression to add an entity or a - relation` can cause problems for the user interface, because if the - expression uses the entity or the relation to create, then we are - not able to verify the permissions before we actually add the entity - (please note that this is not a problem for the RQL server at all, - because the permissions checks are done after the creation). 
In such - case, the permission check methods (CubicWebEntitySchema.check_perm - and has_perm) can indicate that the user is not allowed to create - this entity but can obtain the permission. To compensate this - problem, it is usually necessary, for such case, to use an action - that reflects the schema permissions but which enables to check - properly the permissions so that it would show up if necessary. + Potentially, the `use of an RQL expression to add an entity or a relation` can + cause problems for the user interface, because if the expression uses the + entity or the relation to create, we are not able to verify the permissions + before we actually added the entity (please note that this is not a problem for + the RQL server at all, because the permissions checks are done after the + creation). In such case, the permission check methods + (CubicWebEntitySchema.check_perm and has_perm) can indicate that the user is + not allowed to create this entity while it would obtain the permission. To + compensate this problem, it is usually necessary in such case to use an action + that reflects the schema permissions but which check properly the permissions + so that it would show up only if possible. Use of RQL expression for reading rights @@ -403,12 +426,54 @@ The principles are the same but with the following restrictions: -* we can not use `RRQLExpression` on relation types for reading +* you can not use rql expression for the `read` permission of relations and + attributes, -* special relations "has__permission" can not be used +* you can not use special `has__permission` relation in the rql + expression. +Important notes about write permissions checking +```````````````````````````````````````````````` +Write permissions (e.g. 'add', 'update', 'delete') are checked in core hooks. + +When a permission is checked slightly vary according to if it's an entity or +relation, and if the relation is an attribute relation or not). It's important to +understand that since according to when a permission is checked, values returned +by rql expressions may changes, hence the permission being granted or not. + +Here are the current rules: + +1. permission to add/update entity and its attributes are checked: + + - on commit if the entity has been added + + - in an 'after_update_entity' hook if the entity has been updated. If it fails + at this time, it will be retried on commit (hence you get the permission if + you have it just after the modification or *at* commit time) + +2. permission to delete an entity is checked in 'before_delete_entity' hook + +3. permission to add a relation is checked either: + + - in 'before_add_relation' hook if the relation type is in the + `BEFORE_ADD_RELATIONS` set + + - else at commit time if the relation type is in the `ON_COMMIT_ADD_RELATIONS` + set + + - else in 'after_add_relation' hook (the default) + +4. permission to delete a relation is checked in 'before_delete_relation' hook + +Last but not least, remember queries issued from hooks and operation are by +default 'unsafe', eg there are no read or write security checks. + +See :mod:`cubicweb.hooks.security` for more details. + + +.. _yams_example: Defining your schema using yams ------------------------------- @@ -459,6 +524,8 @@ birth and a relation that connects a `Person` to another entity of type `Company` through the semantic `works_for`. + + :Naming convention: Entity class names must start with an uppercase letter. 
The common @@ -494,15 +561,15 @@ means that you need two separate entities that implement the `ITree` interface and get the result from `.children()` which ever entity is concerned. -Inheritance -``````````` -XXX feed me +.. Inheritance +.. ``````````` +.. XXX feed me Definition of relations ~~~~~~~~~~~~~~~~~~~~~~~ -XXX add note about defining relation type / definition +.. XXX add note about defining relation type / definition A relation is defined by a Python class heriting `RelationType`. The name of the class corresponds to the name of the type. The class then contains @@ -524,12 +591,7 @@ * a string corresponding to an entity type * a tuple of string corresponding to multiple entity types -* special string such as follows: - - - "**": all types of entities - - "*": all types of non-meta entities - - "@": all types of meta entities but not system entities (e.g. used for - the basic schema description) +* the '*' special string, meaning all types of entities When a relation is not inlined and not symmetrical, and it does not require specific permissions, it can be defined using a `SubjectRelation` @@ -546,7 +608,7 @@ :Historical note: It has been historically possible to use `ObjectRelation` which - defines a relation in the opposite direction. This feature is soon to be + defines a relation in the opposite direction. This feature is deprecated and therefore should not be used in newly written code. :Future deprecation note: diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devrepo/devcore/dbapi.rst --- a/doc/book/en/devrepo/devcore/dbapi.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devrepo/devcore/dbapi.rst Wed Nov 03 16:38:28 2010 +0100 @@ -22,10 +22,14 @@ .. note:: - While executing update queries (SET, INSERT, DELETE), if a query generates - an error related to security, a rollback is automatically done on the current + If a query generates an error related to security (:exc:`Unauthorized`) or to + integrity (:exc:`ValidationError`), the transaction can still continue but you + won't be able to commit it, a rollback will be necessary to start a new transaction. + Also, a rollback is automatically done if an error occurs during commit. + + Executing RQL queries from a view or a hook ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devrepo/entityclasses/adapters.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/en/devrepo/entityclasses/adapters.rst Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,173 @@ +.. _adapters: + +Interfaces and Adapters +----------------------- + +Interfaces are the same thing as object-oriented programming `interfaces`_. +Adapter refers to a well-known `adapter`_ design pattern that helps separating +concerns in object oriented applications. + +.. _`interfaces`: http://java.sun.com/docs/books/tutorial/java/concepts/interface.html +.. _`adapter`: http://en.wikipedia.org/wiki/Adapter_pattern + +In |cubicweb| adapters provide logical functionalities to entity types. They +are introduced in version `3.9`. Before that one had to implement Interfaces in +entity classes to achieve a similar goal. However, the problem with this +approach is that is clutters the entity class's namespace, exposing name +collision risks with schema attributes/relations or even methods names +(different interfaces may define the same method with not necessarily the same +behaviour expected). + +Definition of an adapter is quite trivial. An excerpt from cubicweb +itself (found in :mod:`cubicweb.entities.adapters`): + +.. 
sourcecode:: python + + + class ITreeAdapter(EntityAdapter): + """This adapter has to be overriden to be configured using the + tree_relation, child_role and parent_role class attributes to + benefit from this default implementation + """ + __regid__ = 'ITree' + + child_role = 'subject' + parent_role = 'object' + + def children_rql(self): + """returns RQL to get children """ + return self.entity.cw_related_rql(self.tree_relation, self.parent_role) + +The adapter object has ``self.entity`` attribute which represents the +entity being adapted. + +.. Note:: + + Adapters came with the notion of service identified by the registry identifier + of an adapters, hence dropping the need for explicit interface and the + :class:`cubicweb.selectors.implements` selector. You should instead use + :class:`cubicweb.selectors.is_instance` when you want to select on an entity + type, or :class:`cubicweb.selectors.adaptable` when you want to select on a + service. + + +Specializing and binding an adapter +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. sourcecode:: python + + from cubicweb.entities.adapters import ITreeAdapter + + class MyEntityITreeAdapter(ITreeAdapter): + __select__ = is_instance('MyEntity') + tree_relation = 'filed_under' + +The ITreeAdapter here provides a default implementation. The +tree_relation class attribute is actually used by this implementation +to help implement correct behaviour. + +Here we provide a specific implementation which will be bound for +``MyEntity`` entity type (the `adaptee`). + + +.. _interfaces_to_adapters: + +Converting code from Interfaces/Mixins to Adapters +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Here we go with a small example. Before: + +.. sourcecode:: python + + from cubicweb.selectors import implements + from cubicweb.interfaces import ITree + from cubicweb.mixins import ITreeMixIn + + class MyEntity(ITreeMixIn, AnyEntity): + __implements__ = AnyEntity.__implements__ + (ITree,) + + + class ITreeView(EntityView): + __select__ = implements('ITree') + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + children = entity.children() + +After: + +.. sourcecode:: python + + from cubicweb.selectors import adaptable, is_instance + from cubicweb.entities.adapters import ITreeAdapter + + class MyEntityITreeAdapter(ITreeAdapter): + __select__ = is_instance('MyEntity') + + class ITreeView(EntityView): + __select__ = adaptable('ITree') + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + itree = entity.cw_adapt_to('ITree') + children = itree.children() + +As we can see, the interface/mixin duality disappears and the entity +class itself is completely freed from these concerns. When you want +to use the ITree interface of an entity, call its `cw_adapt_to` method +to get an adapter for this interface, then access to members of the +interface on the adapter + +Let's look at an example where we defined everything ourselves. We +start from: + +.. sourcecode:: python + + class IFoo(Interface): + def bar(self, *args): + raise NotImplementedError + + class MyEntity(AnyEntity): + __regid__ = 'MyEntity' + __implements__ = AnyEntity.__implements__ + (IFoo,) + + def bar(self, *args): + return sum(captain.age for captain in self.captains) + + class FooView(EntityView): + __regid__ = 'mycube.fooview' + __select__ = implements('IFoo') + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + self.w('bar: %s' % entity.bar()) + +Converting to: + +.. 
sourcecode:: python + + class IFooAdapter(EntityAdapter): + __regid__ = 'IFoo' + __select__ = is_instance('MyEntity') + + def bar(self, *args): + return sum(captain.age for captain in self.entity.captains) + + class FooView(EntityView): + __regid__ = 'mycube.fooview' + __select__ = adaptable('IFoo') + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + self.w('bar: %s' % entity.cw_adapt_to('IFoo').bar()) + +.. note:: + + When migrating an entity method to an adapter, the code can be moved as is + except for the `self` of the entity class, which in the adapter must become `self.entity`. + +Adapters defined in the library +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. automodule:: cubicweb.entities.adapters + :members: + +More are defined in web/views. diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devrepo/entityclasses/application-logic.rst --- a/doc/book/en/devrepo/entityclasses/application-logic.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devrepo/entityclasses/application-logic.rst Wed Nov 03 16:38:28 2010 +0100 @@ -1,5 +1,5 @@ -How to use entities objects ---------------------------- +How to use entities objects and adapters +---------------------------------------- The previous chapters detailed the classes and methods available to the developper at the so-called `ORM`_ level. However they say little @@ -7,9 +7,9 @@ .. _`ORM`: http://en.wikipedia.org/wiki/Object-relational_mapping -Entities objects are used in the repository and web sides of -CubicWeb. On the repository side of things, one should manipulate them -in Hooks and Operations. +Entities objects (and their adapters) are used in the repository and +web sides of CubicWeb. On the repository side of things, one should +manipulate them in Hooks and Operations. Hooks and Operations provide support for the implementation of rules such as computed attributes, coherency invariants, etc (they play the @@ -32,21 +32,22 @@ wire. There is no way state can be shared between these processes (there is a specific API for that). Hence, it is not possible to use entity objects as messengers between these components of an -application. It means that an attribute set as in `obj.x = 42`, +application. It means that an attribute set as in ``obj.x = 42``, whether or not x is actually an entity schema attribute, has a short life span, limited to the hook, operation or view within which the object was built. Setting an attribute or relation value can be done in the context of a -Hook/Operation, using the obj.set_attributes(x=42) notation or a plain +Hook/Operation, using the obj.set_relations(x=42) notation or a plain RQL SET expression. In views, it would be preferable to encapsulate the necessary logic in -a method of the concerned entity class(es). But of course, this advice -is also reasonnable for Hooks/Operations, though the separation of -concerns here is less stringent than in the case of views. +a method of an adapter for the concerned entity class(es). But of +course, this advice is also reasonnable for Hooks/Operations, though +the separation of concerns here is less stringent than in the case of +views. -This leads to the practical role of entity objects: it's where an +This leads to the practical role of objects adapters: it's where an important part of the application logic lie (the other part being located in the Hook/Operations). @@ -58,26 +59,31 @@ .. 
sourcecode:: python - class Project(TreeMixIn, AnyEntity): + from cubicweb.entities.adapters import ITreeAdapter + + class ProjectAdapter(ITreeAdapter): + __select__ = is_instance('Project') + tree_relation = 'subproject_of' + + class Project(AnyEntity): __regid__ = 'Project' - __implements__ = AnyEntity.__implements__ + (ITree,) fetch_attrs, fetch_order = fetch_config(('name', 'description', 'description_format', 'summary')) TICKET_DEFAULT_STATE_RESTR = 'S name IN ("created","identified","released","scheduled")' - tree_attribute = 'subproject_of' - parent_target = 'subject' - children_target = 'object' - def dc_title(self): return self.name -First we see that it uses an ITree interface and the TreeMixIn default -implementation. The attributes `tree_attribute`, `parent_target` and -`children_target` are used by the TreeMixIn code. This is typically -used in views concerned with the representation of tree-like -structures (CubicWeb provides several such views). +The fact that the `Project` entity type implements an ``ITree`` +interface is materialized by the ``ProjectAdapter`` class (inheriting +the pre-defined ``ITreeAdapter`` whose __regid__ is of course +``ITree``), which will be selected on `Project` entity types because +of its selector. On this adapter, we redefine the ``tree_relation`` +attribute of the ITreeAdapter class. + +This is typically used in views concerned with the representation of +tree-like structures (CubicWeb provides several such views). It is important that the views themselves try not to implement this logic, not only because such views would be hardly applyable to other @@ -89,7 +95,17 @@ about the transitive closure of the child relation). This is a further argument to implement it at entity class level. -The `dc_title` method provides a (unicode string) value likely to be +The fetch_attrs, fetch_order class attributes are parameters of the +`ORM`_ layer. They tell which attributes should be loaded at once on +entity object instantiation (by default, only the eid is known, other +attributes are loaded on demand), and which attribute is to be used to +order the .related() and .unrelated() methods output. + +We can observe the big TICKET_DEFAULT_STATE_RESTR is a pure +application domain piece of data. There is, of course, no limitation +to the amount of class attributes of this kind. + +The ``dc_title`` method provides a (unicode string) value likely to be consummed by views, but note that here we do not care about output encodings. We care about providing data in the most universal format possible, because the data could be used by a web view (which would be @@ -97,17 +113,14 @@ oriented output (which would have the necessary context about the needed byte stream encoding). -The fetch_attrs, fetch_order class attributes are parameters of the -`ORM`_ layer. They tell which attributes should be loaded at once on -entity object instantiation (by default, only the eid is known, other -attributes are loaded on demand), and which attribute is to be used to -order the .related() and .unrelated() methods output. +.. note:: -Finally, we can observe the big TICKET_DEFAULT_STATE_RESTR is a pure -application domain piece of data. There is, of course, no limitation -to the amount of class attributes of this kind. + The dublin code `dc_xxx` methods are not moved to an adapter as they + are extremely prevalent in cubicweb and assorted cubes and should be + available for all entity types. -Let us now dig into more substantial pieces of code. 
+Let us now dig into more substantial pieces of code, continuing the +Project class. .. sourcecode:: python @@ -151,7 +164,7 @@ * it is NOT concerned with database coherency (this is the realm of Hooks/Operations); in other words, it assumes a coherent world -* it is NOT concerned with end-user interfaces +* it is NOT (directly) concerned with end-user interfaces * however it can be used in both contexts diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devrepo/entityclasses/data-as-objects.rst --- a/doc/book/en/devrepo/entityclasses/data-as-objects.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devrepo/entityclasses/data-as-objects.rst Wed Nov 03 16:38:28 2010 +0100 @@ -4,23 +4,22 @@ Python-level access to persistent data is provided by the :class:`Entity ` class. -An entity class is bound to a schema entity type. Descriptors are added when +.. XXX this part is not clear. refactor it. + +An entity class is bound to a schema entity type. Descriptors are added when classes are registered in order to initialize the class according to its schema: -* we can access the defined attributes in the schema thanks to the attributes of - the same name on instances (typed value) +* the attributes defined in the schema appear as attributes of these classes -* we can access the defined relations in the schema thanks to the relations of - the same name on instances (entities instances list) - +* the relations defined in the schema appear as attributes of these classes, + but are lists of instances `Formatting and output generation`: * `view(__vid, __registry='views', **kwargs)`, applies the given view to the entity (and returns an unicode string) -* `absolute_url(*args, **kwargs)`, returns an absolute URL to access the primary view - of an entity +* `absolute_url(*args, **kwargs)`, returns an absolute URL including the base-url * `rest_path()`, returns a relative REST URL to get the entity @@ -31,7 +30,7 @@ `Data handling`: * `as_rset()`, converts the entity into an equivalent result set simulating the - request `Any X WHERE X eid _eid_` + request `Any X WHERE X eid _eid_` * `complete(skip_bytes=True)`, executes a request that recovers at once all the missing attributes of an entity @@ -52,10 +51,10 @@ values given named parameters * `set_relations(**kwargs)`, add relations to the given object. To - set a relation where this entity is the object of the relation, - use `reverse_` as argument name. Values may be an - entity, a list of entities, or None (meaning that all relations of - the given type from or to this object should be deleted). + set a relation where this entity is the object of the relation, + use `reverse_` as argument name. Values may be an + entity, a list of entities, or None (meaning that all relations of + the given type from or to this object should be deleted). * `copy_relations(ceid)`, copies the relations of the entities having the eid given in the parameters on the current entity @@ -66,7 +65,7 @@ The :class:`AnyEntity` class ---------------------------- -To provide a specific behavior for each entity, we have to define a class +To provide a specific behavior for each entity, we can define a class inheriting from `cubicweb.entities.AnyEntity`. In general, we define this class in `mycube.entities` module (or in a submodule if we want to split code among multiple files) so that it will be available on both server and client side. 
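A minimal sketch of such an entity class (hypothetical cube and attribute names, not taken from the patch):

.. sourcecode:: python

    # mycube/entities.py
    from cubicweb.entities import AnyEntity, fetch_config

    class Ticket(AnyEntity):
        __regid__ = 'Ticket'  # must match the entity type name defined in the schema
        fetch_attrs, fetch_order = fetch_config(('title',))

        def dc_title(self):
            # textual representation consumed by generic views
            return self.title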
@@ -111,7 +110,7 @@ `Misc methods`: * `after_deletion_path`, return (path, parameters) which should be - used as redirect information when this entity is being deleted + used as redirect information when this entity is being deleted * `pre_web_edit`, callback called by the web editcontroller when an entity will be created/modified, to let a chance to do some entity @@ -139,5 +138,18 @@ one in OTHER_CUBE. These types are stored in the `etype` section of the `vregistry`. -Notice this is different than yams schema inheritance. +Notice this is different than yams schema inheritance, which is an +experimental undocumented feature. + + +Application logic +----------------- +While a lot of custom behaviour and application logic can be +implemented using entity classes, the programmer must be aware that +adding new attributes and method on an entity class adds may shadow +schema-level attribute or relation definitions. + +To keep entities clean (mostly data structures plus a few universal +methods such as listed above), one should use `adapters` (see +:ref:`adapters`). diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devrepo/entityclasses/index.rst --- a/doc/book/en/devrepo/entityclasses/index.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devrepo/entityclasses/index.rst Wed Nov 03 16:38:28 2010 +0100 @@ -9,5 +9,5 @@ data-as-objects load-sort - interfaces + adapters application-logic diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devrepo/entityclasses/interfaces.rst --- a/doc/book/en/devrepo/entityclasses/interfaces.rst Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,65 +0,0 @@ -Interfaces ----------- - -This is the same thing as object-oriented programming `interfaces`_. - -.. _`interfaces`: http://java.sun.com/docs/books/tutorial/java/concepts/interface.html - -Definition of an interface is quite trivial. An example from cubicweb -itself (found in cubicweb/interfaces.py): - -.. sourcecode:: python - - class ITree(Interface): - - def parent(self): - """returns the parent entity""" - - def children(self): - """returns the item's children""" - - def children_rql(self): - """returns RQL to get children""" - - def iterchildren(self): - """iterates over the item's children""" - - def is_leaf(self): - """returns true if this node as no child""" - - def is_root(self): - """returns true if this node has no parent""" - - def root(self): - """returns the root object""" - - -Declaration of interfaces implemented by a class -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. sourcecode:: python - - from cubicweb.interfaces import ITree - from cubicweb.mixins import TreeMixIn - - class MyEntity(TreeMixIn, AnyEntity): - __regid__ = 'MyEntity' - __implements__ = AnyEntity.__implements__ + ('ITree',) - - tree_attribute = 'filed_under' - -The TreeMixIn here provides a default implementation for the -interface. The tree_attribute class attribute is actually used by this -implementation to help implement correct behaviour. - -Interfaces (and some implementations as mixins) defined in the library -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. automodule:: cubicweb.interfaces - :members: - -.. 
automodule:: cubicweb.mixins - :members: - - - diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devrepo/migration.rst --- a/doc/book/en/devrepo/migration.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devrepo/migration.rst Wed Nov 03 16:38:28 2010 +0100 @@ -91,6 +91,24 @@ * `session`, repository session object +New cube dependencies +--------------------- + +If your code depends on some new cubes, you have to add them in a migration +script by using: + +* `add_cube(cube, update_database=True)`, add a cube. +* `add_cubes(cubes, update_database=True)`, add a list of cubes. + +The `update_database` parameter is telling if the database schema +should be updated or if only the relevant persistent property should be +inserted (for the case where a new cube has been extracted from an +existing one, so the new cube schema is actually already in there). + +If some of the added cubes are already used by an instance, they'll simply be +silently skipped. + + Schema migration ---------------- The following functions for schema migration are available in `repository` @@ -182,6 +200,40 @@ * `option_removed(oldname, newname)`, indicates that an option has been deleted. +The `config` variable is an object which can be used to access the +configuration values, for reading and updating, with a dictionary-like +syntax. + +Example 1: migration script changing the variable 'sender-addr' in +all-in-one.conf. The script also checks that in that the instance is +configured with a known value for that variable, and only updates the +value in that case. + +.. sourcecode:: python + + wrong_addr = 'cubicweb@loiglab.fr' # known wrong address + fixed_addr = 'cubicweb@logilab.fr' + configured_addr = config.get('sender-addr') + # check that the address has not been hand fixed by a sysadmin + if configured_addr == wrong_addr: + config['sender-addr'] = fixed-addr + config.save() + +Example 2: checking the value of the database backend driver, which +can be useful in case you need to issue backend-dependent raw SQL +queries in a migration script. + +.. sourcecode:: python + + dbdriver = config.sources()['system']['db-driver'] + if dbdriver == "sqlserver2005": + # this is now correctly handled by CW :-) + sql('ALTER TABLE cw_Xxxx ALTER COLUMN cw_name varchar(64) NOT NULL;') + commit() + else: # postgresql + sync_schema_props_perms(ertype=('Xxxx', 'name', 'String'), + syncperms=False) + Others migration functions -------------------------- diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devrepo/profiling.rst --- a/doc/book/en/devrepo/profiling.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devrepo/profiling.rst Wed Nov 03 16:38:28 2010 +0100 @@ -1,3 +1,5 @@ +.. _PROFILING: + Profiling and performance ========================= diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devrepo/repo/hooks.rst --- a/doc/book/en/devrepo/repo/hooks.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devrepo/repo/hooks.rst Wed Nov 03 16:38:28 2010 +0100 @@ -58,6 +58,10 @@ implementing `precommit_event` and other standard methods (wholly described in :ref:`operations_api`). +.. hint:: + + It is a good practice, to write unit tests for each hook. See an example in :ref:`hook_test` + Events ------ @@ -157,13 +161,13 @@ .. 
sourcecode:: python from cubicweb import ValidationError - from cubicweb.selectors import implements + from cubicweb.selectors import is_instance from cubicweb.server.hook import Hook class PersonAgeRange(Hook): __regid__ = 'person_age_range' events = ('before_add_entity', 'before_update_entity') - __select__ = Hook.__select__ & implements('Person') + __select__ = Hook.__select__ & is_instance('Person') def __call__(self): if 0 >= self.entity.age <= 120: @@ -173,7 +177,7 @@ Hooks being AppObjects like views, they have a __regid__ and a __select__ class attribute. The base __select__ is augmented with an -`implements` selector matching the desired entity type. The `events` +`is_instance` selector matching the desired entity type. The `events` tuple is used by the Hook.__select__ base selector to dispatch the hook on the right events. In an entity hook, it is possible to dispatch on any entity event (e.g. 'before_add_entity', @@ -241,6 +245,8 @@ .. sourcecode:: python + from cubicweb.server.hook import Hook, Operation, match_rtype + def check_cycle(self, session, eid, rtype, role='subject'): parents = set([eid]) parent = session.entity_from_eid(eid) @@ -300,7 +306,7 @@ class CheckSubsidiaryCycleOp(Operation): def precommit_event(self): - for eid in self._cw.transaction_data['subsidiary_cycle_detection']: + for eid in self.session.transaction_data['subsidiary_cycle_detection']: check_cycle(self.session, eid, self.rtype) Here, we call set_operation with a session object, a specially forged diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devrepo/repo/sessions.rst --- a/doc/book/en/devrepo/repo/sessions.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devrepo/repo/sessions.rst Wed Nov 03 16:38:28 2010 +0100 @@ -3,15 +3,16 @@ Sessions ======== -There are three kinds of sessions. - -* `user sessions` are the most common: they are related to users and - carry security checks coming with user credentials +Sessions are object carrying the `.execute` method to query the data +sources. -* `super sessions` are children of ordinary user sessions and allow to - bypass security checks (they are created by calling unsafe_execute - on a user session); this is often convenient in hooks which may - touch data that is not directly updatable by users +Kinds of sessions +----------------- + +There are two kinds of sessions. + +* `normal sessions` are the most common: they are related to users and + carry security checks coming with user credentials * `internal sessions` have all the powers; they are also used in only a few situations where you don't already have an adequate session at @@ -20,8 +21,181 @@ .. note:: Do not confuse the session type with their connection mode, for - instance : 'in memory' or 'pyro'. + instance : `in memory` or `pyro`. + +Normal sessions are typically named `_cw` in most appobjects or +sometimes just `session`. + +Internal sessions are available from the `Repository` object and are +to be used like this: + +.. sourcecode:: python + + session = self.repo.internal_session() + try: + do_stuff_with(session) + finally: + session.close() + +.. warning:: + Do not forget to close such a session after use for a session leak + will quickly lead to an application crash. 
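As a sketch of a typical use of an internal session (hypothetical hook and RQL query, not part of the patch), for instance from a `server_startup` hook where no user session is available yet:

.. sourcecode:: python

    from cubicweb.server.hook import Hook

    class StartupCleanupHook(Hook):
        """delete orphan comments when the repository starts (illustrative only)"""
        __regid__ = 'mycube.startup_cleanup'
        events = ('server_startup',)

        def __call__(self):
            session = self.repo.internal_session()
            try:
                session.execute('DELETE Comment C WHERE NOT C comments X')
                session.commit()
            finally:
                session.close()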
+ +Authentication and management of sessions +----------------------------------------- + +The authentication process is a ballet involving a few dancers: + +* through its `connect` method the top-level application object (the + `CubicWebPublisher`) will open a session whenever a web request + comes in; it asks the `session manager` to open a session (giving + the web request object as context) using `open_session` + + * the session manager asks its authentication manager (which is a + `component`) to authenticate the request (using `authenticate`) + + * the authentication manager asks, in order, to its authentication + information retrievers, a login and an opaque object containing + other credentials elements (calling `authentication_information`), + giving the request object each time + + * the default retriever (bizarrely named + `LoginPaswordRetreiver`) will in turn defer login and password + fetching to the request object (which, depending on the + authentication mode (`cookie` or `http`), will do the + appropriate things and return a login and a password) + + * the authentication manager, on success, asks the `Repository` + object to connect with the found credentials (using `connect`) + + * the repository object asks authentication to all of its + sources which support the `CWUser` entity with the given + credentials; when successful it can build the cwuser entity, + from which a regular `Session` object is made; it returns the + session id + + * the source in turn will defer work to an authentifier class + that define the ultimate `authenticate` method (for instance + the native source will query the database against the + provided credentials) + + * the authentication manager, on success, will call back _all_ + retrievers with `authenticated` and return its authentication + data (on failure, it will try the anonymous login or, if the + configuration forbids it, raise an `AuthenticationError`) + +Writing authentication plugins +------------------------------ + +Sometimes CubicWeb's out-of-the-box authentication schemes (cookie and +http) are not sufficient. Nowadays there is a plethore of such schemes +and the framework cannot provide them all, but as the sequence above +shows, it is extensible. + +Two levels have to be considered when writing an authentication +plugin: the web client and the repository. + +We invented a scenario where it makes sense to have a new plugin in +each side: some middleware will do pre-authentication and under the +right circumstances add a new HTTP `x-foo-user` header to the query +before it reaches the CubicWeb instance. For a concrete example of +this, see the `apachekerberos`_ cube. + +.. _`apachekerberos`: http://www.cubicweb.org/project/cubicweb-apachekerberos + +Repository authentication plugins +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +On the repository side, it is possible to register a source +authentifier using the following kind of code: -[WRITE ME] +.. 
sourcecode:: python + + from cubicweb.server.sources import native + + class FooAuthentifier(native.LoginPasswordAuthentifier): + """ a source authentifier plugin + if 'foo' in authentication information, no need to check + password + """ + auth_rql = 'Any X WHERE X is CWUser, X login %(login)s' + + def authenticate(self, session, login, **kwargs): + """return CWUser eid for the given login + if this account is defined in this source, + else raise `AuthenticationError` + """ + session.debug('authentication by %s', self.__class__.__name__) + if 'foo' not in kwargs: + return super(FooAuthentifier, self).authenticate(session, login, **kwargs) + try: + rset = session.execute(self.auth_rql, {'login': login}) + return rset[0][0] + except Exception, exc: + session.debug('authentication failure (%s)', exc) + raise AuthenticationError('foo user is unknown to us') + +Since repository authentifiers are not appobjects, we have to register +them through a `server_startup` hook. + +.. sourcecode:: python + + class ServerStartupHook(hook.Hook): + """ register the foo authenticator """ + __regid__ = 'fooauthenticatorregisterer' + events = ('server_startup',) + + def __call__(self): + self.debug('registering foo authentifier') + self.repo.system_source.add_authentifier(FooAuthentifier()) + +Web authentication plugins +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. sourcecode:: python -* authentication and management of sessions + class XFooUserRetriever(authentication.LoginPasswordRetreiver): + """ authenticate by the x-foo-user http header + or just do normal login/password authentication + """ + __regid__ = 'x-foo-user' + order = 0 + + def authentication_information(self, req): + """retrieve authentication information from the given request, raise + NoAuthInfo if expected information is not found + """ + self.debug('web authenticator building auth info') + try: + login = req.get_header('x-foo-user') + if login: + return login, {'foo': True} + else: + return super(XFooUserRetriever, self).authentication_information(self, req) + except Exception, exc: + self.debug('web authenticator failed (%s)', exc) + raise authentication.NoAuthInfo() + + def authenticated(self, retriever, req, cnx, login, authinfo): + """callback when return authentication information have opened a + repository connection successfully. Take care req has no session + attached yet, hence req.execute isn't available. + + Here we set a flag on the request to indicate that the user is + foo-authenticated. Can be used by a selector + """ + self.debug('web authenticator running post authentication callback') + cnx.foo_user = authinfo.get('foo') + +In the `authenticated` method we add (in an admitedly slightly hackish +way) an attribute to the connection object. This, in turn, can be used +to build a selector dispatching on the fact that the user was +preauthenticated or not. + +.. sourcecode:: python + + @objectify_selector + def foo_authenticated(cls, req, rset=None, **kwargs): + if hasattr(req.cnx, 'foo_user') and req.foo_user: + return 1 + return 0 diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devrepo/testing.rst --- a/doc/book/en/devrepo/testing.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devrepo/testing.rst Wed Nov 03 16:38:28 2010 +0100 @@ -6,24 +6,24 @@ Unit tests ---------- -The *CubicWeb* framework provides the `CubicWebTC` test base class in -the module `cubicweb.devtools.testlib`. +The *CubicWeb* framework provides the +:class:`cubicweb.devtools.testlib.CubicWebTC` test base class . Tests shall be put into the mycube/test directory. 
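For instance, a minimal test module following the layout described here (hypothetical names, sketch only):

.. sourcecode:: python

    # mycube/test/unittest_mycube.py
    from cubicweb.devtools.testlib import CubicWebTC

    class MyCubeTC(CubicWebTC):

        def test_users_exist(self):
            req = self.request()
            rset = req.execute('Any U WHERE U is CWUser')
            self.assertTrue(rset)

    if __name__ == '__main__':
        from logilab.common.testlib import unittest_main
        unittest_main()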
Additional test data shall go into mycube/test/data. -It is much advised to write tests concerning entities methods, hooks -and operations, security. The CubicWebTC base class has convenience -methods to help test all of this. - -.. note:: +It is much advised to write tests concerning entities methods, +actions, hooks and operations, security. The +:class:`~cubicweb.devtools.testlib.CubicWebTC` base class has +convenience methods to help test all of this. - In the realm of views, there is not much to do but check that the - views are valid XHTML. See :ref:`automatic_views_tests` for - details. Integration of CubicWeb tests with UI testing tools such as - `selenium`_ are currently under invesitgation. +In the realm of views, automatic tests check that views are valid +XHTML. See :ref:`automatic_views_tests` for details. Since 3.9, bases +for web functional testing using `windmill +`_ are set. See test cases in +cubicweb/web/test/windmill and python wrapper in +cubicweb/web/test_windmill/ if you want to use this in your own cube. -.. _selenium: http://seleniumhq.org/projects/ide/ Most unit tests need a live database to work against. This is achieved by CubicWeb using automatically sqlite (bundled with Python, see @@ -42,6 +42,8 @@ `sync_schema_props_perms()` fonction of the migration environment need not a database regeneration step. +.. _hook_test: + Unit test by example ```````````````````` @@ -77,13 +79,29 @@ self.kw1.set_relations(subkeyword_of=kw3) self.assertRaises(ValidationError, self.commit) -The test class defines a `setup_database` method which populates the +The test class defines a :meth:`setup_database` method which populates the database with initial data. Each test of the class runs with this -pre-populated database. +pre-populated database. A commit is done automatically after the +:meth:`setup_database` call. You don't have to call it explicitely. The test case itself checks that an Operation does it job of preventing cycles amongst Keyword entities. +`create_entity` is a useful method, which easily allows to create an +entity. You can link this entity to others entities, by specifying as +argument, the relation name, and the entity to link, as value. In the +above example, the `Classification` entity is linked to a `CWEtype` +via the relation `classifies`. Conversely, if you are creating a +`CWEtype` entity, you can link it to a `Classification` entity, by +adding `reverse_classifies` as argument. + +.. note:: + + :meth:`commit` method is not called automatically in test_XXX + methods. You have to call it explicitely if needed (notably to test + operations). It is a good practice to call :meth:`clear_all_caches` + on entities after a commit to avoid request cache effects. + You can see an example of security tests in the :ref:`adv_tuto_security`. @@ -145,11 +163,11 @@ connection from another ! Email notifications tests -------------------------- +````````````````````````` When running tests potentially generated e-mails are not really sent but is found in the list `MAILBOX` of module -`cubicweb.devtools.testlib`. +:mod:`cubicweb.devtools.testlib`. You can test your notifications by analyzing the contents of this list, which contains objects with two attributes: @@ -184,15 +202,70 @@ mail = MAILBOX[1] self.assertEquals(mail.subject, '[data] yes') +Visible actions tests +````````````````````` + +It is easy to write unit tests to test actions which are visible to +user or to a category of users. Let's take an example in the +`conference cube`_. + +.. 
_`conference cube`: http://www.cubicweb.org/project/cubicweb-conference +.. sourcecode:: python + + class ConferenceActionsTC(CubicWebTC): + + def setup_database(self): + self.conf = self.create_entity('Conference', + title=u'my conf', + url_id=u'conf', + start_on=date(2010, 1, 27), + end_on = date(2010, 1, 29), + call_open=True, + reverse_is_chair_at=chair, + reverse_is_reviewer_at=reviewer) + + def test_admin(self): + req = self.request() + rset = req.execute('Any C WHERE C is Conference') + self.assertListEquals(self.pactions(req, rset), + [('workflow', workflow.WorkflowActions), + ('edit', confactions.ModifyAction), + ('managepermission', actions.ManagePermissionsAction), + ('addrelated', actions.AddRelatedActions), + ('delete', actions.DeleteAction), + ('generate_badge_action', badges.GenerateBadgeAction), + ('addtalkinconf', confactions.AddTalkInConferenceAction) + ]) + self.assertListEquals(self.action_submenu(req, rset, 'addrelated'), + [(u'add Track in_conf Conference object', + u'http://testing.fr/cubicweb/add/Track' + u'?__linkto=in_conf%%3A%(conf)s%%3Asubject&' + u'__redirectpath=conference%%2Fconf&' + u'__redirectvid=' % {'conf': self.conf.eid}), + ]) + +You just have to execute a rql query corresponding to the view you want to test, +and to compare the result of +:meth:`~cubicweb.devtools.testlib.CubicWebTC.pactions` with the list of actions +that must be visible in the interface. This is a list of tuples. The first +element is the action's `__regid__`, the second the action's class. + +To test actions in submenu, you just have to test the result of +:meth:`~cubicweb.devtools.testlib.CubicWebTC.action_submenu` method. The last +parameter of the method is the action's category. The result is a list of +tuples. The first element is the action's title, and the second element the +action's url. + + .. _automatic_views_tests: Automatic views testing ----------------------- -This is done automatically with the AutomaticWebTest class. At cube -creation time, the mycube/test/test_mycube.py file contains such a -test. The code here has to be uncommented to be usable, without -further modification. +This is done automatically with the :class:`cubicweb.devtools.testlib.AutomaticWebTest` +class. At cube creation time, the mycube/test/test_mycube.py file +contains such a test. The code here has to be uncommented to be +usable, without further modification. The ``auto_populate`` method uses a smart algorithm to create pseudo-random data in the database, thus enabling the views to be @@ -212,6 +285,61 @@ auto_populate cannot guess by itself; these must yield resultsets against which views may be selected. +.. warning:: + + Take care to not let the imported `AutomaticWebTest` in your test module + namespace, else both your subclass *and* this parent class will be run. + +Testing on a real-life database +------------------------------- + +The ``CubicWebTC`` class uses the `cubicweb.devtools.ApptestConfiguration` +configuration class to setup its testing environment (database driver, +user password, application home, and so on). The `cubicweb.devtools` +module also provides a `RealDatabaseConfiguration` +class that will read a regular cubicweb sources file to fetch all +this information but will also prevent the database to be initalized +and reset between tests. + +For a test class to use a specific configuration, you have to set +the `_config` class attribute on the class as in: + +.. 
sourcecode:: python + + from cubicweb.devtools import RealDatabaseConfiguration + from cubicweb.devtools.testlib import CubicWebTC + + class BlogRealDatabaseTC(CubicWebTC): + _config = RealDatabaseConfiguration('blog', + sourcefile='/path/to/realdb_sources') + + def test_blog_rss(self): + req = self.request() + rset = req.execute('Any B ORDERBY D DESC WHERE B is BlogEntry, ' + 'B created_by U, U login "logilab", B creation_date D') + self.view('rss', rset) + + + +Testing with other cubes +------------------------ + +Sometimes a small component cannot be tested all by itself, so one +needs to specify other cubes to be used as part of the the unit test +suite. This is handled by the ``bootstrap_cubes`` file located under +``mycube/test/data``. One example from the `preview` cube:: + + card, file, preview + +The format is: + +* possibly several empy lines or lines starting with ``#`` (comment lines) +* one line containing a coma separated list of cube names. + +It is also possible to add a ``schema.py`` file in +``mycube/test/data``, which will be used by the testing framework, +therefore making new entity types and relations available to the +tests. Test APIS --------- diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devrepo/vreg.rst --- a/doc/book/en/devrepo/vreg.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devrepo/vreg.rst Wed Nov 03 16:38:28 2010 +0100 @@ -37,6 +37,7 @@ .. autoclass:: cubicweb.appobject.yes .. autoclass:: cubicweb.selectors.match_kwargs .. autoclass:: cubicweb.selectors.appobject_selectable +.. autoclass:: cubicweb.selectors.adaptable Result set selectors @@ -66,7 +67,7 @@ match or not according to entity's (instance or class) properties. .. autoclass:: cubicweb.selectors.non_final_entity -.. autoclass:: cubicweb.selectors.implements +.. autoclass:: cubicweb.selectors.is_instance .. autoclass:: cubicweb.selectors.score_entity .. autoclass:: cubicweb.selectors.rql_condition .. autoclass:: cubicweb.selectors.relation_possible @@ -75,6 +76,8 @@ .. autoclass:: cubicweb.selectors.partial_has_related_entities .. autoclass:: cubicweb.selectors.has_permission .. autoclass:: cubicweb.selectors.has_add_permission +.. autoclass:: cubicweb.selectors.has_mimetype +.. 
autoclass:: cubicweb.selectors.implements Logged user selectors diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devweb/controllers.rst --- a/doc/book/en/devweb/controllers.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devweb/controllers.rst Wed Nov 03 16:38:28 2010 +0100 @@ -15,13 +15,12 @@ `Browsing`: -* the View controlleris associated with most browsing actions within a - CubicWeb application: it always instantiates a - :ref:`the_main_template` and lets the ResultSet/Views dispatch - system build up the whole content; it handles ObjectNotFound and - NoSelectableObject errors that may bubble up to its entry point, in - an end-user-friendly way (but other programming errors will slip - through) +* the View controller is associated with most browsing actions within a + CubicWeb application: it always instantiates a :ref:`the_main_template` and + lets the ResultSet/Views dispatch system build up the whole content; it + handles :exc:`ObjectNotFound` and :exc:`NoSelectableObject` errors that may + bubble up to its entry point, in an end-user-friendly way (but other + programming errors will slip through) * the JSon controller (same module) provides services for Ajax calls, typically using JSON as a serialization format for input, and @@ -49,7 +48,7 @@ for outgoing email notifications * the MailBugReport controller (web/views/basecontrollers.py) allows - to quickly have a `repotbug` feature in one's application + to quickly have a `reportbug` feature in one's application Registration ++++++++++++ diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devweb/edition/examples.rst --- a/doc/book/en/devweb/edition/examples.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devweb/edition/examples.rst Wed Nov 03 16:38:28 2010 +0100 @@ -18,7 +18,7 @@ from cubicweb.web import formfields as ff, formwidgets as fwdgs class SendToReviewerStatusChangeView(ChangeStateFormView): __select__ = (ChangeStateFormView.__select__ & - implements('Talk') & + is_instance('Talk') & rql_condition('X in_state S, S name "submitted"')) def get_form(self, entity, transition, **kwargs): @@ -126,7 +126,7 @@ class MassMailingFormView(form.FormViewMixIn, EntityView): __regid__ = 'massmailing' - __select__ = implements(IEmailable) & authenticated_user() + __select__ = is_instance(IEmailable) & authenticated_user() def call(self): form = self._cw.vreg['forms'].select('massmailing', self._cw, diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devweb/edition/form.rst --- a/doc/book/en/devweb/edition/form.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devweb/edition/form.rst Wed Nov 03 16:38:28 2010 +0100 @@ -1,3 +1,5 @@ +.. _webform: + HTML form construction ---------------------- diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devweb/facets.rst --- a/doc/book/en/devweb/facets.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devweb/facets.rst Wed Nov 03 16:38:28 2010 +0100 @@ -1,172 +1,23 @@ The facets system ----------------- -Facets allow to restrict searches according to some criteria. CubicWeb -has a builtin `facet`_ system to define restrictions `filters`_ really -as easily as possible. A few base classes for facets are provided in -``cubicweb.web.facet.py``. All classes inherits from the base class -``AbstractFacet``. +Facets allow to restrict searches according to some user friendly criterias. +CubicWeb has a builtin `facet`_ system to define restrictions `filters`_ really +as easily as possible. 
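To keep one concrete example at hand (an editor's sketch, not part of the patch: it reuses the hypothetical `Office` / `has_address` schema from the text removed below, updated to the `is_instance` selector introduced elsewhere in this patch):

.. sourcecode:: python

    from cubicweb.selectors import is_instance
    from cubicweb.web.facet import RelationFacet

    class PostalCodeFacet(RelationFacet):
        __regid__ = 'postalcode-facet'      # every registered class must have an id
        __select__ = is_instance('Office')  # only selected when visualizing offices
        rtype = 'has_address'               # filter on the entity linked through has_address
        target_attr = 'postalcode'          # ... using its postalcode attribute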
-Here is an overview of the facets rendering pick from the `tracker` cube: +Here is an exemple of the facets rendering picked from our +http://www.cubicweb.org web site: .. image:: ../images/facet_overview.png -Facets will appear on each page presenting more than one entity. - - - -VocabularyFacet -~~~~~~~~~~~~~~~~ -The ``VocabularyFacet`` inherits from the ``AbstractFacet``. -A class which inherits from VocabularyFacets must redefine these methods: - -.. automethod:: cubicweb.web.facet.VocabularyFacet.vocabulary -.. automethod:: cubicweb.web.facet.VocabularyFacet.possible_values - -RelationFacet -~~~~~~~~~~~~~~ - -The ``RelationFacet`` inherits from the ``VocabularyFacet``. It allows to filter entities according to certain relation's values. Generally, you just have to define some class attributes like: - -- rtype: the name of the relation -- role: the default value is set to `subject` -- target_attr: needed if it is not the default attribute of the entity - - -To illustrate this facet, let's take for example an *excerpt* of the schema of an office location search application: - -.. sourcecode:: python - - class Office(WorkflowableEntityType): - price = Int(description='euros / m2 / HC / HT') - surface = Int(description='m2') - description = RichString(fulltextindexed=True) - has_address = SubjectRelation('PostalAddress', - cardinality='1?', - composite='subject') - proposed_by = SubjectRelation('Agency') - comments = ObjectRelation('Comment', - cardinality='1*', - composite='object') - screenshots = SubjectRelation(('File', 'Image'), - cardinality='*1', - composite='subject') - - -We define a facet to filter offices according to the attribute -`postalcode` of their associated `PostalAdress`. - -.. sourcecode:: python - - class PostalCodeFacet(RelationFacet): - __regid__ = 'postalcode-facet' # every registered class must have an id - __select__ = implements('Office') # this facet should only be selected when - # visualizing offices - rtype = 'has_address' # this facet is a filter on the entity linked to - # the office thrhough the relation - # has_address - target_attr = 'postalcode' # the filter's key is the attribute "postal_code" - # of the target PostalAddress entity - - -AttributeFacet -~~~~~~~~~~~~~~ - -The ``AttributeFacet`` inherits from the ``RelationFacet``. It allows to filter entities according to certain attribute's values. - -The example below resumes the former schema. We define now a filter based on the `surface` attribute of the -`Office`. - -.. sourcecode:: python +Facets will appear on each page presenting more than one entity that may be +filtered according to some known criteria. - class SurfaceFacet(AttributeFacet): - __regid__ = 'surface-facet' # every registered class must have an id - __select__ = implements('Office') # this facet should only be selected when - # visualizing offices - rtype = 'surface' # the filter's key is the attribute "surface" - comparator = '>=' # override the default value of operator since - # we want to filter according to a - # minimal - # value, not an exact one - - def rset_vocabulary(self, ___): - """override the default vocabulary method since we want to hard-code - our threshold values. - Not overriding would generate a filter box with all existing surfaces - defined in the database. - """ - return [('> 200', '200'), ('> 250', '250'), - ('> 275', '275'), ('> 300', '300')] - -RangeFacet -~~~~~~~~~~ -The ``RangeFacet`` inherits from the ``AttributeFacet``. It allows to filter entities according to certain attributes of numerical type. 
- -The ``RangeFacet`` displays a slider using `jquery`_ to choose a lower bound and an upper bound. - -The example below defines a facet to filter a selection of books according to their number of pages. - -.. sourcecode:: python - - class BookPagesFacet(RangeFacet): - __regid__ = 'priority-facet' - __select__ = RangeFacet.__select__ & implements('Book') - rtype = 'pages' - -The image below display the rendering of the ``RangeFacet``: - -.. image:: ../images/facet_range.png - -DateRangeFacet -~~~~~~~~~~~~~~ -The ``DateRangeFacet`` inherits from the ``RangeFacet``. It allows to filter entities according to certain attributes of date type. +Base classes for facets +~~~~~~~~~~~~~~~~~~~~~~~ +.. automodule:: cubicweb.web.facet -Here is an example of code that defines a facet to filter -musical works according to their composition date: - -.. sourcecode:: python - - class CompositionDateFacet(DateRangeFacet): - # 1. make sure this facet is displayed only on Track selection - __select__ = DateRangeFacet.__select__ & implements('Track') - # 2. give the facet an id required by CubicWeb) - __regid__ = 'compdate-facet' - # 3. specify the attribute name that actually stores the date in the DB - rtype = 'composition_date' - -With this facet, on each page displaying tracks, you'll be able to filter them -according to their composition date with a jquery slider. - -The image below display the rendering of the ``DateRangeFacet``: - -.. image:: ../images/facet_date_range.png - - -HasRelationFacet -~~~~~~~~~~~~~~~~ - -The ``DateRangeFacet`` inherits from the ``AbstractFacet``. It will -display a simple checkbox and lets you refine your selection in order -to get only entities that actually use this relation. - -Here is an example of the rendering of the ``HasRelationFacet`` to -filter entities with image and the corresponding code: - -.. image:: ../images/facet_has_image.png - -.. sourcecode:: python - - class HasImageFacet(HasRelationFacet): - __regid__ = 'hasimage-facet' - __select__ = HasRelationFacet.__select__ & implements('Book') - rtype = 'has_image' - - - -To use ``HasRelationFacet`` on a reverse relation add ``role = 'object'`` in -it's definitions. .. _facet: http://en.wikipedia.org/wiki/Faceted_browser .. _filters: http://www.cubicweb.org/blogentry/154152 -.. _jquery: http://www.jqueryui.com/ diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devweb/js.rst --- a/doc/book/en/devweb/js.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devweb/js.rst Wed Nov 03 16:38:28 2010 +0100 @@ -350,3 +350,47 @@ There is also javascript support for massmailing, gmap (google maps), fckcwconfig (fck editor), timeline, calendar, goa (CubicWeb over AppEngine), flot (charts drawing), tabs and bookmarks. + +API +~~~ + +.. toctree:: + :maxdepth: 1 + + js_api/index + + +Testing javascript +~~~~~~~~~~~~~~~~~~~~~~ + +You with the ``cubicweb.qunit.QUnitTestCase`` can include standard Qunit tests +inside the python unittest run . You simply have to define a new class that +inherit from ``QUnitTestCase`` and register your javascript test file in the +``all_js_tests`` lclass attribut. This ``all_js_tests`` is a sequence a +3-tuple ( ,] []): + +The should contains the qunit test. defines the list +of javascript file that must be imported before the test script. Dependencies +are included their definition order. are additional files copied in the +test directory. both and are optionnal. +``jquery.js`` is preincluded in for all test. + +.. 
sourcecode:: python + + from cubicweb.qunit import QUnitTestCase + + class MyQUnitTest(QUnitTestCase): + + all_js_tests = ( + ("relative/path/to/my_simple_testcase.js",) + ("relative/path/to/my_qunit_testcase.js",( + "rel/path/to/dependency_1.js", + "rel/path/to/dependency_2.js",)), + ("relative/path/to/my_complexe_qunit_testcase.js",( + "rel/path/to/dependency_1.js", + "rel/path/to/dependency_2.js", + ),( + "rel/path/file_dependency.html", + "path/file_dependency.json") + ), + ) diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devweb/views/basetemplates.rst --- a/doc/book/en/devweb/views/basetemplates.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devweb/views/basetemplates.rst Wed Nov 03 16:38:28 2010 +0100 @@ -11,17 +11,33 @@ in :ref:`views_base_class`, there are two kinds of views: the templatable and non-templatable. -Non-templatable views are standalone. They are responsible for all the -details such as setting a proper content type (or mime type), the -proper document headers, namespaces, etc. Examples are pure xml views -such as RSS or Semantic Web views (`SIOC`_, `DOAP`_, `FOAF`_, `Linked -Data`_, etc.). + +Non-templatable views +--------------------- + +Non-templatable views are standalone. They are responsible for all the details +such as setting a proper content type (or mime type), the proper document +headers, namespaces, etc. Examples are pure xml views such as RSS or Semantic Web +views (`SIOC`_, `DOAP`_, `FOAF`_, `Linked Data`_, etc.), and views which generate +binary files (pdf, excel files, etc.) .. _`SIOC`: http://sioc-project.org/ .. _`DOAP`: http://trac.usefulinc.com/doap .. _`FOAF`: http://www.foaf-project.org/ .. _`Linked Data`: http://linkeddata.org/ + +To notice that a view is not templatable, you just have to set the +view's class attribute `templatable` to `False`. In this case, it +should set the `content_type` class attribute to the correct MIME +type. By default, it is text/xhtml. Additionally, if your view +generate a binary file, you have to set the view's class attribute +`binary` to `True` too. + + +Templatable views +----------------- + Templatable views are not concerned with such pesky details. They leave it to the template. Conversely, the template's main job is to: @@ -30,14 +46,14 @@ * invoke adequate views in the various sections of the document -Look at :mod:`cubicweb.web.views.basetemplates` and you will find the -base templates used to generate (X)HTML for your application. The most -important template there is `TheMainTemplate`. +Look at :mod:`cubicweb.web.views.basetemplates` and you will find the base +templates used to generate (X)HTML for your application. The most important +template there is :class:`~cubicweb.web.views.basetemplates.TheMainTemplate`. .. _the_main_template_layout: TheMainTemplate ---------------- +~~~~~~~~~~~~~~~ .. _the_main_template_sections: @@ -88,28 +104,60 @@ How and why a view object is given to the main template is explained in the :ref:`publisher` chapter. -Class attributes -```````````````` +Configure the main template +``````````````````````````` + +You can overload some methods of the +:class:`~cubicweb.web.views.basetemplates.TheMainTemplate`, in order to fulfil +your needs. There are also some attributes and methods which can be defined on a +view to modify the base template behaviour: + +* `paginable`: if the result set is bigger than a configurable size, your result + page will be paginated by default. You can set this attribute to `False` to + avoid this. 
+ +* `binary`: boolean flag telling if the view generates some text or a binary + stream. Default to False. When view generates text argument given to `self.w` + **must be an unicode string**, encoded string otherwise. -We can also control certain aspects of the main template thanks to the following -forms parameters: +* `content_type`, view's content type, default to 'text/xhtml' + +* `templatable`, boolean flag telling if the view's content should be returned + directly (when `False`) or included in the main template layout (including + header, boxes and so on). + +* `page_title()`, method that should return a title that will be set as page + title in the html headers. + +* `html_headers()`, method that should return a list of HTML headers to be + included the html headers. + + +You can also modify certain aspects of the main template of a page +when building an url or setting these parameters in the req.form: * `__notemplate`, if present (whatever the value assigned), only the content view is returned -* `__force_display`, if present and its value is not null, no navigation - whatever the number of entities to display + +* `__force_display`, if present and its value is not null, no pagination whatever + the number of entities to display (e.g. similar effect as view's `paginable` + attribute described above. + * `__method`, if the result set to render contains only one entity and this - parameter is set, it refers to a method to call on the entity by passing it - the dictionary of the forms parameters, before going the classic way (through - step 1 and 2 described juste above) + parameter is set, it refers to a method to call on the entity by passing it the + dictionary of the forms parameters, before going the classic way (through step + 1 and 2 described juste above) + +* `vtitle`, a title to be set as
`<h1>`
    of the content Other templates ---------------- +~~~~~~~~~~~~~~~ -Other standard templates include: +There are also the following other standard templates: -* `login` and `logout` - -* `error-template` specializes TheMainTemplate to do proper end-user - output if an error occurs during the computation of TheMainTemplate - (it is a fallback view). +* :class:`cubicweb.web.views.basetemplates.LogInTemplate` +* :class:`cubicweb.web.views.basetemplates.LogOutTemplate` +* :class:`cubicweb.web.views.basetemplates.ErrorTemplate` specializes + :class:`~cubicweb.web.views.basetemplates.TheMainTemplate` to do + proper end-user output if an error occurs during the computation of + TheMainTemplate (it is a fallback view). diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devweb/views/baseviews.rst --- a/doc/book/en/devweb/views/baseviews.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devweb/views/baseviews.rst Wed Nov 03 16:38:28 2010 +0100 @@ -4,11 +4,12 @@ ---------- *CubicWeb* provides a lot of standard views, that can be found in - :mod:`cubicweb.web.views` and :mod:`cubicweb.web.views.baseviews`. +:mod:`cubicweb.web.views` sub-modules. -A certain number of views are used to build the web interface, which -apply to one or more entities. Their identifier is what distinguish -them from each others and the main ones are: +A certain number of views are used to build the web interface, which apply to one +or more entities. As other appobject, Their identifier is what distinguish them +from each others. The most generic ones, found in +:mod:`cubicweb.web.views.baseviews`, are described below. HTML views ~~~~~~~~~~ @@ -32,53 +33,105 @@ This view is the default view used when nothing needs to be rendered. It is always applicable. + Entity views ```````````` *incontext, outofcontext* - Those are used to display a link to an entity, depending on the - entity having to be displayed in or out of context - (of another entity). By default it respectively produces the - result of `textincontext` and `textoutofcontext` wrapped in a link - leading to the primary view of the entity. + + Those are used to display a link to an entity, whose label depends on the + entity having to be displayed in or out of context (of another entity): some + entities make sense in the context of another entity. For instance, the + `Version` of a `Project` in forge. So one may expect that 'incontext' will + be called when display a version from within the context of a project, while + 'outofcontext"' will be called in other cases. In our example, the + 'incontext' view of the version would be something like '0.1.2', while the + 'outofcontext' view would include the project name, e.g. 'baz 0.1.2' (since + only a version number without the associated project doesn't make sense if + you don't know yet that you're talking about the famous 'baz' project. |cubicweb| + tries to make guess and call 'incontext'/'outofcontext' nicely. When it can't + know, the 'oneline' view should be used. + + By default it respectively produces the result of `textincontext` and + `textoutofcontext` wrapped in a link leading to the primary view of the + entity. + *oneline* + This view is used when we can't tell if the entity should be considered as - displayed in or out of context. By default it produces the result of `text` + displayed in or out of context. By default it produces the result of `text` in a link leading to the primary view of the entity. + List ````` *list* - This view displays a list of entities by creating a HTML list (`
<ul>`)
-    and call the view `listitem` for each entity of the result set.
+
+  This view displays a list of entities by creating a HTML list (`<ul>`) and
+  calls the view `listitem` for each entity of the result set. The 'list' view
+  will generate html like:
+
+  .. sourcecode:: html
+
+    <ul class="section">
+      <li>"result of 'subvid' view for a row</li>
+      <li>...</li>
+    </ul>
+
 
-*listitem*
-    This view redirects by default to the `outofcontext` view.
+*simplelist*
+
+  This view is not 'ul' based; it relies on div behaviour to separate items. The
+  generated html will look like:
+
+  .. sourcecode:: html
+
+    <div class="section">"result of 'subvid' view for a row</div>
+    ...
+
+  It relies on the base :class:`~cubicweb.view.View` implementation of the
+  :meth:`call` method to insert those `<div>`.
+
 *sameetypelist*
-    This view displays a list of entities of the same type, in HTML section (`<div>`)
-    and call the view `sameetypelistitem` for each entity of the result set.
-*sameetypelistitem*
-    This view redirects by default to the `listitem` view.
+
+  This view displays a list of entities of the same type, in an HTML section
+  (`<div class="section">
        `) and call the view `sameetypelistitem` for each entity of the result + set. It's designed to get a more adapted global list when displayed entities + are all of the same type. + *csv* - This view applies to entity groups, which are individually - displayed using the `incontext` view. It displays each entity as a - coma separated list. It is NOT related to the well-known text file - format. + + This view displays each entity in a coma separated list. It is NOT related to + the well-known text file format. + + +Those list view can be given a 'subvid' arguments, telling the view to use of +each item in the list. When not specified, the value of the 'redirect_vid' +attribute of :class:`ListItemView` (for 'listview') or of :class:`SimpleListView` +will be used. This default to 'outofcontext' for 'list' / 'incontext' for +'simplelist' + Text entity views ~~~~~~~~~~~~~~~~~ +Basic html view have some variantsto be used when generating raw text, not html +(for notifications for instance). + *text* + This is the simplest text view for an entity. By default it returns the result of the `.dc_title` method, which is cut to fit the `navigation.short-line-size` property if necessary. *textincontext, textoutofcontext* - Similar to the `text` view, but called when an entity is considered out or - in context. By default it returns respectively the result of the - methods `.dc_title` and `.dc_long_title` of the entity. + + Similar to the `text` view, but called when an entity is considered out or in + context (see description of incontext/outofcontext html views for more + information on this). By default it returns respectively the result of the + methods `.dc_title()` and `.dc_long_title()` of the entity. diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devweb/views/breadcrumbs.rst --- a/doc/book/en/devweb/views/breadcrumbs.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devweb/views/breadcrumbs.rst Wed Nov 03 16:38:28 2010 +0100 @@ -8,11 +8,11 @@ ~~~~~~~ Breadcrumbs are displayed by default in the header section (see -:ref:`the_main_template_sections`). With the default main -template, the header section is composed by the logo, the application -name, breadcrumbs and, at the most right, the login box. Breadcrumbs -are displayed just next to the application name, thus breadcrumbs -begin with a separator. +:ref:`the_main_template_sections`). With the default main template, +the header section is composed by the logo, the application name, +breadcrumbs and, at the most right, the login box. Breadcrumbs are +displayed just next to the application name, thus they begin with a +separator. Here is the header section of the CubicWeb's forge: @@ -22,29 +22,31 @@ :mod:`cubicweb.web.views.ibreadcrumbs`: - `BreadCrumbEntityVComponent`: displayed for a result set with one line - if the entity implements the ``IBreadCrumbs`` interface. + if the entity is adaptable to ``IBreadCrumbsAdapter``. - `BreadCrumbETypeVComponent`: displayed for a result set with more than - one line, but with all entities of the same type which implement the - ``IBreadCrumbs`` interface. + one line, but with all entities of the same type which can adapt to + ``IBreadCrumbsAdapter``. - `BreadCrumbAnyRSetVComponent`: displayed for any other result set. Building breadcrumbs ~~~~~~~~~~~~~~~~~~~~ -The ``IBreadCrumbs`` interface is defined in the -:mod:`cubicweb.interfaces` module. It specifies that an entity which -implements this interface must have a ``breadcrumbs`` method. 
+The ``IBreadCrumbsAdapter`` adapter is defined in the +:mod:`cubicweb.web.views.ibreadcrumbs` module. It specifies that an +entity which implements this interface must have a ``breadcrumbs`` and +a ``parent_entity`` method. A default implementation for each is +provided. This implementation expoits the ITreeAdapter. .. note:: Redefining the breadcrumbs is the hammer way to do it. Another way - is to define the `parent` method on an entity (as defined in the - `ITree` interface). If available, it will be used to compute - breadcrumbs. + is to define an `ITreeAdapter` adapter on an entity type. If + available, it will be used to compute breadcrumbs. -Here is the API of the ``breadcrumbs`` method: +Here is the API of the ``IBreadCrumbsAdapter`` class: -.. automethod:: cubicweb.interfaces.IBreadCrumbs.breadcrumbs +.. automethod:: cubicweb.web.views.ibreadcrumbs.IBreadCrumbsAdapter.parent_entity +.. automethod:: cubicweb.web.views.ibreadcrumbs.IBreadCrumbsAdapter.breadcrumbs If the breadcrumbs method return a list of entities, the ``cubicweb.web.views.ibreadcrumbs.BreadCrumbView`` is used to display diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devweb/views/index.rst --- a/doc/book/en/devweb/views/index.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devweb/views/index.rst Wed Nov 03 16:38:28 2010 +0100 @@ -12,6 +12,7 @@ views basetemplates primary + reledit baseviews startup boxes diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devweb/views/primary.rst --- a/doc/book/en/devweb/views/primary.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devweb/views/primary.rst Wed Nov 03 16:38:28 2010 +0100 @@ -36,15 +36,16 @@ Attributes/relations display location ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -In the primary view, there are 3 sections where attributes and +In the primary view, there are three sections where attributes and relations can be displayed (represented in pink in the image above): -* attributes -* relations -* sideboxes +* 'attributes' +* 'relations' +* 'sideboxes' **Attributes** can only be displayed in the attributes section (default - behavior). They can also be hidden. + behavior). They can also be hidden. By default, attributes of type `Password` + and `Bytes` are hidden. For instance, to hide the ``title`` attribute of the ``Blog`` entity: @@ -95,6 +96,10 @@ * ``order``: int used to control order within a section. When not specified, automatically set according to order in which tags are added. +* ``label``: label for the relations section or side box + +* ``showlabel``: boolean telling whether the label is displayed + .. sourcecode:: python # let us remind the schema of a blog entry @@ -110,15 +115,31 @@ for index, attr in enumerate('title', 'content', 'publish_date'): view_ctrl.tag_attribute(('BlogEntry', attr), {'order': index}) -Keys for relations only: +By default, relations displayed in the 'relations' section are being displayed by +the 'autolimited' view. This view will use comma separated values, or list view +and/or limit your rset if there is too much items in it (and generate the "view +all" link in this case). -* ``label``: label for the relations section or side box +You can control this view by setting the following values in the +`primaryview_display_ctrl` relation tag: + +* `limit`, maximum number of entities to display. The value of the + 'navigation.related-limit' cwproperty is used by default (which is 8 by default). + If None, no limit. 
-* ``showlabel``: boolean telling whether the label is displayed +* `use_list_limit`, number of entities until which they should be display as a list + (eg using the 'list' view). Below that limit, the 'csv' view is used. If None, + display using 'csv' anyway. + +* `subvid`, the subview identifier (eg view that should be used of each item in the + list) -* ``limit``: boolean telling if the results should be limited. If so, a link to all results is displayed +Notice you can also use the `filter` key to set up a callback taking the related +result set as argument and returning it filtered, to do some arbitrary filtering +that can't be done using rql for instance. -* ``filter``: callback taking the related result set as argument and returning it filtered + + .. sourcecode:: python @@ -153,22 +174,19 @@ are: *render_entity_title(self, entity)* - Renders the entity title using the ``def dc_title(self)`` method. - -*render_entity_metadata(self, entity)* - Renders the entity metadata by calling the ``metadata`` view on the - entity. This generic view is in cubicweb.views.baseviews. + Renders the entity title, by default using entity's :meth:`dc_title()` method. *render_entity_attributes(self, entity)* - Renders all the attribute of an entity with the exception of - attribute of type `Password` and `Bytes`. The skip_none class - attribute controls the display of None valued attributes. + Renders all attributes and relations in the 'attributes' section . The + :attr:`skip_none` attribute controls the display of `None` valued attributes. *render_entity_relations(self, entity)* - Renders all the relations of the entity in the main section of the page. + Renders all relations in the 'relations' section. *render_side_boxes(self, entity, boxes)* - Renders relations of the entity in a side box. + Renders side boxes on the right side of the content. This will generate a box + for each relation in the 'sidebox' section, as well as explicit box + appobjects selectable in this context. The placement of relations in the relations section or in side boxes can be controlled through the :ref:`primary_view_configuration` mechanism. @@ -184,24 +202,25 @@ subclass, you can already customize some of the rendering: *show_attr_label* - Renders the attribute label next to the attribute value if set to True. + Renders the attribute label next to the attribute value if set to `True`. Otherwise, does only display the attribute value. *show_rel_label* - Renders the relation label next to the relation value if set to True. + Renders the relation label next to the relation value if set to `True`. Otherwise, does only display the relation value. *skip_none* - Does not render an attribute value that is None if set to True. + Does not render an attribute value that is None if set to `True`. *main_related_section* - Renders the relations of the entity if set to True. + Renders the relations of the entity if set to `True`. A good practice is for you to identify the content of your entity type for which the default rendering does not answer your need so that you can focus on the specific method (from the list above) that needs to be modified. We do not advise you to overwrite ``render_entity`` unless you want a completely different layout. + Example of customization and creation ````````````````````````````````````` @@ -215,11 +234,11 @@ .. 
sourcecode:: python - from cubicweb.selectors import implements + from cubicweb.selectors import is_instance from cubicweb.web.views.primary import Primaryview class BlogEntryPrimaryView(PrimaryView): - __select__ = PrimaryView.__select__ & implements('BlogEntry') + __select__ = PrimaryView.__select__ & is_instance('BlogEntry') def render_entity_attributes(self, entity): self.w(u'

<p>published on %s</p>
        ' % @@ -245,12 +264,12 @@ .. sourcecode:: python from logilab.mtconverter import xml_escape - from cubicweb.selectors import implements, one_line_rset + from cubicweb.selectors import is_instance, one_line_rset from cubicweb.web.views.primary import Primaryview class BlogPrimaryView(PrimaryView): __regid__ = 'primary' - __select__ = PrimaryView.__select__ & implements('Blog') + __select__ = PrimaryView.__select__ & is_instance('Blog') rql = 'Any BE ORDERBY D DESC WHERE BE entry_of B, BE publish_date D, B eid %(b)s' def render_entity_relations(self, entity): @@ -260,7 +279,7 @@ class BlogEntryInBlogView(EntityView): __regid__ = 'inblogcontext' - __select__ = implements('BlogEntry') + __select__ = is_instance('BlogEntry') def cell_call(self, row, col): entity = self.cw_rset.get_entity(row, col) diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devweb/views/reledit.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/en/devweb/views/reledit.rst Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,122 @@ +.. _reledit: + +The "Click and Edit" (also `reledit`) View +------------------------------------------ + +The principal way to update data through the Web UI is through the +`modify` action on entities, which brings a full form. This is +described in the :ref:`webform` chapter. + +There is however another way to perform piecewise edition of entities +and relations, using a specific `reledit` (for *relation edition*) +view from the :mod:`cubicweb.web.views.reledit` module. + +This is typically applied from the default Primary View (see +:ref:`primary_view`) on the attributes and relation section. It makes +small editions more convenient. + +Of course, this can be used customely in any other view. Here come +some explanation about its capabilities and instructions on the way to +use it. + +Using `reledit` +*************** + +Let's start again with a simple example: + +.. sourcecode:: python + + class Company(EntityType): + name = String(required=True, unique=True) + boss = SubjectRelation('Person', cardinality='1*') + status = SubjectRelation('File', cardinality='?*', composite='subject') + +In some view code we might want to show these attributes/relations and +allow the user to edit each of them in turn without having to leave +the current page. We would write code as below: + +.. sourcecode:: python + + company.view('reledit', rtype='name', default_value='') # editable name attribute + company.view('reledit', rtype='boss') # editable boss relation + company.view('reledit', rtype='status') # editable attribute-like relation + +If one wanted to edit the company from a boss's point of view, one +would have to indicate the proper relation's role. By default the role +is `subject`. + +.. sourcecode:: python + + person.view('reledit', rtype='boss', role='object') + +Each of these will provide with a different editing widget. The `name` +attribute will obviously get a text input field. The `boss` relation +will be edited through a selection box, allowing to pick another +`Person` as boss. The `status` relation, given that it defines Company +as a composite entity with one file inside, will provide additional actions + +* to `add` a `File` when there is one +* to `delete` the `File` (if the cardinality allows it) + +Moreover, editing the relation or using the `add` action leads to an +embedded edition/creation form allowing edition of the target entity +(which is `File` in our example) instead of merely allowing to choose +amongst existing files. 
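For instance, the calls above can be combined inside a custom primary view. The
sketch below assumes the `Company` entity type from the schema snippet above; the
view class itself is hypothetical and only meant as an illustration:

.. sourcecode:: python

    from cubicweb.selectors import is_instance
    from cubicweb.web.views.primary import PrimaryView

    class CompanyPrimaryView(PrimaryView):
        __select__ = PrimaryView.__select__ & is_instance('Company')

        def render_entity_attributes(self, entity):
            # each call renders the current value plus an in-place edition widget
            self.w(u'<h2>%s</h2>' % entity.view('reledit', rtype='name'))
            self.w(u'<div>%s</div>' % entity.view('reledit', rtype='boss'))
            self.w(u'<div>%s</div>' % entity.view('reledit', rtype='status'))

Each field keeps the behaviour described above: a text input for the `name`
attribute, a selection box for the `boss` relation, and add/delete actions for
the composite `status` relation.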
+ +The `reledit_ctrl` rtag +*********************** + +The behaviour of reledited attributes/relations can be finely +controlled using the reledit_ctrl rtag, defined in +:mod:`cubicweb.web.uicfg`. + +This rtag provides four control variables: + +* ``default_value``: alternative default value + The default value is what is shown when there is no value. +* ``reload``: boolean, eid (to reload to) or function taking subject + and returning bool/eid This is useful when editing a relation (or + attribute) that impacts the url or another parts of the current + displayed page. Defaults to false. +* ``rvid``: alternative view id (as str) for relation or composite + edition Default is 'incontext' or 'csv' depending on the + cardinality. They can also be statically changed by subclassing + ClickAndEditFormView and redefining _one_rvid (resp. _many_rvid). +* ``edit_target``: 'rtype' (to edit the relation) or 'related' (to + edit the related entity) This controls whether to edit the relation + or the target entity of the relation. Currently only one-to-one + relations support target entity edition. By default, the 'related' + option is taken whenever the relation is composite and one-to-one. + +Let's see how to use these controls. + +.. sourcecode:: python + + from logilab.mtconverter import xml_escape + from cubicweb.web.uicfg import reledit_ctrl + reledit_ctrl.tag_attribute(('Company', 'name'), + {'reload': lambda x:x.eid, + 'default_value': xml_escape(u'')}) + reledit_ctrl.tag_object_of(('*', 'boss', 'Person'), {'edit_target': 'related'}) + +The `default_value` needs to be an xml escaped unicode string. + +The `edit_target` tag on the `boss` relation being set to `related` will +ensure edition of the `Person` entity instead (using a standard +automatic form) of the association of Company and Person. + +Finally, the `reload` key accepts either a boolean, an eid or an +unicode string representing an url. If an eid is provided, it will be +internally transformed into an url. The eid/url case helps when one +needs to reload and the current url is inappropriate. A common case is +edition of a key attribute, which is part of the current url. If one +user changed the Company's name from `lozilab` to `logilab`, reloading +on http://myapp/company/lozilab would fail. Providing the entity's +eid, then, forces to reload on something like http://myapp/company/42, +which always work. + + + + + + diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devweb/views/table.rst --- a/doc/book/en/devweb/views/table.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devweb/views/table.rst Wed Nov 03 16:38:28 2010 +0100 @@ -7,6 +7,10 @@ Creates a HTML table (``) and call the view `cell` for each cell of the result set. Applicable on any result set. +*editable-table* + Creates an **editable** HTML table (`
        `) and call the view `cell` for each cell of + the result set. Applicable on any result set. + *cell* By default redirects to the `final` view if this is a final entity or `outofcontext` view otherwise @@ -17,3 +21,58 @@ .. autoclass:: cubicweb.web.views.tableview.TableView :members: + +Example +``````` + +Let us take an example from the timesheet cube: + +.. sourcecode:: python + + class ActivityTable(EntityView): + __regid__ = 'activitytable' + __select__ = is_instance('Activity') + title = _('activitytable') + + def call(self, showresource=True): + _ = self._cw._ + headers = [_("diem"), _("duration"), _("workpackage"), _("description"), _("state"), u""] + eids = ','.join(str(row[0]) for row in self.cw_rset) + rql = ('Any R, D, DUR, WO, DESCR, S, A, SN, RT, WT ORDERBY D DESC ' + 'WHERE ' + ' A is Activity, A done_by R, R title RT, ' + ' A diem D, A duration DUR, ' + ' A done_for WO, WO title WT, ' + ' A description DESCR, A in_state S, S name SN, A eid IN (%s)' % eids) + if showresource: + displaycols = range(7) + headers.insert(0, display_name(self._cw, 'Resource')) + else: # skip resource column if asked to + displaycols = range(1, 7) + rset = self._cw.execute(rql) + self.wview('editable-table', rset, 'null', + displayfilter=True, displayactions=False, + headers=headers, displaycols=displaycols, + cellvids={3: 'editable-final'}) + +To obtain an editable table, specify 'edtitable-table' as vid. You +have to select the entity in the rql request too (in order to kwnow +which entity must be edited). You can specify an optional +`displaycols` argument which defines column's indexes that will be +displayed. In the above example, setting `showresource` to `False` +will only render columns from index 1 to 7. + +The previous example results in: + +.. image:: ../../images/views-table-shadow.png + + +In order to activate table filter mechanism, set the `displayfilter` +argument to True. A small arrow will be displayed at the table's top +right corner. Clicking on `show filter form` action, will display the +filter form as below: + +.. image:: ../../images/views-table-filter-shadow.png + +By the same way, you can display all registered actions for the +selected entity, setting `displayactions` argument to True. diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/devweb/views/views.rst --- a/doc/book/en/devweb/views/views.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/devweb/views/views.rst Wed Nov 03 16:38:28 2010 +0100 @@ -121,7 +121,7 @@ """ __regid__ = 'search-associate' title = _('search for association') - __select__ = one_line_rset() & match_search_state('linksearch') & implements('Any') + __select__ = one_line_rset() & match_search_state('linksearch') & is_instance('Any') XML views, binaries views... 
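As a complement to the `activitytable` example above, here is a minimal sketch of
a view delegating its rendering to 'editable-table'; the `Person` entity type and
its `name` attribute are assumptions used for illustration only:

.. sourcecode:: python

    from cubicweb.selectors import is_instance
    from cubicweb.view import EntityView

    class PersonTable(EntityView):
        __regid__ = 'persontable'
        __select__ = is_instance('Person')

        def call(self):
            # keep the entity variable in the selection so each row stays editable
            eids = ','.join(str(row[0]) for row in self.cw_rset)
            rset = self._cw.execute('Any P,N WHERE P is Person, P name N, '
                                    'P eid IN (%s)' % eids)
            self.wview('editable-table', rset, 'null', displayfilter=True)

Once registered in a cube, such a view can be called with
`self.wview('persontable', rset)` or through the `vid` request parameter.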
diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/images/views-table-filter-shadow.png Binary file doc/book/en/images/views-table-filter-shadow.png has changed diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/images/views-table-filter.png Binary file doc/book/en/images/views-table-filter.png has changed diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/images/views-table-shadow.png Binary file doc/book/en/images/views-table-shadow.png has changed diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/images/views-table.png Binary file doc/book/en/images/views-table.png has changed diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/index.rst --- a/doc/book/en/index.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/index.rst Wed Nov 03 16:38:28 2010 +0100 @@ -62,5 +62,3 @@ * the :ref:`genindex`, * the :ref:`modindex`, - -.. |cubicweb| replace:: *CubicWeb* diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/makefile --- a/doc/book/en/makefile Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/makefile Wed Nov 03 16:38:28 2010 +0100 @@ -11,6 +11,10 @@ PAPER = #BUILDDIR = build BUILDDIR = ~/tmp/cwdoc +CWDIR = ../../.. +JSDIR = ${CWDIR}/web/data +JSTORST = ${CWDIR}/doc/tools/pyjsrest.py +BUILDJS = devweb/js_api # Internal variables for sphinx PAPEROPT_a4 = -D latex_paper_size=a4 @@ -18,6 +22,7 @@ ALLSPHINXOPTS = -d ${BUILDDIR}/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . + .PHONY: help clean html web pickle htmlhelp latex changes linkcheck help: @@ -36,6 +41,7 @@ rm -rf apidoc/ rm -f *.html -rm -rf ${BUILDDIR}/* + -rm -rf ${BUILDJS} all: ${TARGET} apidoc html @@ -48,12 +54,16 @@ epydoc --html -o apidoc -n "cubicweb" --exclude=setup --exclude=__pkginfo__ ../../../ # run sphinx ### -html: +html: js mkdir -p ${BUILDDIR}/html ${BUILDDIR}/doctrees $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) ${BUILDDIR}/html @echo @echo "Build finished. The HTML pages are in ${BUILDDIR}/html." +js: + mkdir -p ${BUILDJS} + $(JSTORST) -p ${JSDIR} -o ${BUILDJS} + pickle: mkdir -p ${BUILDDIR}/pickle ${BUILDDIR}/doctrees $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) ${BUILDDIR}/pickle diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/tutorials/advanced/index.rst --- a/doc/book/en/tutorials/advanced/index.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/tutorials/advanced/index.rst Wed Nov 03 16:38:28 2010 +0100 @@ -335,7 +335,7 @@ .. sourcecode:: python - from cubicweb.selectors import implements + from cubicweb.selectors import is_instance from cubicweb.server import hook class SetVisibilityOp(hook.Operation): @@ -347,7 +347,7 @@ class SetVisibilityHook(hook.Hook): __regid__ = 'sytweb.setvisibility' - __select__ = hook.Hook.__select__ & implements('Folder', 'File', 'Image', 'Comment') + __select__ = hook.Hook.__select__ & is_instance('Folder', 'File', 'Image', 'Comment') events = ('after_add_entity',) def __call__(self): hook.set_operation(self._cw, 'pending_visibility', self.entity.eid, diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/tutorials/base/create-cube.rst --- a/doc/book/en/tutorials/base/create-cube.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/tutorials/base/create-cube.rst Wed Nov 03 16:38:28 2010 +0100 @@ -307,11 +307,11 @@ .. sourcecode:: python - from cubicweb.selectors import implements + from cubicweb.selectors import is_instance from cubicweb.web.views import primary class BlogEntryPrimaryView(primary.PrimaryView): - __select__ = implements('BlogEntry') + __select__ = is_instance('BlogEntry') def render_entity_attributes(self, entity): self.w(u'

<p>published on %s</p>
        ' % @@ -357,7 +357,6 @@ class BlogEntry(AnyEntity): """customized class for BlogEntry entities""" __regid__ = 'BlogEntry' - __implements__ = AnyEntity.__implements__ def display_cw_logo(self): if 'CW' in self.title: @@ -376,7 +375,7 @@ .. sourcecode:: python class BlogEntryPrimaryView(primary.PrimaryView): - __select__ = implements('BlogEntry') + __select__ = is_instance('BlogEntry') ... diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/tutorials/base/maintemplate.rst --- a/doc/book/en/tutorials/base/maintemplate.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/tutorials/base/maintemplate.rst Wed Nov 03 16:38:28 2010 +0100 @@ -123,8 +123,8 @@ .. image:: ../../images/lax-book_06-simple-main-template_en.png -XXX -[WRITE ME] +.. XXX +.. [WRITE ME] * customize MainTemplate and show that everything in the user interface can be changed diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/tutorials/index.rst --- a/doc/book/en/tutorials/index.rst Tue Jul 27 12:36:03 2010 +0200 +++ b/doc/book/en/tutorials/index.rst Wed Nov 03 16:38:28 2010 +0100 @@ -17,3 +17,4 @@ base/index advanced/index + tools/windmill.rst diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/book/en/tutorials/tools/windmill.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/en/tutorials/tools/windmill.rst Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,227 @@ +========================== +Use Windmill with CubicWeb +========================== + +Windmill_ implements cross browser testing, in-browser recording and playback, +and functionality for fast accurate debugging and test environment integration. + +.. _Windmill: http://www.getwindmill.com/ + +`Online features list `_ is available. + + +Installation +============ + +Windmill +-------- + +You have to install Windmill manually for now. If you're using Debian, there is +no binary package (`yet `_). + +The simplest solution is to use a *setuptools/pip* command (for a clean +environment, take a look to the `virtualenv +`_ project as well):: + + pip install windmill + curl -O http://github.com/windmill/windmill/tarball/master + +However, the Windmill project doesn't release frequently. Our recommandation is +to used the last snapshot of the Git repository: + +.. sourcecode:: bash + + git clone git://github.com/windmill/windmill.git HEAD + cd windmill + python setup.py develop + +Install instructions are `available `_. + +Be sure to have the windmill module in your PYTHONPATH afterwards:: + + python -c "import windmill" + +X dummy +------- + +In order to reduce unecessary system load from your test machines, It's +recommended to use X dummy server for testing the Unix web clients, you need a +dummy video X driver (as xserver-xorg-video-dummy package in Debian) coupled +with a light X server as `Xvfb `_. + + The dummy driver is a special driver available with the XFree86 DDX. To use + the dummy driver, simply substitue it for your normal card driver in the + Device section of your xorg.conf configuration file. For example, if you + normally uses an ati driver, then you will have a Device section with + Driver "ati" to let the X server know that you want it to load and use the + ati driver; however, for these conformance tests, you would change that + line to Driver "dummy" and remove any other ati specific options from the + Device section. 
+ + *From: http://www.x.org/wiki/XorgTesting* + +Then, you can run the X server with the following command : + + /usr/bin/X11/Xvfb :1 -ac -screen 0 1280x1024x8 -fbdir /tmp + + +Windmill usage +============== + +Record your use case +-------------------- + +- start your instance manually +- start Windmill_ with url site as last argument (read Usage_ or use *'-h'* + option to find required command line arguments) +- use the record button +- click on save to obtain python code of your use case +- copy the content to a new file in a *windmill* directory + +.. _Usage: http://wiki.github.com/windmill/windmill/running-tests + +If you are using firefox as client, consider the "firebug" option. + +If you have a running instance, you can refine the test by the *loadtest* windmill option: + + windmill -m firebug loadtest= + +Or use the internal windmill shell to explore available commands: + + windmill -m firebug shell + +.. sourcecode:: python + + >>> load_test() + >>> run_test() + + + +Integrate Windmill tests into CubicWeb +====================================== + +Set environment +--------------- + +You have to create a new unit test file and a `windmill` directory and copy all +your windmill use case into it. + +.. sourcecode:: python + + # test_windmill.py + + # Run all scenarii found in windmill directory + from cubicweb.devtools.cwwindmill import (CubicWebWindmillUseCase, + unittest_main) + + if __name__ == '__main__': + unittest_main() + +Run your tests +-------------- + +You can easily run your windmill test suite through `pytest` or :mod:`unittest`. +You have to copy a *test_windmill.py* file from :mod:`web.test`. + +To run your test series:: + + % pytest test/test_windmill.py + +By default, CubicWeb will use **firefox** as the default browser and will try +to run test instance server on localhost. In the general case, You've no need +to change anything. + +Check :class:`cubicweb.devtools.cwwindmill.CubicWebWindmillUseCase` for +Windmill configuration. You can edit windmill settings with following class attributes: + +* browser + identification string (firefox|ie|safari|chrome) (firefox by default) +* test_dir + testing file path or directory (windmill directory under your unit case + file by default) +* edit_test + load and edit test for debugging (False by default) + +Examples: + + browser = 'firefox' + test_dir = osp.join(__file__, 'windmill') + edit_test = False + +If you want to change cubicweb test server parameters, you can check class +variables from :class:`CubicWebServerConfig` or inherit it with overriding the +:var:`configcls` attribute in :class:`CubicWebServerTC` :: + +.. sourcecode:: python + + class OtherCubicWebServerConfig(CubicWebServerConfig): + port = 9999 + + class NewCubicWebServerTC(CubicWebServerTC): + configcls = OtherCubicWebServerConfig + +For instance, CubicWeb framework windmill tests can be manually run by:: + + % pytest web/test/test_windmill.py + +Edit your tests +--------------- + +You can toggle the `edit_test` variable to enable test edition. + +But if you are using `pytest` as test runner, use the `-i` option directly. +The test series will be loaded and you can run assertions step-by-step:: + + % pytest -i test/test_windmill.py + +In this case, the `firebug` extension will be loaded automatically for you. + +Afterwards, don't forget to save your edited test into the right file (no autosave feature). + +Best practises +-------------- + +Don't run another instance on the same port. 
You risk to silence some +regressions (test runner will automatically fail in further versions). + +Start your use case by using an assert on the expected primary url page. +Otherwise all your tests could fail without clear explanation of the used +navigation. + +In the same location of the *test_windmill.py*, create a *windmill/* with your +windmill recorded use cases. + + +Caveats +======= + +File Upload +----------- + +Windmill can't do file uploads. This is a limitation of browser Javascript +support / sandboxing, not of Windmill per se. It would be nice if there were +some command that would prime the Windmill HTTP proxy to add a particular file +to the next HTTP request that comes through, so that uploads could at least be +faked. + +.. http://groups.google.com/group/windmill-dev/browse_thread/thread/cf9dc969722bd6bb/01aa18fdd652f7ff?lnk=gst&q=input+type+file#01aa18fdd652f7ff + +.. http://davisagli.com/blog/in-browser-integration-testing-with-windmill + +.. http://groups.google.com/group/windmill-dev/browse_thread/thread/b7bebcc38ed30dc7 + + +Preferences +=========== + +A *.windmill/prefs.py* could be used to redefine default configuration values. + +.. define CubicWeb preferences in the parent test case instead with a dedicated firefox profile + +For managing browser extensions, read `advanced topic chapter +`_. + +More configuration examples could be seen in *windmill/conf/global_settings.py* +as template. + + diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/coding_standards_css.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/coding_standards_css.rst Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,33 @@ +CSS Coding Standards +-------------------- + +(Draft, to be continued) + +:Naming: camelCase + +Indentation rules +~~~~~~~~~~~~~~~~~ +- 2 espaces avant les propriétés + +- pas d'espace avant les ":", un espace après + +- 1 seul espace entre les différentes valeurs pour une même propriété + + +Documentation +~~~~~~~~~~~~~ +Please keep rules semantically linked grouped together, with a comment about +what they are for. + +Recommendation +~~~~~~~~~~~~~~ +- Try to use existing classes rather than introduce new ones + +- Keep things as simple as possible while in the framework + +- Think about later customization by application + +- Avoid introducing a new CSS file for a few lines of CSS, at least while the + framework doesn't include packing functionalities + + diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/coding_standards_js.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/coding_standards_js.rst Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,37 @@ +Javascript Coding Standards +--------------------------- + +(Draft, to be continued) + +:Naming: camelCase, except for CONSTANTS + +Indentation rules +~~~~~~~~~~~~~~~~~ +- espace avant accolade ouvrante + +- retour à la ligne après accolade ouvrante (éventuellement pas + de retour à la ligne s'il y a tout sur la même ligne, mais ce n'est + pas le cas ici. + +- no tabs + + +Documentation +~~~~~~~~~~~~~ +XXX explain comment format for documentation generation + + +Coding +~~~~~~ +- Don't forget 'var' before variable definition, and semi-colon (';') after **each** statement. 
+- Check the firebug console for deprecation warnings + + +API usage +~~~~~~~~~ +- unless intended, use jQuery('container') rather than jqNode('container') + + +See also +~~~~~~~~ +http://google-styleguide.googlecode.com/svn/trunk/javascriptguide.xml \ No newline at end of file diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/refactoring-the-css-with-uiprops.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/refactoring-the-css-with-uiprops.rst Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,73 @@ +========================================= +Refactoring the CSSs with UI properties +========================================= + +Overview +========= + +Managing styles progressively became difficult in CubicWeb. The +introduction of uiprops is an attempt to fix this problem. + +The goal is to make it possible to use variables in our CSSs. + +These variables are defined or computed in the uiprops.py python file +and inserted in the CSS using the Python string interpolation syntax. + +A quick example, put in ``uiprops.py``:: + + defaultBgColor = '#eee' + +and in your css:: + + body { background-color: %(defaultBgColor)s; } + + +The good practices are: + +- define a variable in uiprops to avoid repetitions in the CSS + (colors, borders, fonts, etc.) + +- define a variable in uiprops when you need to compute values + (compute a color palette, etc.) + +The algorithm implemented in CubicWeb is the following: + +- read uiprops file while walk up the chain of cube dependencies: if + cube myblog depends on cube comment, the variables defined in myblog + will have precedence over the ones in comment + +- replace the %(varname)s in all the CSSs of all the cubes + +Keep in mind that the browser will then interpret the CSSs and apply +the standard cascading mechanism. + +FAQ +==== + +- How do I keep the old style? + + Put ``STYLESHEET = [data('cubicweb.old.css')]`` in your uiprops.py + file and think about something else. + +- What are the changes in cubicweb.css? + + Version 3.9.0 of cubicweb changed the following in the default html + markup and css: + + =============== ================================== + old new + =============== ================================== + .navcol #navColumnLeft, #navColumnRight + #contentcol #contentColumn + .footer #footer + .logo #logo + .simpleMessage .loginMessage + .appMsg (styles are removed from css) + .searchMessage (styles are removed from css) + =============== ================================== + + Introduction of the new cubicweb.reset.css based on Eric Meyer's + reset css. + + Lots of margin, padding, etc. + diff -r f4d1d5d9ccbb -r 90f2f20367bc doc/tools/pyjsrest.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/tools/pyjsrest.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,110 @@ +#!/usr/bin/env python +""" +Parser for Javascript comments. +""" +from __future__ import with_statement + +import sys, os, getopt, re + +def clean_comment(match): + comment = match.group() + comment = strip_stars(comment) + return comment + +# Rest utilities +def rest_title(title, level, level_markups=['=', '=', '-', '~', '+', '`']): + size = len(title) + if level == 0: + return '\n'.join((level_markups[level] * size, title, level_markups[0] * size)) + '\n' + return '\n'.join(('\n' + title, level_markups[level] * size)) + '\n' + +def get_doc_comments(text): + """ + Return a list of all documentation comments in the file text. 
Each + comment is a pair, with the first element being the comment text and + the second element being the line after it, which may be needed to + guess function & arguments. + + >>> get_doc_comments(read_file('examples/module.js'))[0][0][:40] + '/**\n * This is the module documentation.' + >>> get_doc_comments(read_file('examples/module.js'))[1][0][7:50] + 'This is documentation for the first method.' + >>> get_doc_comments(read_file('examples/module.js'))[1][1] + 'function the_first_function(arg1, arg2) ' + >>> get_doc_comments(read_file('examples/module.js'))[2][0] + '/** This is the documentation for the second function. */' + + """ + return [clean_comment(match) for match in re.finditer('/\*\*.*?\*/', + text, re.DOTALL|re.MULTILINE)] + +RE_STARS = re.compile('^\s*?\* ?', re.MULTILINE) + + +def strip_stars(doc_comment): + """ + Strip leading stars from a doc comment. + + >>> strip_stars('/** This is a comment. */') + 'This is a comment.' + >>> strip_stars('/**\n * This is a\n * multiline comment. */') + 'This is a\n multiline comment.' + >>> strip_stars('/** \n\t * This is a\n\t * multiline comment. \n*/') + 'This is a\n multiline comment.' + + """ + return RE_STARS.sub('', doc_comment[3:-2]).strip() + +def parse_js_files(args=sys.argv): + """ + Main command-line invocation. + """ + try: + opts, args = getopt.gnu_getopt(args[1:], 'p:o:h', [ + 'jspath=', 'output=', 'help']) + opts = dict(opts) + except getopt.GetoptError: + usage() + sys.exit(2) + + rst_dir = opts.get('--output') or opts.get('-o') + if rst_dir is None and len(args) != 1: + rst_dir = 'apidocs' + js_dir = opts.get('--jspath') or opts.get('-p') + if not os.path.exists(os.path.join(rst_dir)): + os.makedirs(os.path.join(rst_dir)) + + f_index = open(os.path.join(rst_dir, 'index.rst'), 'wb') + f_index.write(''' +.. toctree:: + :maxdepth: 1 + +''' +) + for js_path, js_dirs, js_files in os.walk(js_dir): + rst_path = re.sub('%s%s*' % (js_dir, os.path.sep), '', js_path) + for js_file in js_files: + if not js_file.endswith('.js'): + continue + if not os.path.exists(os.path.join(rst_dir, rst_path)): + os.makedirs(os.path.join(rst_dir, rst_path)) + rst_content = extract_rest(js_path, js_file) + filename = os.path.join(rst_path, js_file[:-3]) + # add to index + f_index.write(' %s\n' % filename) + # save rst file + with open(os.path.join(rst_dir, filename) + '.rst', 'wb') as f_rst: + f_rst.write(rst_content) + f_index.close() + +def extract_rest(js_dir, js_file): + js_filepath = os.path.join(js_dir, js_file) + filecontent = open(js_filepath, 'U').read() + comments = get_doc_comments(filecontent) + rst = rest_title(js_file, 0) + rst += '.. module:: %s\n\n' % js_file + rst += '\n\n'.join(comments) + return rst + +if __name__ == '__main__': + parse_js_files() diff -r f4d1d5d9ccbb -r 90f2f20367bc entities/__init__.py --- a/entities/__init__.py Tue Jul 27 12:36:03 2010 +0200 +++ b/entities/__init__.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""base application's entities class implementation: `AnyEntity` +"""base application's entities class implementation: `AnyEntity`""" -""" __docformat__ = "restructuredtext en" from warnings import warn @@ -28,33 +27,13 @@ from cubicweb import Unauthorized, typed_eid from cubicweb.entity import Entity -from cubicweb.interfaces import IBreadCrumbs, IFeed - class AnyEntity(Entity): """an entity instance has e_schema automagically set on the class and instances have access to their issuing cursor """ __regid__ = 'Any' - __implements__ = (IBreadCrumbs, IFeed) - - fetch_attrs = ('modification_date',) - @classmethod - def fetch_order(cls, attr, var): - """class method used to control sort order when multiple entities of - this type are fetched - """ - return cls.fetch_unrelated_order(attr, var) - - @classmethod - def fetch_unrelated_order(cls, attr, var): - """class method used to control sort order when multiple entities of - this type are fetched to use in edition (eg propose them to create a - new relation on an edited entity). - """ - if attr == 'modification_date': - return '%s DESC' % var - return None + __implements__ = () # meta data api ########################################################### @@ -63,7 +42,7 @@ for rschema, attrschema in self.e_schema.attribute_definitions(): if rschema.meta: continue - value = self.get_value(rschema.type) + value = self.cw_attr_value(rschema.type) if value: # make the value printable (dates, floats, bytes, etc.) return self.printable_value(rschema.type, value, attrschema.type, @@ -120,32 +99,6 @@ except (Unauthorized, IndexError): return None - def breadcrumbs(self, view=None, recurs=False): - path = [self] - if hasattr(self, 'parent'): - parent = self.parent() - if parent is not None: - try: - path = parent.breadcrumbs(view, True) + [self] - except TypeError: - warn("breadcrumbs method's now takes two arguments " - "(view=None, recurs=False), please update", - DeprecationWarning) - path = parent.breadcrumbs(view) + [self] - if not recurs: - if view is None: - if 'vtitle' in self._cw.form: - # embeding for instance - path.append( self._cw.form['vtitle'] ) - elif view.__regid__ != 'primary' and hasattr(view, 'title'): - path.append( self._cw._(view.title) ) - return path - - ## IFeed interface ######################################################## - - def rss_feed_url(self): - return self.absolute_url(vid='rss') - # abstractions making the whole things (well, some at least) working ###### def sortvalue(self, rtype=None): @@ -154,7 +107,7 @@ """ if rtype is None: return self.dc_title().lower() - value = self.get_value(rtype) + value = self.cw_attr_value(rtype) # do not restrict to `unicode` because Bytes will return a `str` value if isinstance(value, basestring): return self.printable_value(rtype, format='text/plain').lower() @@ -189,35 +142,8 @@ self.__linkto[(rtype, role)] = linkedto return linkedto - # edit controller callbacks ############################################### - - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if hasattr(self, 'parent') and self.parent(): - return self.parent().rest_path(), {} - return str(self.e_schema).lower(), {} - - def pre_web_edit(self): - """callback called by the web editcontroller when an entity will be - created/modified, to let a chance to do some entity specific stuff. - - Do nothing by default. 
- """ - pass - # server side helpers ##################################################### - def notification_references(self, view): - """used to control References field of email send on notification - for this entity. `view` is the notification view. - - Should return a list of eids which can be used to generate message ids - of previously sent email - """ - return () - # XXX: store a reference to the AnyEntity class since it is hijacked in goa # configuration and we need the actual reference to avoid infinite loops # in mro diff -r f4d1d5d9ccbb -r 90f2f20367bc entities/adapters.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/entities/adapters.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,465 @@ +# copyright 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""some basic entity adapter implementations, for interfaces used in the +framework itself. +""" + +__docformat__ = "restructuredtext en" + +from itertools import chain +from warnings import warn + +from logilab.mtconverter import TransformError +from logilab.common.decorators import cached + +from cubicweb.view import EntityAdapter, implements_adapter_compat +from cubicweb.selectors import implements, is_instance, relation_possible +from cubicweb.interfaces import IDownloadable, ITree, IProgress, IMileStone + + +class IEmailableAdapter(EntityAdapter): + __regid__ = 'IEmailable' + __select__ = relation_possible('primary_email') | relation_possible('use_email') + + def get_email(self): + if getattr(self.entity, 'primary_email', None): + return self.entity.primary_email[0].address + if getattr(self.entity, 'use_email', None): + return self.entity.use_email[0].address + return None + + def allowed_massmail_keys(self): + """returns a set of allowed email substitution keys + + The default is to return the entity's attribute list but you might + override this method to allow extra keys. For instance, a Person + class might want to return a `companyname` key. + """ + return set(rschema.type + for rschema, attrtype in self.entity.e_schema.attribute_definitions() + if attrtype.type not in ('Password', 'Bytes')) + + def as_email_context(self): + """returns the dictionary as used by the sendmail controller to + build email bodies. + + NOTE: the dictionary keys should match the list returned by the + `allowed_massmail_keys` method. + """ + return dict( (attr, getattr(self.entity, attr)) + for attr in self.allowed_massmail_keys() ) + + +class INotifiableAdapter(EntityAdapter): + __regid__ = 'INotifiable' + __select__ = is_instance('Any') + + @implements_adapter_compat('INotifiableAdapter') + def notification_references(self, view): + """used to control References field of email send on notification + for this entity. `view` is the notification view. 
+ + Should return a list of eids which can be used to generate message + identifiers of previously sent email(s) + """ + itree = self.entity.cw_adapt_to('ITree') + if itree is not None: + return itree.path()[:-1] + return () + + +class IFTIndexableAdapter(EntityAdapter): + __regid__ = 'IFTIndexable' + __select__ = is_instance('Any') + + def fti_containers(self, _done=None): + if _done is None: + _done = set() + entity = self.entity + _done.add(entity.eid) + containers = tuple(entity.e_schema.fulltext_containers()) + if containers: + for rschema, target in containers: + if target == 'object': + targets = getattr(entity, rschema.type) + else: + targets = getattr(entity, 'reverse_%s' % rschema) + for entity in targets: + if entity.eid in _done: + continue + for container in entity.cw_adapt_to('IFTIndexable').fti_containers(_done): + yield container + yielded = True + else: + yield entity + + # weight in ABCD + entity_weight = 1.0 + attr_weight = {} + + def get_words(self): + """used by the full text indexer to get words to index + + this method should only be used on the repository side since it depends + on the logilab.database package + + :rtype: list + :return: the list of indexable word of this entity + """ + from logilab.database.fti import tokenize + # take care to cases where we're modyfying the schema + entity = self.entity + pending = self._cw.transaction_data.setdefault('pendingrdefs', set()) + words = {} + for rschema in entity.e_schema.indexable_attributes(): + if (entity.e_schema, rschema) in pending: + continue + weight = self.attr_weight.get(rschema, 'C') + try: + value = entity.printable_value(rschema, format='text/plain') + except TransformError: + continue + except: + self.exception("can't add value of %s to text index for entity %s", + rschema, entity.eid) + continue + if value: + words.setdefault(weight, []).extend(tokenize(value)) + for rschema, role in entity.e_schema.fulltext_relations(): + if role == 'subject': + for entity_ in getattr(entity, rschema.type): + merge_weight_dict(words, entity_.cw_adapt_to('IFTIndexable').get_words()) + else: # if role == 'object': + for entity_ in getattr(entity, 'reverse_%s' % rschema.type): + merge_weight_dict(words, entity_.cw_adapt_to('IFTIndexable').get_words()) + return words + +def merge_weight_dict(maindict, newdict): + for weight, words in newdict.iteritems(): + maindict.setdefault(weight, []).extend(words) + +class IDownloadableAdapter(EntityAdapter): + """interface for downloadable entities""" + __regid__ = 'IDownloadable' + __select__ = implements(IDownloadable, warn=False) # XXX for bw compat, else should be abstract + + @implements_adapter_compat('IDownloadable') + def download_url(self, **kwargs): # XXX not really part of this interface + """return an url to download entity's content""" + raise NotImplementedError + @implements_adapter_compat('IDownloadable') + def download_content_type(self): + """return MIME type of the downloadable content""" + raise NotImplementedError + @implements_adapter_compat('IDownloadable') + def download_encoding(self): + """return encoding of the downloadable content""" + raise NotImplementedError + @implements_adapter_compat('IDownloadable') + def download_file_name(self): + """return file name of the downloadable content""" + raise NotImplementedError + @implements_adapter_compat('IDownloadable') + def download_data(self): + """return actual data of the downloadable content""" + raise NotImplementedError + + +class ITreeAdapter(EntityAdapter): + """This adapter has to be overriden to be 
configured using the + tree_relation, child_role and parent_role class attributes to benefit from + this default implementation. + + This adapter provides a tree interface. It has to be overriden to be + configured using the tree_relation, child_role and parent_role class + attributes to benefit from this default implementation. + + This class provides the following methods: + + .. automethod: iterparents + .. automethod: iterchildren + .. automethod: prefixiter + + .. automethod: is_leaf + .. automethod: is_root + + .. automethod: root + .. automethod: parent + .. automethod: children + .. automethod: different_type_children + .. automethod: same_type_children + .. automethod: children_rql + .. automethod: path + """ + __regid__ = 'ITree' + __select__ = implements(ITree, warn=False) # XXX for bw compat, else should be abstract + + child_role = 'subject' + parent_role = 'object' + + @property + def tree_relation(self): + warn('[3.9] tree_attribute is deprecated, define tree_relation on a custom ' + 'ITree for %s instead' % (self.entity.__class__), + DeprecationWarning) + return self.entity.tree_attribute + + # XXX should be removed from the public interface + @implements_adapter_compat('ITree') + def children_rql(self): + """Returns RQL to get the children of the entity.""" + return self.entity.cw_related_rql(self.tree_relation, self.parent_role) + + @implements_adapter_compat('ITree') + def different_type_children(self, entities=True): + """Return children entities of different type as this entity. + + According to the `entities` parameter, return entity objects or the + equivalent result set. + """ + res = self.entity.related(self.tree_relation, self.parent_role, + entities=entities) + eschema = self.entity.e_schema + if entities: + return [e for e in res if e.e_schema != eschema] + return res.filtered_rset(lambda x: x.e_schema != eschema, self.entity.cw_col) + + @implements_adapter_compat('ITree') + def same_type_children(self, entities=True): + """Return children entities of the same type as this entity. + + According to the `entities` parameter, return entity objects or the + equivalent result set. + """ + res = self.entity.related(self.tree_relation, self.parent_role, + entities=entities) + eschema = self.entity.e_schema + if entities: + return [e for e in res if e.e_schema == eschema] + return res.filtered_rset(lambda x: x.e_schema is eschema, self.entity.cw_col) + + @implements_adapter_compat('ITree') + def is_leaf(self): + """Returns True if the entity does not have any children.""" + return len(self.children()) == 0 + + @implements_adapter_compat('ITree') + def is_root(self): + """Returns true if the entity is root of the tree (e.g. has no parent). + """ + return self.parent() is None + + @implements_adapter_compat('ITree') + def root(self): + """Return the root entity of the tree.""" + return self._cw.entity_from_eid(self.path()[0]) + + @implements_adapter_compat('ITree') + def parent(self): + """Returns the parent entity if any, else None (e.g. if we are on the + root). + """ + try: + return self.entity.related(self.tree_relation, self.child_role, + entities=True)[0] + except (KeyError, IndexError): + return None + + @implements_adapter_compat('ITree') + def children(self, entities=True, sametype=False): + """Return children entities. + + According to the `entities` parameter, return entity objects or the + equivalent result set. 
+ """ + if sametype: + return self.same_type_children(entities) + else: + return self.entity.related(self.tree_relation, self.parent_role, + entities=entities) + + @implements_adapter_compat('ITree') + def iterparents(self, strict=True): + """Return an iterator on the parents of the entity.""" + def _uptoroot(self): + curr = self + while True: + curr = curr.parent() + if curr is None: + break + yield curr + curr = curr.cw_adapt_to('ITree') + if not strict: + return chain([self.entity], _uptoroot(self)) + return _uptoroot(self) + + @implements_adapter_compat('ITree') + def iterchildren(self, _done=None): + """Return an iterator over the item's children.""" + if _done is None: + _done = set() + for child in self.children(): + if child.eid in _done: + self.error('loop in %s tree: %s', child.__regid__.lower(), child) + continue + yield child + _done.add(child.eid) + + @implements_adapter_compat('ITree') + def prefixiter(self, _done=None): + """Return an iterator over the item's descendants in a prefixed order.""" + if _done is None: + _done = set() + if self.entity.eid in _done: + return + _done.add(self.entity.eid) + yield self.entity + for child in self.same_type_children(): + for entity in child.cw_adapt_to('ITree').prefixiter(_done): + yield entity + + @cached + @implements_adapter_compat('ITree') + def path(self): + """Returns the list of eids from the root object to this object.""" + path = [] + adapter = self + entity = adapter.entity + while entity is not None: + if entity.eid in path: + self.error('loop in %s tree: %s', entity.__regid__.lower(), entity) + break + path.append(entity.eid) + try: + # check we are not jumping to another tree + if (adapter.tree_relation != self.tree_relation or + adapter.child_role != self.child_role): + break + entity = adapter.parent() + adapter = entity.cw_adapt_to('ITree') + except AttributeError: + break + path.reverse() + return path + + +class IProgressAdapter(EntityAdapter): + """something that has a cost, a state and a progression. + + You should at least override progress_info an in_progress methods on concret + implementations. + """ + __regid__ = 'IProgress' + __select__ = implements(IProgress, warn=False) # XXX for bw compat, should be abstract + + @property + @implements_adapter_compat('IProgress') + def cost(self): + """the total cost""" + return self.progress_info()['estimated'] + + @property + @implements_adapter_compat('IProgress') + def revised_cost(self): + return self.progress_info().get('estimatedcorrected', self.cost) + + @property + @implements_adapter_compat('IProgress') + def done(self): + """what is already done""" + return self.progress_info()['done'] + + @property + @implements_adapter_compat('IProgress') + def todo(self): + """what remains to be done""" + return self.progress_info()['todo'] + + @implements_adapter_compat('IProgress') + def progress_info(self): + """returns a dictionary describing progress/estimated cost of the + version. 
+ + - mandatory keys are (''estimated', 'done', 'todo') + + - optional keys are ('notestimated', 'notestimatedcorrected', + 'estimatedcorrected') + + 'noestimated' and 'notestimatedcorrected' should default to 0 + 'estimatedcorrected' should default to 'estimated' + """ + raise NotImplementedError + + @implements_adapter_compat('IProgress') + def finished(self): + """returns True if status is finished""" + return not self.in_progress() + + @implements_adapter_compat('IProgress') + def in_progress(self): + """returns True if status is not finished""" + raise NotImplementedError + + @implements_adapter_compat('IProgress') + def progress(self): + """returns the % progress of the task item""" + try: + return 100. * self.done / self.revised_cost + except ZeroDivisionError: + # total cost is 0 : if everything was estimated, task is completed + if self.progress_info().get('notestimated'): + return 0. + return 100 + + @implements_adapter_compat('IProgress') + def progress_class(self): + return '' + + +class IMileStoneAdapter(IProgressAdapter): + __regid__ = 'IMileStone' + __select__ = implements(IMileStone, warn=False) # XXX for bw compat, should be abstract + + parent_type = None # specify main task's type + + @implements_adapter_compat('IMileStone') + def get_main_task(self): + """returns the main ITask entity""" + raise NotImplementedError + + @implements_adapter_compat('IMileStone') + def initial_prevision_date(self): + """returns the initial expected end of the milestone""" + raise NotImplementedError + + @implements_adapter_compat('IMileStone') + def eta_date(self): + """returns expected date of completion based on what remains + to be done + """ + raise NotImplementedError + + @implements_adapter_compat('IMileStone') + def completion_date(self): + """returns date on which the subtask has been completed""" + raise NotImplementedError + + @implements_adapter_compat('IMileStone') + def contractors(self): + """returns the list of persons supposed to work on this task""" + raise NotImplementedError diff -r f4d1d5d9ccbb -r 90f2f20367bc entities/authobjs.py --- a/entities/authobjs.py Tue Jul 27 12:36:03 2010 +0200 +++ b/entities/authobjs.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""entity classes user and group entities +"""entity classes user and group entities""" -""" __docformat__ = "restructuredtext en" from logilab.common.decorators import cached diff -r f4d1d5d9ccbb -r 90f2f20367bc entities/lib.py --- a/entities/lib.py Tue Jul 27 12:36:03 2010 +0200 +++ b/entities/lib.py Wed Nov 03 16:38:28 2010 +0100 @@ -48,13 +48,13 @@ @property def email_of(self): - return self.reverse_use_email and self.reverse_use_email[0] + return self.reverse_use_email and self.reverse_use_email[0] or None @property def prefered(self): return self.prefered_form and self.prefered_form[0] or self - @deprecated('use .prefered') + @deprecated('[3.6] use .prefered') def canonical_form(self): return self.prefered_form and self.prefered_form[0] or self @@ -89,14 +89,6 @@ return self.display_address() return super(EmailAddress, self).printable_value(attr, value, attrtype, format) - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.email_of: - return self.email_of.rest_path(), {} - return super(EmailAddress, self).after_deletion_path() - class Bookmark(AnyEntity): """customized class for Bookmark entities""" @@ -133,12 +125,6 @@ except UnknownProperty: return u'' - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - return 'view', {} - class CWCache(AnyEntity): """Cache""" diff -r f4d1d5d9ccbb -r 90f2f20367bc entities/schemaobjs.py --- a/entities/schemaobjs.py Tue Jul 27 12:36:03 2010 +0200 +++ b/entities/schemaobjs.py Wed Nov 03 16:38:28 2010 +0100 @@ -115,14 +115,6 @@ scard, self.relation_type[0].name, ocard, self.to_entity[0].name) - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.relation_type: - return self.relation_type[0].rest_path(), {} - return super(CWRelation, self).after_deletion_path() - @property def rtype(self): return self.relation_type[0] @@ -139,6 +131,7 @@ rschema = self._cw.vreg.schema.rschema(self.rtype.name) return rschema.rdefs[(self.stype.name, self.otype.name)] + class CWAttribute(CWRelation): __regid__ = 'CWAttribute' @@ -160,14 +153,6 @@ def dc_title(self): return '%s(%s)' % (self.cstrtype[0].name, self.value or u'') - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.reverse_constrained_by: - return self.reverse_constrained_by[0].rest_path(), {} - return super(CWConstraint, self).after_deletion_path() - @property def type(self): return self.cstrtype[0].name @@ -201,14 +186,6 @@ def check_expression(self, *args, **kwargs): return self._rqlexpr().check(*args, **kwargs) - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.expression_of: - return self.expression_of.rest_path(), {} - return super(RQLExpression, self).after_deletion_path() - class CWPermission(AnyEntity): __regid__ = 'CWPermission' @@ -218,12 +195,3 @@ if self.label: return '%s (%s)' % (self._cw._(self.name), self.label) return self._cw._(self.name) - - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - permissionof = getattr(self, 'reverse_require_permission', ()) - if 
len(permissionof) == 1: - return permissionof[0].rest_path(), {} - return super(CWPermission, self).after_deletion_path() diff -r f4d1d5d9ccbb -r 90f2f20367bc entities/test/unittest_base.py --- a/entities/test/unittest_base.py Tue Jul 27 12:36:03 2010 +0200 +++ b/entities/test/unittest_base.py Wed Nov 03 16:38:28 2010 +0100 @@ -27,7 +27,7 @@ from cubicweb.devtools.testlib import CubicWebTC from cubicweb import ValidationError -from cubicweb.interfaces import IMileStone, IWorkflowable +from cubicweb.interfaces import IMileStone, ICalendarable from cubicweb.entities import AnyEntity @@ -44,16 +44,16 @@ self.login(u'member') entity = self.request().create_entity('Bookmark', title=u"hello", path=u'project/cubicweb') self.commit() - self.assertEquals(entity.creator.eid, self.member.eid) - self.assertEquals(entity.dc_creator(), u'member') + self.assertEqual(entity.creator.eid, self.member.eid) + self.assertEqual(entity.dc_creator(), u'member') def test_type(self): - self.assertEquals(self.member.dc_type(), 'cwuser') + self.assertEqual(self.member.dc_type(), 'cwuser') def test_entity_meta_attributes(self): # XXX move to yams - self.assertEquals(self.schema['CWUser'].meta_attributes(), {}) - self.assertEquals(dict((str(k), v) for k, v in self.schema['State'].meta_attributes().iteritems()), + self.assertEqual(self.schema['CWUser'].meta_attributes(), {}) + self.assertEqual(dict((str(k), v) for k, v in self.schema['State'].meta_attributes().iteritems()), {'description_format': ('format', 'description')}) @@ -63,20 +63,20 @@ email2 = self.execute('INSERT EmailAddress X: X address "maarten@philips.com"').get_entity(0, 0) email3 = self.execute('INSERT EmailAddress X: X address "toto@logilab.fr"').get_entity(0, 0) email1.set_relations(prefered_form=email2) - self.assertEquals(email1.prefered.eid, email2.eid) - self.assertEquals(email2.prefered.eid, email2.eid) - self.assertEquals(email3.prefered.eid, email3.eid) + self.assertEqual(email1.prefered.eid, email2.eid) + self.assertEqual(email2.prefered.eid, email2.eid) + self.assertEqual(email3.prefered.eid, email3.eid) def test_mangling(self): email = self.execute('INSERT EmailAddress X: X address "maarten.ter.huurne@philips.com"').get_entity(0, 0) - self.assertEquals(email.display_address(), 'maarten.ter.huurne@philips.com') - self.assertEquals(email.printable_value('address'), 'maarten.ter.huurne@philips.com') + self.assertEqual(email.display_address(), 'maarten.ter.huurne@philips.com') + self.assertEqual(email.printable_value('address'), 'maarten.ter.huurne@philips.com') self.vreg.config.global_set_option('mangle-emails', True) - self.assertEquals(email.display_address(), 'maarten.ter.huurne at philips dot com') - self.assertEquals(email.printable_value('address'), 'maarten.ter.huurne at philips dot com') + self.assertEqual(email.display_address(), 'maarten.ter.huurne at philips dot com') + self.assertEqual(email.printable_value('address'), 'maarten.ter.huurne at philips dot com') email = self.execute('INSERT EmailAddress X: X address "syt"').get_entity(0, 0) - self.assertEquals(email.display_address(), 'syt') - self.assertEquals(email.printable_value('address'), 'syt') + self.assertEqual(email.display_address(), 'syt') + self.assertEqual(email.printable_value('address'), 'syt') class CWUserTC(BaseEntityTC): @@ -94,19 +94,19 @@ def test_dc_title_and_name(self): e = self.execute('CWUser U WHERE U login "member"').get_entity(0, 0) - self.assertEquals(e.dc_title(), 'member') - self.assertEquals(e.name(), 'member') + self.assertEqual(e.dc_title(), 'member') + 
self.assertEqual(e.name(), 'member') e.set_attributes(firstname=u'bouah') - self.assertEquals(e.dc_title(), 'member') - self.assertEquals(e.name(), u'bouah') + self.assertEqual(e.dc_title(), 'member') + self.assertEqual(e.name(), u'bouah') e.set_attributes(surname=u'lôt') - self.assertEquals(e.dc_title(), 'member') - self.assertEquals(e.name(), u'bouah lôt') + self.assertEqual(e.dc_title(), 'member') + self.assertEqual(e.name(), u'bouah lôt') def test_allowed_massmail_keys(self): e = self.execute('CWUser U WHERE U login "member"').get_entity(0, 0) # Bytes/Password attributes should be omited - self.assertEquals(e.allowed_massmail_keys(), + self.assertEqual(e.cw_adapt_to('IEmailable').allowed_massmail_keys(), set(('surname', 'firstname', 'login', 'last_login_time', 'creation_date', 'modification_date', 'cwuri', 'eid')) ) @@ -115,8 +115,9 @@ class InterfaceTC(CubicWebTC): def test_nonregr_subclasses_and_mixins_interfaces(self): + from cubicweb.entities.wfobjs import WorkflowableMixIn + WorkflowableMixIn.__implements__ = (ICalendarable,) CWUser = self.vreg['etypes'].etype_class('CWUser') - self.failUnless(implements(CWUser, IWorkflowable)) class MyUser(CWUser): __implements__ = (IMileStone,) self.vreg._loadedmods[__name__] = {} @@ -126,10 +127,10 @@ # a copy is done systematically self.failUnless(issubclass(MyUser_, MyUser)) self.failUnless(implements(MyUser_, IMileStone)) - self.failUnless(implements(MyUser_, IWorkflowable)) + self.failUnless(implements(MyUser_, ICalendarable)) # original class should not have beed modified, only the copy self.failUnless(implements(MyUser, IMileStone)) - self.failIf(implements(MyUser, IWorkflowable)) + self.failIf(implements(MyUser, ICalendarable)) class SpecializedEntityClassesTC(CubicWebTC): @@ -143,7 +144,7 @@ # no specific class for Subdivisions, the default one should be selected eclass = self.select_eclass('SubDivision') self.failUnless(eclass.__autogenerated__) - #self.assertEquals(eclass.__bases__, (AnyEntity,)) + #self.assertEqual(eclass.__bases__, (AnyEntity,)) # build class from most generic to most specific and make # sure the most specific is always selected self.vreg._loadedmods[__name__] = {} @@ -155,12 +156,12 @@ self.failUnless(eclass.__autogenerated__) self.failIf(eclass is Foo) if etype == 'SubDivision': - self.assertEquals(eclass.__bases__, (Foo,)) + self.assertEqual(eclass.__bases__, (Foo,)) else: - self.assertEquals(eclass.__bases__[0].__bases__, (Foo,)) + self.assertEqual(eclass.__bases__[0].__bases__, (Foo,)) # check Division eclass is still selected for plain Division entities eclass = self.select_eclass('Division') - self.assertEquals(eclass.__regid__, 'Division') + self.assertEqual(eclass.__regid__, 'Division') if __name__ == '__main__': unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc entities/test/unittest_wfobjs.py --- a/entities/test/unittest_wfobjs.py Tue Jul 27 12:36:03 2010 +0200 +++ b/entities/test/unittest_wfobjs.py Wed Nov 03 16:38:28 2010 +0100 @@ -43,12 +43,12 @@ wf = add_wf(self, 'Company') foo = wf.add_state(u'foo', initial=True) bar = wf.add_state(u'bar') - self.assertEquals(wf.state_by_name('bar').eid, bar.eid) - self.assertEquals(wf.state_by_name('barrr'), None) + self.assertEqual(wf.state_by_name('bar').eid, bar.eid) + self.assertEqual(wf.state_by_name('barrr'), None) baz = wf.add_transition(u'baz', (foo,), bar, ('managers',)) - self.assertEquals(wf.transition_by_name('baz').eid, baz.eid) - self.assertEquals(len(baz.require_group), 1) - self.assertEquals(baz.require_group[0].name, 'managers') + 
self.assertEqual(wf.transition_by_name('baz').eid, baz.eid) + self.assertEqual(len(baz.require_group), 1) + self.assertEqual(baz.require_group[0].name, 'managers') def test_duplicated_state(self): wf = add_wf(self, 'Company') @@ -56,7 +56,7 @@ self.commit() wf.add_state(u'foo') ex = self.assertRaises(ValidationError, self.commit) - self.assertEquals(ex.errors, {'name-subject': 'workflow already have a state of that name'}) + self.assertEqual(ex.errors, {'name-subject': 'workflow already have a state of that name'}) # no pb if not in the same workflow wf2 = add_wf(self, 'Company') foo = wf2.add_state(u'foo', initial=True) @@ -66,7 +66,7 @@ self.commit() bar.set_attributes(name=u'foo') ex = self.assertRaises(ValidationError, self.commit) - self.assertEquals(ex.errors, {'name-subject': 'workflow already have a state of that name'}) + self.assertEqual(ex.errors, {'name-subject': 'workflow already have a state of that name'}) def test_duplicated_transition(self): wf = add_wf(self, 'Company') @@ -75,7 +75,7 @@ wf.add_transition(u'baz', (foo,), bar, ('managers',)) wf.add_transition(u'baz', (bar,), foo) ex = self.assertRaises(ValidationError, self.commit) - self.assertEquals(ex.errors, {'name-subject': 'workflow already have a transition of that name'}) + self.assertEqual(ex.errors, {'name-subject': 'workflow already have a transition of that name'}) # no pb if not in the same workflow wf2 = add_wf(self, 'Company') foo = wf.add_state(u'foo', initial=True) @@ -87,7 +87,7 @@ self.commit() biz.set_attributes(name=u'baz') ex = self.assertRaises(ValidationError, self.commit) - self.assertEquals(ex.errors, {'name-subject': 'workflow already have a transition of that name'}) + self.assertEqual(ex.errors, {'name-subject': 'workflow already have a transition of that name'}) class WorkflowTC(CubicWebTC): @@ -95,51 +95,55 @@ def setup_database(self): rschema = self.schema['in_state'] for rdef in rschema.rdefs.values(): - self.assertEquals(rdef.cardinality, '1*') + self.assertEqual(rdef.cardinality, '1*') self.member = self.create_user('member') def test_workflow_base(self): e = self.create_user('toto') - self.assertEquals(e.state, 'activated') - e.change_state('deactivated', u'deactivate 1') + iworkflowable = e.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.state, 'activated') + iworkflowable.change_state('deactivated', u'deactivate 1') self.commit() - e.change_state('activated', u'activate 1') + iworkflowable.change_state('activated', u'activate 1') self.commit() - e.change_state('deactivated', u'deactivate 2') + iworkflowable.change_state('deactivated', u'deactivate 2') self.commit() - e.clear_related_cache('wf_info_for', 'object') - self.assertEquals([tr.comment for tr in e.reverse_wf_info_for], + e.cw_clear_relation_cache('wf_info_for', 'object') + self.assertEqual([tr.comment for tr in e.reverse_wf_info_for], ['deactivate 1', 'activate 1', 'deactivate 2']) - self.assertEquals(e.latest_trinfo().comment, 'deactivate 2') + self.assertEqual(iworkflowable.latest_trinfo().comment, 'deactivate 2') def test_possible_transitions(self): user = self.execute('CWUser X').get_entity(0, 0) - trs = list(user.possible_transitions()) - self.assertEquals(len(trs), 1) - self.assertEquals(trs[0].name, u'deactivate') - self.assertEquals(trs[0].destination(None).name, u'deactivated') + iworkflowable = user.cw_adapt_to('IWorkflowable') + trs = list(iworkflowable.possible_transitions()) + self.assertEqual(len(trs), 1) + self.assertEqual(trs[0].name, u'deactivate') + self.assertEqual(trs[0].destination(None).name, 
u'deactivated') # test a std user get no possible transition cnx = self.login('member') # fetch the entity using the new session - trs = list(cnx.user().possible_transitions()) - self.assertEquals(len(trs), 0) + trs = list(cnx.user().cw_adapt_to('IWorkflowable').possible_transitions()) + self.assertEqual(len(trs), 0) def _test_manager_deactivate(self, user): - user.clear_related_cache('in_state', 'subject') - self.assertEquals(len(user.in_state), 1) - self.assertEquals(user.state, 'deactivated') - trinfo = user.latest_trinfo() - self.assertEquals(trinfo.previous_state.name, 'activated') - self.assertEquals(trinfo.new_state.name, 'deactivated') - self.assertEquals(trinfo.comment, 'deactivate user') - self.assertEquals(trinfo.comment_format, 'text/plain') + iworkflowable = user.cw_adapt_to('IWorkflowable') + user.cw_clear_relation_cache('in_state', 'subject') + self.assertEqual(len(user.in_state), 1) + self.assertEqual(iworkflowable.state, 'deactivated') + trinfo = iworkflowable.latest_trinfo() + self.assertEqual(trinfo.previous_state.name, 'activated') + self.assertEqual(trinfo.new_state.name, 'deactivated') + self.assertEqual(trinfo.comment, 'deactivate user') + self.assertEqual(trinfo.comment_format, 'text/plain') return trinfo def test_change_state(self): user = self.user() - user.change_state('deactivated', comment=u'deactivate user') + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.change_state('deactivated', comment=u'deactivate user') trinfo = self._test_manager_deactivate(user) - self.assertEquals(trinfo.transition, None) + self.assertEqual(trinfo.transition, None) def test_set_in_state_bad_wf(self): wf = add_wf(self, 'CWUser') @@ -149,38 +153,41 @@ ex = self.assertRaises(ValidationError, self.session.execute, 'SET X in_state S WHERE X eid %(x)s, S eid %(s)s', {'x': self.user().eid, 's': s.eid}) - self.assertEquals(ex.errors, {'in_state-subject': "state doesn't belong to entity's workflow. " + self.assertEqual(ex.errors, {'in_state-subject': "state doesn't belong to entity's workflow. 
" "You may want to set a custom workflow for this entity first."}) def test_fire_transition(self): user = self.user() - user.fire_transition('deactivate', comment=u'deactivate user') + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate', comment=u'deactivate user') user.clear_all_caches() - self.assertEquals(user.state, 'deactivated') + self.assertEqual(iworkflowable.state, 'deactivated') self._test_manager_deactivate(user) trinfo = self._test_manager_deactivate(user) - self.assertEquals(trinfo.transition.name, 'deactivate') + self.assertEqual(trinfo.transition.name, 'deactivate') def test_goback_transition(self): - wf = self.session.user.current_workflow + wf = self.session.user.cw_adapt_to('IWorkflowable').current_workflow asleep = wf.add_state('asleep') - wf.add_transition('rest', (wf.state_by_name('activated'), wf.state_by_name('deactivated')), - asleep) + wf.add_transition('rest', (wf.state_by_name('activated'), + wf.state_by_name('deactivated')), + asleep) wf.add_transition('wake up', asleep) user = self.create_user('stduser') - user.fire_transition('rest') + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('rest') self.commit() - user.fire_transition('wake up') + iworkflowable.fire_transition('wake up') self.commit() - self.assertEquals(user.state, 'activated') - user.fire_transition('deactivate') + self.assertEqual(iworkflowable.state, 'activated') + iworkflowable.fire_transition('deactivate') self.commit() - user.fire_transition('rest') + iworkflowable.fire_transition('rest') self.commit() - user.fire_transition('wake up') + iworkflowable.fire_transition('wake up') self.commit() user.clear_all_caches() - self.assertEquals(user.state, 'deactivated') + self.assertEqual(iworkflowable.state, 'deactivated') # XXX test managers can change state without matching transition @@ -189,19 +196,19 @@ self.create_user('tutu') cnx = self.login('tutu') req = self.request() - member = req.entity_from_eid(self.member.eid) + iworkflowable = req.entity_from_eid(self.member.eid).cw_adapt_to('IWorkflowable') ex = self.assertRaises(ValidationError, - member.fire_transition, 'deactivate') - self.assertEquals(ex.errors, {'by_transition-subject': "transition may not be fired"}) + iworkflowable.fire_transition, 'deactivate') + self.assertEqual(ex.errors, {'by_transition-subject': "transition may not be fired"}) cnx.close() cnx = self.login('member') req = self.request() - member = req.entity_from_eid(self.member.eid) - member.fire_transition('deactivate') + iworkflowable = req.entity_from_eid(self.member.eid).cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') cnx.commit() ex = self.assertRaises(ValidationError, - member.fire_transition, 'activate') - self.assertEquals(ex.errors, {'by_transition-subject': "transition may not be fired"}) + iworkflowable.fire_transition, 'activate') + self.assertEqual(ex.errors, {'by_transition-subject': "transition may not be fired"}) def test_fire_transition_owned_by(self): self.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", ' @@ -248,45 +255,46 @@ state3 = mwf.add_state(u'state3') swftr1 = mwf.add_wftransition(u'swftr1', swf, state1, [(swfstate2, state2), (swfstate3, state3)]) - self.assertEquals(swftr1.destination(None).eid, swfstate1.eid) + self.assertEqual(swftr1.destination(None).eid, swfstate1.eid) # workflows built, begin test - self.group = self.request().create_entity('CWGroup', name=u'grp1') + group = self.request().create_entity('CWGroup', 
name=u'grp1') self.commit() - self.assertEquals(self.group.current_state.eid, state1.eid) - self.assertEquals(self.group.current_workflow.eid, mwf.eid) - self.assertEquals(self.group.main_workflow.eid, mwf.eid) - self.assertEquals(self.group.subworkflow_input_transition(), None) - self.group.fire_transition('swftr1', u'go') + iworkflowable = group.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.current_state.eid, state1.eid) + self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.subworkflow_input_transition(), None) + iworkflowable.fire_transition('swftr1', u'go') self.commit() - self.group.clear_all_caches() - self.assertEquals(self.group.current_state.eid, swfstate1.eid) - self.assertEquals(self.group.current_workflow.eid, swf.eid) - self.assertEquals(self.group.main_workflow.eid, mwf.eid) - self.assertEquals(self.group.subworkflow_input_transition().eid, swftr1.eid) - self.group.fire_transition('tr1', u'go') + group.clear_all_caches() + self.assertEqual(iworkflowable.current_state.eid, swfstate1.eid) + self.assertEqual(iworkflowable.current_workflow.eid, swf.eid) + self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.subworkflow_input_transition().eid, swftr1.eid) + iworkflowable.fire_transition('tr1', u'go') self.commit() - self.group.clear_all_caches() - self.assertEquals(self.group.current_state.eid, state2.eid) - self.assertEquals(self.group.current_workflow.eid, mwf.eid) - self.assertEquals(self.group.main_workflow.eid, mwf.eid) - self.assertEquals(self.group.subworkflow_input_transition(), None) + group.clear_all_caches() + self.assertEqual(iworkflowable.current_state.eid, state2.eid) + self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.subworkflow_input_transition(), None) # force back to swfstate1 is impossible since we can't any more find # subworkflow input transition ex = self.assertRaises(ValidationError, - self.group.change_state, swfstate1, u'gadget') - self.assertEquals(ex.errors, {'to_state-subject': "state doesn't belong to entity's workflow"}) + iworkflowable.change_state, swfstate1, u'gadget') + self.assertEqual(ex.errors, {'to_state-subject': "state doesn't belong to entity's workflow"}) self.rollback() # force back to state1 - self.group.change_state('state1', u'gadget') - self.group.fire_transition('swftr1', u'au') - self.group.clear_all_caches() - self.group.fire_transition('tr2', u'chapeau') + iworkflowable.change_state('state1', u'gadget') + iworkflowable.fire_transition('swftr1', u'au') + group.clear_all_caches() + iworkflowable.fire_transition('tr2', u'chapeau') self.commit() - self.group.clear_all_caches() - self.assertEquals(self.group.current_state.eid, state3.eid) - self.assertEquals(self.group.current_workflow.eid, mwf.eid) - self.assertEquals(self.group.main_workflow.eid, mwf.eid) - self.assertListEquals(parse_hist(self.group.workflow_history), + group.clear_all_caches() + self.assertEqual(iworkflowable.current_state.eid, state3.eid) + self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid) + self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid) + self.assertListEqual(parse_hist(iworkflowable.workflow_history), [('state1', 'swfstate1', 'swftr1', 'go'), ('swfstate1', 'swfstate2', 'tr1', 'go'), ('swfstate2', 'state2', 'swftr1', 'exiting from subworkflow subworkflow'), @@ -310,7 +318,7 @@ 
mwf.add_wftransition(u'swftr1', swf, state1, [(swfstate2, state2), (swfstate2, state3)]) ex = self.assertRaises(ValidationError, self.commit) - self.assertEquals(ex.errors, {'subworkflow_exit-subject': u"can't have multiple exits on the same state"}) + self.assertEqual(ex.errors, {'subworkflow_exit-subject': u"can't have multiple exits on the same state"}) def test_swf_fire_in_a_row(self): # sub-workflow @@ -337,8 +345,9 @@ self.commit() group = self.request().create_entity('CWGroup', name=u'grp1') self.commit() + iworkflowable = group.cw_adapt_to('IWorkflowable') for trans in ('identify', 'release', 'close'): - group.fire_transition(trans) + iworkflowable.fire_transition(trans) self.commit() @@ -362,6 +371,7 @@ self.commit() group = self.request().create_entity('CWGroup', name=u'grp1') self.commit() + iworkflowable = group.cw_adapt_to('IWorkflowable') for trans, nextstate in (('identify', 'xsigning'), ('xabort', 'created'), ('identify', 'xsigning'), @@ -369,10 +379,10 @@ ('release', 'xsigning'), ('xabort', 'identified') ): - group.fire_transition(trans) + iworkflowable.fire_transition(trans) self.commit() group.clear_all_caches() - self.assertEquals(group.state, nextstate) + self.assertEqual(iworkflowable.state, nextstate) class CustomWorkflowTC(CubicWebTC): @@ -389,41 +399,44 @@ self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': wf.eid, 'x': self.member.eid}) self.member.clear_all_caches() - self.assertEquals(self.member.state, 'activated')# no change before commit + iworkflowable = self.member.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.state, 'activated')# no change before commit self.commit() self.member.clear_all_caches() - self.assertEquals(self.member.current_workflow.eid, wf.eid) - self.assertEquals(self.member.state, 'asleep') - self.assertEquals(self.member.workflow_history, ()) + self.assertEqual(iworkflowable.current_workflow.eid, wf.eid) + self.assertEqual(iworkflowable.state, 'asleep') + self.assertEqual(iworkflowable.workflow_history, ()) def test_custom_wf_replace_state_keep_history(self): """member in inital state with some history, state is redirected and state change is recorded to history """ - self.member.fire_transition('deactivate') - self.member.fire_transition('activate') + iworkflowable = self.member.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') + iworkflowable.fire_transition('activate') wf = add_wf(self, 'CWUser') wf.add_state('asleep', initial=True) self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': wf.eid, 'x': self.member.eid}) self.commit() self.member.clear_all_caches() - self.assertEquals(self.member.current_workflow.eid, wf.eid) - self.assertEquals(self.member.state, 'asleep') - self.assertEquals(parse_hist(self.member.workflow_history), + self.assertEqual(iworkflowable.current_workflow.eid, wf.eid) + self.assertEqual(iworkflowable.state, 'asleep') + self.assertEqual(parse_hist(iworkflowable.workflow_history), [('activated', 'deactivated', 'deactivate', None), ('deactivated', 'activated', 'activate', None), ('activated', 'asleep', None, 'workflow changed to "CWUser"')]) def test_custom_wf_no_initial_state(self): """try to set a custom workflow which has no initial state""" - self.member.fire_transition('deactivate') + iworkflowable = self.member.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') wf = add_wf(self, 'CWUser') wf.add_state('asleep') self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': 
wf.eid, 'x': self.member.eid}) ex = self.assertRaises(ValidationError, self.commit) - self.assertEquals(ex.errors, {'custom_workflow-subject': u'workflow has no initial state'}) + self.assertEqual(ex.errors, {'custom_workflow-subject': u'workflow has no initial state'}) def test_custom_wf_bad_etype(self): """try to set a custom workflow which doesn't apply to entity type""" @@ -432,13 +445,14 @@ self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': wf.eid, 'x': self.member.eid}) ex = self.assertRaises(ValidationError, self.commit) - self.assertEquals(ex.errors, {'custom_workflow-subject': u"workflow isn't a workflow for this type"}) + self.assertEqual(ex.errors, {'custom_workflow-subject': u"workflow isn't a workflow for this type"}) def test_del_custom_wf(self): """member in some state shared by the new workflow, nothing has to be done """ - self.member.fire_transition('deactivate') + iworkflowable = self.member.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') wf = add_wf(self, 'CWUser') wf.add_state('asleep', initial=True) self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', @@ -447,12 +461,12 @@ self.execute('DELETE X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': wf.eid, 'x': self.member.eid}) self.member.clear_all_caches() - self.assertEquals(self.member.state, 'asleep')# no change before commit + self.assertEqual(iworkflowable.state, 'asleep')# no change before commit self.commit() self.member.clear_all_caches() - self.assertEquals(self.member.current_workflow.name, "default user workflow") - self.assertEquals(self.member.state, 'activated') - self.assertEquals(parse_hist(self.member.workflow_history), + self.assertEqual(iworkflowable.current_workflow.name, "default user workflow") + self.assertEqual(iworkflowable.state, 'activated') + self.assertEqual(parse_hist(iworkflowable.workflow_history), [('activated', 'deactivated', 'deactivate', None), ('deactivated', 'asleep', None, 'workflow changed to "CWUser"'), ('asleep', 'activated', None, 'workflow changed to "default user workflow"'),]) @@ -473,28 +487,29 @@ def test_auto_transition_fired(self): wf = self.setup_custom_wf() user = self.create_user('member') + iworkflowable = user.cw_adapt_to('IWorkflowable') self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': wf.eid, 'x': user.eid}) self.commit() user.clear_all_caches() - self.assertEquals(user.state, 'asleep') - self.assertEquals([t.name for t in user.possible_transitions()], + self.assertEqual(iworkflowable.state, 'asleep') + self.assertEqual([t.name for t in iworkflowable.possible_transitions()], ['rest']) - user.fire_transition('rest') + iworkflowable.fire_transition('rest') self.commit() user.clear_all_caches() - self.assertEquals(user.state, 'asleep') - self.assertEquals([t.name for t in user.possible_transitions()], + self.assertEqual(iworkflowable.state, 'asleep') + self.assertEqual([t.name for t in iworkflowable.possible_transitions()], ['rest']) - self.assertEquals(parse_hist(user.workflow_history), + self.assertEqual(parse_hist(iworkflowable.workflow_history), [('asleep', 'asleep', 'rest', None)]) user.set_attributes(surname=u'toto') # fulfill condition self.commit() - user.fire_transition('rest') + iworkflowable.fire_transition('rest') self.commit() user.clear_all_caches() - self.assertEquals(user.state, 'dead') - self.assertEquals(parse_hist(user.workflow_history), + self.assertEqual(iworkflowable.state, 'dead') + 
self.assertEqual(parse_hist(iworkflowable.workflow_history), [('asleep', 'asleep', 'rest', None), ('asleep', 'asleep', 'rest', None), ('asleep', 'dead', 'sick', None),]) @@ -505,7 +520,8 @@ self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': wf.eid, 'x': user.eid}) self.commit() - self.assertEquals(user.state, 'dead') + iworkflowable = user.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.state, 'dead') def test_auto_transition_initial_state_fired(self): wf = self.execute('Any WF WHERE ET default_workflow WF, ' @@ -517,14 +533,15 @@ self.commit() user = self.create_user('member', surname=u'toto') self.commit() - self.assertEquals(user.state, 'dead') + iworkflowable = user.cw_adapt_to('IWorkflowable') + self.assertEqual(iworkflowable.state, 'dead') class WorkflowHooksTC(CubicWebTC): def setUp(self): CubicWebTC.setUp(self) - self.wf = self.session.user.current_workflow + self.wf = self.session.user.cw_adapt_to('IWorkflowable').current_workflow self.session.set_pool() self.s_activated = self.wf.state_by_name('activated').eid self.s_deactivated = self.wf.state_by_name('deactivated').eid @@ -538,7 +555,7 @@ self.commit() initialstate = self.execute('Any N WHERE S name N, X in_state S, X eid %(x)s', {'x' : ueid})[0][0] - self.assertEquals(initialstate, u'activated') + self.assertEqual(initialstate, u'activated') # give access to users group on the user's wf transitions # so we can test wf enforcing on euser (managers don't have anymore this # enforcement @@ -572,18 +589,20 @@ def test_transition_checking1(self): cnx = self.login('stduser') user = cnx.user(self.session) + iworkflowable = user.cw_adapt_to('IWorkflowable') ex = self.assertRaises(ValidationError, - user.fire_transition, 'activate') - self.assertEquals(self._cleanup_msg(ex.errors['by_transition-subject']), + iworkflowable.fire_transition, 'activate') + self.assertEqual(self._cleanup_msg(ex.errors['by_transition-subject']), u"transition isn't allowed from") cnx.close() def test_transition_checking2(self): cnx = self.login('stduser') user = cnx.user(self.session) + iworkflowable = user.cw_adapt_to('IWorkflowable') ex = self.assertRaises(ValidationError, - user.fire_transition, 'dummy') - self.assertEquals(self._cleanup_msg(ex.errors['by_transition-subject']), + iworkflowable.fire_transition, 'dummy') + self.assertEqual(self._cleanup_msg(ex.errors['by_transition-subject']), u"transition isn't allowed from") cnx.close() @@ -591,15 +610,18 @@ cnx = self.login('stduser') session = self.session user = cnx.user(session) - user.fire_transition('deactivate') + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') cnx.commit() session.set_pool() ex = self.assertRaises(ValidationError, - user.fire_transition, 'deactivate') - self.assertEquals(self._cleanup_msg(ex.errors['by_transition-subject']), + iworkflowable.fire_transition, 'deactivate') + self.assertEqual(self._cleanup_msg(ex.errors['by_transition-subject']), u"transition isn't allowed from") + cnx.rollback() + session.set_pool() # get back now - user.fire_transition('activate') + iworkflowable.fire_transition('activate') cnx.commit() cnx.close() diff -r f4d1d5d9ccbb -r 90f2f20367bc entities/wfobjs.py --- a/entities/wfobjs.py Tue Jul 27 12:36:03 2010 +0200 +++ b/entities/wfobjs.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,13 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""workflow definition and history related entities +"""workflow handling: +* entity types defining workflow (Workflow, State, Transition...) +* workflow history (TrInfo) +* adapter for workflowable entities (IWorkflowableAdapter) """ + __docformat__ = "restructuredtext en" from warnings import warn @@ -27,7 +31,8 @@ from logilab.common.compat import any from cubicweb.entities import AnyEntity, fetch_config -from cubicweb.interfaces import IWorkflowable +from cubicweb.view import EntityAdapter +from cubicweb.selectors import relation_possible from cubicweb.mixins import MI_REL_TRIGGERS class WorkflowException(Exception): pass @@ -47,15 +52,6 @@ return any(et for et in self.reverse_default_workflow if et.name == etype) - # XXX define parent() instead? what if workflow of multiple types? - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.workflow_of: - return self.workflow_of[0].rest_path(), {'vid': 'workflow'} - return super(Workflow, self).after_deletion_path() - def iter_workflows(self, _done=None): """return an iterator on actual workflows, eg this workflow and its subworkflows @@ -177,7 +173,7 @@ {'os': todelstate.eid, 'ns': replacement.eid}) execute('SET X to_state NS WHERE X to_state OS, OS eid %(os)s, NS eid %(ns)s', {'os': todelstate.eid, 'ns': replacement.eid}) - todelstate.delete() + todelstate.cw_delete() class BaseTransition(AnyEntity): @@ -226,14 +222,6 @@ return False return True - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.transition_of: - return self.transition_of[0].rest_path(), {} - return super(BaseTransition, self).after_deletion_path() - def set_permissions(self, requiredgroups=(), conditions=(), reset=True): """set or add (if `reset` is False) groups and conditions for this transition @@ -277,7 +265,7 @@ try: return self.destination_state[0] except IndexError: - return entity.latest_trinfo().previous_state + return entity.cw_adapt_to('IWorkflowable').latest_trinfo().previous_state def potential_destinations(self): try: @@ -288,9 +276,6 @@ for previousstate in tr.reverse_allowed_transition: yield previousstate - def parent(self): - return self.workflow - class WorkflowTransition(BaseTransition): """customized class for WorkflowTransition entities""" @@ -331,7 +316,7 @@ return None if tostateeid is None: # go back to state from which we've entered the subworkflow - return entity.subworkflow_input_trinfo().previous_state + return entity.cw_adapt_to('IWorkflowable').subworkflow_input_trinfo().previous_state return self._cw.entity_from_eid(tostateeid) @cached @@ -358,9 +343,6 @@ def destination(self): return self.destination_state and self.destination_state[0] or None - def parent(self): - return self.reverse_subworkflow_exit[0] - class State(AnyEntity): """customized class for State entities""" @@ -371,10 +353,7 @@ @property def workflow(self): # take care, may be missing in multi-sources configuration - return self.state_of and self.state_of[0] - - def parent(self): - return self.workflow + return self.state_of and self.state_of[0] or None class TrInfo(AnyEntity): @@ -399,22 +378,99 @@ def transition(self): return self.by_transition and self.by_transition[0] or None - def parent(self): - return self.for_entity - class WorkflowableMixIn(object): """base mixin providing workflow helper methods for workflowable entities. 
This mixin will be automatically set on class supporting the 'in_state' relation (which implies supporting 'wf_info_for' as well) """ - __implements__ = (IWorkflowable,) + + @property + @deprecated('[3.5] use printable_state') + def displayable_state(self): + return self._cw._(self.state) + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').main_workflow") + def main_workflow(self): + return self.cw_adapt_to('IWorkflowable').main_workflow + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').current_workflow") + def current_workflow(self): + return self.cw_adapt_to('IWorkflowable').current_workflow + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').current_state") + def current_state(self): + return self.cw_adapt_to('IWorkflowable').current_state + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').state") + def state(self): + return self.cw_adapt_to('IWorkflowable').state + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').printable_state") + def printable_state(self): + return self.cw_adapt_to('IWorkflowable').printable_state + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').workflow_history") + def workflow_history(self): + return self.cw_adapt_to('IWorkflowable').workflow_history + + @deprecated('[3.5] get transition from current workflow and use its may_be_fired method') + def can_pass_transition(self, trname): + """return the Transition instance if the current user can fire the + transition with the given name, else None + """ + tr = self.current_workflow and self.current_workflow.transition_by_name(trname) + if tr and tr.may_be_fired(self.eid): + return tr + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').cwetype_workflow()") + def cwetype_workflow(self): + return self.cw_adapt_to('IWorkflowable').main_workflow() + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').latest_trinfo()") + def latest_trinfo(self): + return self.cw_adapt_to('IWorkflowable').latest_trinfo() + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').possible_transitions()") + def possible_transitions(self, type='normal'): + return self.cw_adapt_to('IWorkflowable').possible_transitions(type) + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').fire_transition()") + def fire_transition(self, tr, comment=None, commentformat=None): + return self.cw_adapt_to('IWorkflowable').fire_transition(tr, comment, commentformat) + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').change_state()") + def change_state(self, statename, comment=None, commentformat=None, tr=None): + return self.cw_adapt_to('IWorkflowable').change_state(statename, comment, commentformat, tr) + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').subworkflow_input_trinfo()") + def subworkflow_input_trinfo(self): + return self.cw_adapt_to('IWorkflowable').subworkflow_input_trinfo() + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').subworkflow_input_transition()") + def subworkflow_input_transition(self): + return self.cw_adapt_to('IWorkflowable').subworkflow_input_transition() + + +MI_REL_TRIGGERS[('in_state', 'subject')] = WorkflowableMixIn + + + +class IWorkflowableAdapter(WorkflowableMixIn, EntityAdapter): + """base adapter providing workflow helper methods for workflowable entities. 
+ """ + __regid__ = 'IWorkflowable' + __select__ = relation_possible('in_state') + + @cached + def cwetype_workflow(self): + """return the default workflow for entities of this type""" + # XXX CWEType method + wfrset = self._cw.execute('Any WF WHERE ET default_workflow WF, ' + 'ET name %(et)s', {'et': self.entity.__regid__}) + if wfrset: + return wfrset.get_entity(0, 0) + self.warning("can't find any workflow for %s", self.entity.__regid__) + return None @property def main_workflow(self): """return current workflow applied to this entity""" - if self.custom_workflow: - return self.custom_workflow[0] + if self.entity.custom_workflow: + return self.entity.custom_workflow[0] return self.cwetype_workflow() @property @@ -425,14 +481,14 @@ @property def current_state(self): """return current state entity""" - return self.in_state and self.in_state[0] or None + return self.entity.in_state and self.entity.in_state[0] or None @property def state(self): """return current state name""" try: - return self.in_state[0].name - except IndexError: + return self.current_state.name + except AttributeError: self.warning('entity %s has no state', self) return None @@ -449,26 +505,15 @@ """return the workflow history for this entity (eg ordered list of TrInfo entities) """ - return self.reverse_wf_info_for + return self.entity.reverse_wf_info_for def latest_trinfo(self): """return the latest transition information for this entity""" try: - return self.reverse_wf_info_for[-1] + return self.workflow_history[-1] except IndexError: return None - @cached - def cwetype_workflow(self): - """return the default workflow for entities of this type""" - # XXX CWEType method - wfrset = self._cw.execute('Any WF WHERE ET default_workflow WF, ' - 'ET name %(et)s', {'et': self.__regid__}) - if wfrset: - return wfrset.get_entity(0, 0) - self.warning("can't find any workflow for %s", self.__regid__) - return None - def possible_transitions(self, type='normal'): """generates transition that MAY be fired for the given entity, expected to be in this state @@ -483,16 +528,44 @@ {'x': self.current_state.eid, 'type': type, 'wfeid': self.current_workflow.eid}) for tr in rset.entities(): - if tr.may_be_fired(self.eid): + if tr.may_be_fired(self.entity.eid): yield tr + def subworkflow_input_trinfo(self): + """return the TrInfo which has be recorded when this entity went into + the current sub-workflow + """ + if self.main_workflow.eid == self.current_workflow.eid: + return # doesn't make sense + subwfentries = [] + for trinfo in self.workflow_history: + if (trinfo.transition and + trinfo.previous_state.workflow.eid != trinfo.new_state.workflow.eid): + # entering or leaving a subworkflow + if (subwfentries and + subwfentries[-1].new_state.workflow.eid == trinfo.previous_state.workflow.eid and + subwfentries[-1].previous_state.workflow.eid == trinfo.new_state.workflow.eid): + # leave + del subwfentries[-1] + else: + # enter + subwfentries.append(trinfo) + if not subwfentries: + return None + return subwfentries[-1] + + def subworkflow_input_transition(self): + """return the transition which has went through the current sub-workflow + """ + return getattr(self.subworkflow_input_trinfo(), 'transition', None) + def _add_trinfo(self, comment, commentformat, treid=None, tseid=None): kwargs = {} if comment is not None: kwargs['comment'] = comment if commentformat is not None: kwargs['comment_format'] = commentformat - kwargs['wf_info_for'] = self + kwargs['wf_info_for'] = self.entity if treid is not None: kwargs['by_transition'] = 
self._cw.entity_from_eid(treid) if tseid is not None: @@ -532,51 +605,3 @@ stateeid = state.eid # XXX try to find matching transition? return self._add_trinfo(comment, commentformat, tr and tr.eid, stateeid) - - def subworkflow_input_trinfo(self): - """return the TrInfo which has be recorded when this entity went into - the current sub-workflow - """ - if self.main_workflow.eid == self.current_workflow.eid: - return # doesn't make sense - subwfentries = [] - for trinfo in self.workflow_history: - if (trinfo.transition and - trinfo.previous_state.workflow.eid != trinfo.new_state.workflow.eid): - # entering or leaving a subworkflow - if (subwfentries and - subwfentries[-1].new_state.workflow.eid == trinfo.previous_state.workflow.eid and - subwfentries[-1].previous_state.workflow.eid == trinfo.new_state.workflow.eid): - # leave - del subwfentries[-1] - else: - # enter - subwfentries.append(trinfo) - if not subwfentries: - return None - return subwfentries[-1] - - def subworkflow_input_transition(self): - """return the transition which has went through the current sub-workflow - """ - return getattr(self.subworkflow_input_trinfo(), 'transition', None) - - def clear_all_caches(self): - super(WorkflowableMixIn, self).clear_all_caches() - clear_cache(self, 'cwetype_workflow') - - @deprecated('[3.5] get transition from current workflow and use its may_be_fired method') - def can_pass_transition(self, trname): - """return the Transition instance if the current user can fire the - transition with the given name, else None - """ - tr = self.current_workflow and self.current_workflow.transition_by_name(trname) - if tr and tr.may_be_fired(self.eid): - return tr - - @property - @deprecated('[3.5] use printable_state') - def displayable_state(self): - return self._cw._(self.state) - -MI_REL_TRIGGERS[('in_state', 'subject')] = WorkflowableMixIn diff -r f4d1d5d9ccbb -r 90f2f20367bc entity.py --- a/entity.py Tue Jul 27 12:36:03 2010 +0200 +++ b/entity.py Wed Nov 03 16:38:28 2010 +0100 @@ -19,11 +19,12 @@ __docformat__ = "restructuredtext en" +from copy import copy from warnings import warn from logilab.common import interface -from logilab.common.compat import all from logilab.common.decorators import cached +from logilab.common.deprecation import deprecated from logilab.mtconverter import TransformData, TransformError, xml_escape from rql.utils import rqlvar_maker @@ -50,8 +51,20 @@ return card return '1' +def can_use_rest_path(value): + """return True if value can be used at the end of a Rest URL path""" + if value is None: + return False + value = unicode(value) + # the check for ?, /, & are to prevent problems when running + # behind Apache mod_proxy + if value == u'' or u'?' in value or u'/' in value or u'&' in value: + return False + return True -class Entity(AppObject, dict): + + +class Entity(AppObject): """an entity instance has e_schema automagically set on the class and instances has access to their issuing cursor. 
@@ -106,10 +119,10 @@ if not interface.implements(cls, iface): interface.extend(cls, iface) if role == 'subject': - setattr(cls, rschema.type, SubjectRelation(rschema)) + attr = rschema.type else: attr = 'reverse_%s' % rschema.type - setattr(cls, attr, ObjectRelation(rschema)) + setattr(cls, attr, Relation(rschema, role)) if mixins: # see etype class instantation in cwvreg.ETypeRegistry.etype_class method: # due to class dumping, cls is the generated top level class with actual @@ -124,6 +137,24 @@ cls.__bases__ = tuple(mixins) cls.info('plugged %s mixins on %s', mixins, cls) + fetch_attrs = ('modification_date',) + @classmethod + def fetch_order(cls, attr, var): + """class method used to control sort order when multiple entities of + this type are fetched + """ + return cls.fetch_unrelated_order(attr, var) + + @classmethod + def fetch_unrelated_order(cls, attr, var): + """class method used to control sort order when multiple entities of + this type are fetched to use in edition (eg propose them to create a + new relation on an edited entity). + """ + if attr == 'modification_date': + return '%s DESC' % var + return None + @classmethod def fetch_rql(cls, user, restriction=None, fetchattrs=None, mainvar='X', settype=True, ordermethod='fetch_order'): @@ -192,9 +223,10 @@ destcls._fetch_restrictions(var, varmaker, destcls.fetch_attrs, selection, orderby, restrictions, user, ordermethod, visited=visited) - orderterm = getattr(cls, ordermethod)(attr, var) - if orderterm: - orderby.append(orderterm) + if ordermethod is not None: + orderterm = getattr(cls, ordermethod)(attr, var) + if orderterm: + orderby.append(orderterm) return selection, orderby, restrictions @classmethod @@ -269,17 +301,17 @@ def __init__(self, req, rset=None, row=None, col=0): AppObject.__init__(self, req, rset=rset, row=row, col=col) - dict.__init__(self) - self._related_cache = {} + self._cw_related_cache = {} if rset is not None: self.eid = rset[row][col] else: self.eid = None - self._is_saved = True + self._cw_is_saved = True + self.cw_attr_cache = {} def __repr__(self): return '' % ( - self.e_schema, self.eid, self.keys(), id(self)) + self.e_schema, self.eid, self.cw_attr_cache.keys(), id(self)) def __json_encode__(self): """custom json dumps hook to dump the entity's eid @@ -298,12 +330,18 @@ def __cmp__(self, other): raise NotImplementedError('comparison not implemented for %s' % self.__class__) + def __contains__(self, key): + return key in self.cw_attr_cache + + def __iter__(self): + return iter(self.cw_attr_cache) + def __getitem__(self, key): if key == 'eid': warn('[3.7] entity["eid"] is deprecated, use entity.eid instead', DeprecationWarning, stacklevel=2) return self.eid - return super(Entity, self).__getitem__(key) + return self.cw_attr_cache[key] def __setitem__(self, attr, value): """override __setitem__ to update self.edited_attributes. 
@@ -321,13 +359,13 @@ DeprecationWarning, stacklevel=2) self.eid = value else: - super(Entity, self).__setitem__(attr, value) + self.cw_attr_cache[attr] = value # don't add attribute into skip_security if already in edited # attributes, else we may accidentaly skip a desired security check if hasattr(self, 'edited_attributes') and \ attr not in self.edited_attributes: self.edited_attributes.add(attr) - self.skip_security_attributes.add(attr) + self._cw_skip_security_attributes.add(attr) def __delitem__(self, attr): """override __delitem__ to update self.edited_attributes on cleanup of @@ -345,28 +383,35 @@ del self.entity['load_left'] """ - super(Entity, self).__delitem__(attr) + del self.cw_attr_cache[attr] if hasattr(self, 'edited_attributes'): self.edited_attributes.remove(attr) + def clear(self): + self.cw_attr_cache.clear() + + def get(self, key, default=None): + return self.cw_attr_cache.get(key, default) + def setdefault(self, attr, default): """override setdefault to update self.edited_attributes""" - super(Entity, self).setdefault(attr, default) + value = self.cw_attr_cache.setdefault(attr, default) # don't add attribute into skip_security if already in edited # attributes, else we may accidentaly skip a desired security check if hasattr(self, 'edited_attributes') and \ attr not in self.edited_attributes: self.edited_attributes.add(attr) - self.skip_security_attributes.add(attr) + self._cw_skip_security_attributes.add(attr) + return value def pop(self, attr, default=_marker): """override pop to update self.edited_attributes on cleanup of undesired changes introduced in the entity's dict. See `__delitem__` """ if default is _marker: - value = super(Entity, self).pop(attr) + value = self.cw_attr_cache.pop(attr) else: - value = super(Entity, self).pop(attr, default) + value = self.cw_attr_cache.pop(attr, default) if hasattr(self, 'edited_attributes') and attr in self.edited_attributes: self.edited_attributes.remove(attr) return value @@ -377,27 +422,24 @@ for attr, value in values.items(): self[attr] = value # use self.__setitem__ implementation - def rql_set_value(self, attr, value): - """call by rql execution plan when some attribute is modified - - don't use dict api in such case since we don't want attribute to be - added to skip_security_attributes. - """ - super(Entity, self).__setitem__(attr, value) + def cw_adapt_to(self, interface): + """return an adapter the entity to the given interface name. - def pre_add_hook(self): - """hook called by the repository before doing anything to add the entity - (before_add entity hooks have not been called yet). This give the - occasion to do weird stuff such as autocast (File -> Image for instance). - - This method must return the actual entity to be added. + return None if it can not be adapted. 
""" - return self + try: + cache = self._cw_adapters_cache + except AttributeError: + self._cw_adapters_cache = cache = {} + try: + return cache[interface] + except KeyError: + adapter = self._cw.vreg['adapters'].select_or_none( + interface, self._cw, entity=self) + cache[interface] = adapter + return adapter - def set_eid(self, eid): - self.eid = eid - - def has_eid(self): + def has_eid(self): # XXX cw_has_eid """return True if the entity has an attributed eid (False meaning that the entity has to be created """ @@ -407,38 +449,38 @@ except (ValueError, TypeError): return False - def is_saved(self): + def cw_is_saved(self): """during entity creation, there is some time during which the entity - has an eid attributed though it's not saved (eg during before_add_entity - hooks). You can use this method to ensure the entity has an eid *and* is - saved in its source. + has an eid attributed though it's not saved (eg during + 'before_add_entity' hooks). You can use this method to ensure the entity + has an eid *and* is saved in its source. """ - return self.has_eid() and self._is_saved + return self.has_eid() and self._cw_is_saved @cached - def metainformation(self): + def cw_metainformation(self): res = dict(zip(('type', 'source', 'extid'), self._cw.describe(self.eid))) res['source'] = self._cw.source_defs()[res['source']] return res - def clear_local_perm_cache(self, action): - for rqlexpr in self.e_schema.get_rqlexprs(action): - self._cw.local_perm_cache.pop((rqlexpr.eid, (('x', self.eid),)), None) - - def check_perm(self, action): + def cw_check_perm(self, action): self.e_schema.check_perm(self._cw, action, eid=self.eid) - def has_perm(self, action): + def cw_has_perm(self, action): return self.e_schema.has_perm(self._cw, action, eid=self.eid) - def view(self, __vid, __registry='views', w=None, **kwargs): + def view(self, __vid, __registry='views', w=None, initargs=None, **kwargs): # XXX cw_view """shortcut to apply a view on this entity""" + if initargs is None: + initargs = kwargs + else: + initargs.update(kwargs) view = self._cw.vreg[__registry].select(__vid, self._cw, rset=self.cw_rset, row=self.cw_row, col=self.cw_col, - **kwargs) + **initargs) return view.render(row=self.cw_row, col=self.cw_col, w=w, **kwargs) - def absolute_url(self, *args, **kwargs): + def absolute_url(self, *args, **kwargs): # XXX cw_url """return an absolute url to view this entity""" # use *args since we don't want first argument to be "anonymous" to # avoid potential clash with kwargs @@ -450,11 +492,16 @@ # in linksearch mode, we don't want external urls else selecting # the object for use in the relation is tricky # XXX search_state is web specific - if getattr(self._cw, 'search_state', ('normal',))[0] == 'normal': - kwargs['base_url'] = self.metainformation()['source'].get('base-url') + use_ext_id = False + if 'base_url' not in kwargs and \ + getattr(self._cw, 'search_state', ('normal',))[0] == 'normal': + baseurl = self.cw_metainformation()['source'].get('base-url') + if baseurl: + kwargs['base_url'] = baseurl + use_ext_id = True if method in (None, 'view'): try: - kwargs['_restpath'] = self.rest_path(kwargs.get('base_url')) + kwargs['_restpath'] = self.rest_path(use_ext_id) except TypeError: warn('[3.4] %s: rest_path() now take use_ext_eid argument, ' 'please update' % self.__regid__, DeprecationWarning) @@ -463,14 +510,14 @@ kwargs['rql'] = 'Any X WHERE X eid %s' % self.eid return self._cw.build_url(method, **kwargs) - def rest_path(self, use_ext_eid=False): + def rest_path(self, use_ext_eid=False): # XXX 
cw_rest_path """returns a REST-like (relative) path for this entity""" mainattr, needcheck = self._rest_attr_info() etype = str(self.e_schema) path = etype.lower() if mainattr != 'eid': value = getattr(self, mainattr) - if value is None or unicode(value) == u'': + if not can_use_rest_path(value): mainattr = 'eid' path += '/eid' elif needcheck: @@ -486,12 +533,12 @@ path += '/eid' if mainattr == 'eid': if use_ext_eid: - value = self.metainformation()['extid'] + value = self.cw_metainformation()['extid'] else: value = self.eid return '%s/%s' % (path, self._cw.url_quote(value)) - def attr_metadata(self, attr, metadata): + def cw_attr_metadata(self, attr, metadata): """return a metadata for an attribute (None if unspecified)""" value = getattr(self, '%s_%s' % (attr, metadata), None) if value is None and metadata == 'encoding': @@ -499,7 +546,7 @@ return value def printable_value(self, attr, value=_marker, attrtype=None, - format='text/html', displaytime=True): + format='text/html', displaytime=True): # XXX cw_printable_value """return a displayable value (i.e. unicode string) which may contains html tags """ @@ -518,16 +565,16 @@ # description... if props.internationalizable: value = self._cw._(value) - attrformat = self.attr_metadata(attr, 'format') + attrformat = self.cw_attr_metadata(attr, 'format') if attrformat: - return self.mtc_transform(value, attrformat, format, - self._cw.encoding) + return self._cw_mtc_transform(value, attrformat, format, + self._cw.encoding) elif attrtype == 'Bytes': - attrformat = self.attr_metadata(attr, 'format') + attrformat = self.cw_attr_metadata(attr, 'format') if attrformat: - encoding = self.attr_metadata(attr, 'encoding') - return self.mtc_transform(value.getvalue(), attrformat, format, - encoding) + encoding = self.cw_attr_metadata(attr, 'encoding') + return self._cw_mtc_transform(value.getvalue(), attrformat, format, + encoding) return u'' value = printable_value(self._cw, attrtype, value, props, displaytime=displaytime) @@ -535,8 +582,8 @@ value = xml_escape(value) return value - def mtc_transform(self, data, format, target_format, encoding, - _engine=ENGINE): + def _cw_mtc_transform(self, data, format, target_format, encoding, + _engine=ENGINE): trdata = TransformData(data, format, encoding, appobject=self) data = _engine.convert(trdata, target_format).decode() if format == 'text/html': @@ -545,7 +592,13 @@ # entity cloning ########################################################## - def copy_relations(self, ceid): + def cw_copy(self): + thecopy = copy(self) + thecopy.cw_attr_cache = copy(self.cw_attr_cache) + thecopy._cw_related_cache = {} + return thecopy + + def copy_relations(self, ceid): # XXX cw_copy_relations """copy relations of the object with the given eid on this object (this method is called on the newly created copy, and ceid designates the original entity). 
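The cw_adapt_to hunk above memoizes adapter lookups on the entity itself: the 'adapters' registry is queried once per interface and the result, including None when no adapter applies, is kept in _cw_adapters_cache until cw_clear_relation_cache() flushes it. A minimal, self-contained sketch of that memoization pattern, assuming the registry can be modelled as a plain dict (FakeRegistry, EntityLike and the 'ITree' key are illustrative names, not CubicWeb API):

    class FakeRegistry(object):
        """stand-in for vreg['adapters']: maps an interface name to a factory"""
        def __init__(self, adapters):
            self.adapters = adapters

        def select_or_none(self, interface, entity):
            factory = self.adapters.get(interface)
            return factory(entity) if factory is not None else None

    class EntityLike(object):
        def __init__(self, registry):
            self.registry = registry

        def adapt_to(self, interface):
            try:
                cache = self._adapters_cache
            except AttributeError:
                cache = self._adapters_cache = {}
            try:
                return cache[interface]
            except KeyError:
                adapter = self.registry.select_or_none(interface, self)
                cache[interface] = adapter  # None is cached too, so repeated misses are cheap
                return adapter

    entity = EntityLike(FakeRegistry({'ITree': lambda e: object()}))
    assert entity.adapt_to('ITree') is entity.adapt_to('ITree')  # second call hits the cache
    assert entity.adapt_to('IMissing') is None                   # unknown interface, miss is cached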
@@ -574,7 +627,7 @@ rql = 'SET X %s V WHERE X eid %%(x)s, Y eid %%(y)s, Y %s V' % ( rschema.type, rschema.type) execute(rql, {'x': self.eid, 'y': ceid}) - self.clear_related_cache(rschema.type, 'subject') + self.cw_clear_relation_cache(rschema.type, 'subject') for rschema in self.e_schema.object_relations(): if rschema.meta: continue @@ -592,36 +645,32 @@ rql = 'SET V %s X WHERE X eid %%(x)s, Y eid %%(y)s, V %s Y' % ( rschema.type, rschema.type) execute(rql, {'x': self.eid, 'y': ceid}) - self.clear_related_cache(rschema.type, 'object') + self.cw_clear_relation_cache(rschema.type, 'object') # data fetching methods ################################################### @cached - def as_rset(self): + def as_rset(self): # XXX .cw_as_rset """returns a resultset containing `self` information""" rset = ResultSet([(self.eid,)], 'Any X WHERE X eid %(x)s', {'x': self.eid}, [(self.__regid__,)]) rset.req = self._cw return rset - def to_complete_relations(self): + def _cw_to_complete_relations(self): """by default complete final relations to when calling .complete()""" for rschema in self.e_schema.subject_relations(): if rschema.final: continue targets = rschema.objects(self.e_schema) - if len(targets) > 1: - # ambigous relations, the querier doesn't handle - # outer join correctly in this case - continue if rschema.inlined: matching_groups = self._cw.user.matching_groups - rdef = rschema.rdef(self.e_schema, targets[0]) - if matching_groups(rdef.get_groups('read')) and \ - all(matching_groups(e.get_groups('read')) for e in targets): + if all(matching_groups(e.get_groups('read')) and + rschema.rdef(self.e_schema, e).get_groups('read') + for e in targets): yield rschema, 'subject' - def to_complete_attributes(self, skip_bytes=True, skip_pwd=True): + def _cw_to_complete_attributes(self, skip_bytes=True, skip_pwd=True): for rschema, attrschema in self.e_schema.attribute_definitions(): # skip binary data by default if skip_bytes and attrschema.type == 'Bytes': @@ -638,7 +687,7 @@ yield attr _cw_completed = False - def complete(self, attributes=None, skip_bytes=True, skip_pwd=True): + def complete(self, attributes=None, skip_bytes=True, skip_pwd=True): # XXX cw_complete """complete this entity by adding missing attributes (i.e. 
query the repository to fill the entity) @@ -655,9 +704,9 @@ V = varmaker.next() rql = ['WHERE %s eid %%(x)s' % V] selected = [] - for attr in (attributes or self.to_complete_attributes(skip_bytes, skip_pwd)): + for attr in (attributes or self._cw_to_complete_attributes(skip_bytes, skip_pwd)): # if attribute already in entity, nothing to do - if self.has_key(attr): + if self.cw_attr_cache.has_key(attr): continue # case where attribute must be completed, but is not yet in entity var = varmaker.next() @@ -665,28 +714,24 @@ selected.append((attr, var)) # +1 since this doen't include the main variable lastattr = len(selected) + 1 - if attributes is None: + # don't fetch extra relation if attributes specified or of the entity is + # coming from an external source (may lead to error) + if attributes is None and self.cw_metainformation()['source']['uri'] == 'system': # fetch additional relations (restricted to 0..1 relations) - for rschema, role in self.to_complete_relations(): + for rschema, role in self._cw_to_complete_relations(): rtype = rschema.type - if self.relation_cached(rtype, role): + if self.cw_relation_cached(rtype, role): continue + # at this point we suppose that: + # * this is a inlined relation + # * entity (self) is the subject + # * user has read perm on the relation and on the target entity + assert rschema.inlined + assert role == 'subject' var = varmaker.next() - targettype = rschema.targets(self.e_schema, role)[0] - rdef = rschema.role_rdef(self.e_schema, targettype, role) - card = rdef.role_cardinality(role) - assert card in '1?', '%s %s %s %s' % (self.e_schema, rtype, - role, card) - if role == 'subject': - if card == '1': - rql.append('%s %s %s' % (V, rtype, var)) - else: - rql.append('%s %s %s?' % (V, rtype, var)) - else: - if card == '1': - rql.append('%s %s %s' % (var, rtype, V)) - else: - rql.append('%s? %s %s' % (var, rtype, V)) + # keep outer join anyway, we don't want .complete to crash on + # missing mandatory relation (see #1058267) + rql.append('%s %s %s?' % (V, rtype, var)) selected.append(((rtype, role), var)) if selected: # select V, we need it as the left most selected variable @@ -706,9 +751,9 @@ rrset.req = self._cw else: rrset = self._cw.eid_rset(value) - self.set_related_cache(rtype, role, rrset) + self.cw_set_relation_cache(rtype, role, rrset) - def get_value(self, name): + def cw_attr_value(self, name): """get value for the attribute relation , query the repository to get the value if necessary. 
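The complete() hunk above now builds a single RQL query for everything that is missing: one variable per attribute absent from cw_attr_cache, plus an outer-joined variable for every inlined subject relation that is not cached yet, the trailing '?' guaranteeing that a missing optional target cannot make the whole query fail (the #1058267 comment). A rough, self-contained sketch of that assembly; the attribute and relation names below are illustrative only and the exact formatting in CubicWeb may differ:

    def build_complete_rql(missing_attributes, missing_inlined_relations):
        varmaker = iter('VWXYZABCDEFG')  # stand-in for the varmaker used in the hunk above
        mainvar = next(varmaker)
        restrictions = ['%s eid %%(x)s' % mainvar]
        selection = [mainvar]
        for attr in missing_attributes:
            var = next(varmaker)
            restrictions.append('%s %s %s' % (mainvar, attr, var))
            selection.append(var)
        for rtype in missing_inlined_relations:
            var = next(varmaker)
            # keep an outer join ('?') so a missing optional target does not kill the query
            restrictions.append('%s %s %s?' % (mainvar, rtype, var))
            selection.append(var)
        return 'Any %s WHERE %s' % (','.join(selection), ', '.join(restrictions))

    print(build_complete_rql(['title', 'creation_date'], ['in_state']))
    # Any V,W,X,Y WHERE V eid %(x)s, V title W, V creation_date X, V in_state Y?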
@@ -716,9 +761,9 @@ :param name: name of the attribute to get """ try: - value = self[name] + value = self.cw_attr_cache[name] except KeyError: - if not self.is_saved(): + if not self.cw_is_saved(): return None rql = "Any A WHERE X eid %%(x)s, X %s A" % name try: @@ -740,7 +785,7 @@ self[name] = value = None return value - def related(self, rtype, role='subject', limit=None, entities=False): + def related(self, rtype, role='subject', limit=None, entities=False): # XXX .cw_related """returns a resultset of related entities :param role: is the role played by 'self' in the relation ('subject' or 'object') @@ -748,19 +793,19 @@ :param entities: if True, the entites are returned; if False, a result set is returned """ try: - return self.related_cache(rtype, role, entities, limit) + return self._cw_relation_cache(rtype, role, entities, limit) except KeyError: pass if not self.has_eid(): if entities: return [] return self.empty_rset() - rql = self.related_rql(rtype, role) + rql = self.cw_related_rql(rtype, role) rset = self._cw.execute(rql, {'x': self.eid}) - self.set_related_cache(rtype, role, rset) + self.cw_set_relation_cache(rtype, role, rset) return self.related(rtype, role, limit, entities) - def related_rql(self, rtype, role='subject', targettypes=None): + def cw_related_rql(self, rtype, role='subject', targettypes=None): rschema = self._cw.vreg.schema[rtype] if role == 'subject': restriction = 'E eid %%(x)s, E %s X' % rtype @@ -809,7 +854,7 @@ # generic vocabulary methods ############################################## - def unrelated_rql(self, rtype, targettype, role, ordermethod=None, + def cw_unrelated_rql(self, rtype, targettype, role, ordermethod=None, vocabconstraints=True): """build a rql to fetch `targettype` entities unrelated to this entity using (rtype, role) relation. @@ -871,12 +916,12 @@ return rql, args def unrelated(self, rtype, targettype, role='subject', limit=None, - ordermethod=None): + ordermethod=None): # XXX .cw_unrelated """return a result set of target type objects that may be related by a given relation, with self as subject or object """ try: - rql, args = self.unrelated_rql(rtype, targettype, role, ordermethod) + rql, args = self.cw_unrelated_rql(rtype, targettype, role, ordermethod) except Unauthorized: return self._cw.empty_rset() if limit is not None: @@ -884,18 +929,19 @@ rql = '%s LIMIT %s WHERE %s' % (before, limit, after) return self._cw.execute(rql, args) - # relations cache handling ################################################ + # relations cache handling ################################################# - def relation_cached(self, rtype, role): - """return true if the given relation is already cached on the instance + def cw_relation_cached(self, rtype, role): + """return None if the given relation isn't already cached on the + instance, else the content of the cache (a 2-uple (rset, entities)). 
""" - return self._related_cache.get('%s_%s' % (rtype, role)) + return self._cw_related_cache.get('%s_%s' % (rtype, role)) - def related_cache(self, rtype, role, entities=True, limit=None): + def _cw_relation_cache(self, rtype, role, entities=True, limit=None): """return values for the given relation if it's cached on the instance, else raise `KeyError` """ - res = self._related_cache['%s_%s' % (rtype, role)][entities] + res = self._cw_related_cache['%s_%s' % (rtype, role)][entities] if limit is not None and limit < len(res): if entities: res = res[:limit] @@ -903,10 +949,10 @@ res = res.limit(limit) return res - def set_related_cache(self, rtype, role, rset, col=0): + def cw_set_relation_cache(self, rtype, role, rset): """set cached values for the given relation""" if rset: - related = list(rset.entities(col)) + related = list(rset.entities(0)) rschema = self._cw.vreg.schema.rschema(rtype) if role == 'subject': rcard = rschema.rdef(self.e_schema, related[0].e_schema).cardinality[1] @@ -916,23 +962,24 @@ target = 'subject' if rcard in '?1': for rentity in related: - rentity._related_cache['%s_%s' % (rtype, target)] = ( + rentity._cw_related_cache['%s_%s' % (rtype, target)] = ( self.as_rset(), (self,)) else: related = () - self._related_cache['%s_%s' % (rtype, role)] = (rset, related) + self._cw_related_cache['%s_%s' % (rtype, role)] = (rset, related) - def clear_related_cache(self, rtype=None, role=None): + def cw_clear_relation_cache(self, rtype=None, role=None): """clear cached values for the given relation or the entire cache if no relation is given """ if rtype is None: - self._related_cache = {} + self._cw_related_cache = {} + self._cw_adapters_cache = {} else: assert role - self._related_cache.pop('%s_%s' % (rtype, role), None) + self._cw_related_cache.pop('%s_%s' % (rtype, role), None) - def clear_all_caches(self): + def clear_all_caches(self): # XXX cw_clear_all_caches """flush all caches on this entity. Further attributes/relations access will triggers new database queries to get back values. @@ -942,10 +989,9 @@ # clear attributes cache haseid = 'eid' in self self._cw_completed = False - self.clear() + self.cw_attr_cache.clear() # clear relations cache - for rschema, _, role in self.e_schema.relation_definitions(): - self.clear_related_cache(rschema.type, role) + self.cw_clear_relation_cache() # rest path unique cache try: del self.__unique @@ -954,10 +1000,10 @@ # raw edition utilities ################################################### - def set_attributes(self, **kwargs): + def set_attributes(self, **kwargs): # XXX cw_set_attributes _check_cw_unsafe(kwargs) assert kwargs - assert self._is_saved, "should not call set_attributes while entity "\ + assert self.cw_is_saved(), "should not call set_attributes while entity "\ "hasn't been saved yet" relations = [] for key in kwargs: @@ -972,7 +1018,7 @@ # edited_attributes / skip_security_attributes machinery self.update(kwargs) - def set_relations(self, **kwargs): + def set_relations(self, **kwargs): # XXX cw_set_relations """add relations to the given object. To set a relation where this entity is the object of the relation, use 'reverse_' as argument name. 
@@ -996,28 +1042,42 @@ restr, ','.join(str(r.eid) for r in values)), {'x': self.eid}) - def delete(self, **kwargs): + def cw_delete(self, **kwargs): assert self.has_eid(), self.eid self._cw.execute('DELETE %s X WHERE X eid %%(x)s' % self.e_schema, {'x': self.eid}, **kwargs) # server side utilities ################################################### + def _cw_rql_set_value(self, attr, value): + """call by rql execution plan when some attribute is modified + + don't use dict api in such case since we don't want attribute to be + added to skip_security_attributes. + + This method is for internal use, you should not use it. + """ + self.cw_attr_cache[attr] = value + + def _cw_clear_local_perm_cache(self, action): + for rqlexpr in self.e_schema.get_rqlexprs(action): + self._cw.local_perm_cache.pop((rqlexpr.eid, (('x', self.eid),)), None) + @property - def skip_security_attributes(self): + def _cw_skip_security_attributes(self): try: - return self._skip_security_attributes + return self.__cw_skip_security_attributes except: - self._skip_security_attributes = set() - return self._skip_security_attributes + self.__cw_skip_security_attributes = set() + return self.__cw_skip_security_attributes - def set_defaults(self): + def _cw_set_defaults(self): """set default values according to the schema""" for attr, value in self.e_schema.defaults(): - if not self.has_key(attr): + if not self.cw_attr_cache.has_key(attr): self[str(attr)] = value - def check(self, creation=False): + def _cw_check(self, creation=False): """check this entity against its schema. Only final relation are checked here, constraint on actual relations are checked in hooks """ @@ -1040,60 +1100,33 @@ self.e_schema.check(self, creation=creation, _=_, relations=relations) - def fti_containers(self, _done=None): - if _done is None: - _done = set() - _done.add(self.eid) - containers = tuple(self.e_schema.fulltext_containers()) - if containers: - for rschema, target in containers: - if target == 'object': - targets = getattr(self, rschema.type) - else: - targets = getattr(self, 'reverse_%s' % rschema) - for entity in targets: - if entity.eid in _done: - continue - for container in entity.fti_containers(_done): - yield container - yielded = True - else: - yield self + @deprecated('[3.9] use entity.cw_attr_value(attr)') + def get_value(self, name): + return self.cw_attr_value(name) - def get_words(self): - """used by the full text indexer to get words to index + @deprecated('[3.9] use entity.cw_delete()') + def delete(self, **kwargs): + return self.cw_delete(**kwargs) - this method should only be used on the repository side since it depends - on the logilab.database package + @deprecated('[3.9] use entity.cw_attr_metadata(attr, metadata)') + def attr_metadata(self, attr, metadata): + return self.cw_attr_metadata(attr, metadata) - :rtype: list - :return: the list of indexable word of this entity - """ - from logilab.database.fti import tokenize - # take care to cases where we're modyfying the schema - pending = self._cw.transaction_data.setdefault('pendingrdefs', set()) - words = [] - for rschema in self.e_schema.indexable_attributes(): - if (self.e_schema, rschema) in pending: - continue - try: - value = self.printable_value(rschema, format='text/plain') - except TransformError: - continue - except: - self.exception("can't add value of %s to text index for entity %s", - rschema, self.eid) - continue - if value: - words += tokenize(value) - for rschema, role in self.e_schema.fulltext_relations(): - if role == 'subject': - for entity in 
getattr(self, rschema.type): - words += entity.get_words() - else: # if role == 'object': - for entity in getattr(self, 'reverse_%s' % rschema.type): - words += entity.get_words() - return words + @deprecated('[3.9] use entity.cw_has_perm(action)') + def has_perm(self, action): + return self.cw_has_perm(action) + + @deprecated('[3.9] use entity.cw_set_relation_cache(rtype, role, rset)') + def set_related_cache(self, rtype, role, rset): + self.cw_set_relation_cache(rtype, role, rset) + + @deprecated('[3.9] use entity.cw_clear_relation_cache(rtype, role, rset)') + def clear_related_cache(self, rtype=None, role=None): + self.cw_clear_relation_cache(rtype, role) + + @deprecated('[3.9] use entity.cw_related_rql(rtype, [role, [targettypes]])') + def related_rql(self, rtype, role='subject', targettypes=None): + return self.cw_related_rql(rtype, role, targettypes) # attribute and relation descriptors ########################################## @@ -1108,22 +1141,22 @@ def __get__(self, eobj, eclass): if eobj is None: return self - return eobj.get_value(self._attrname) + return eobj.cw_attr_value(self._attrname) def __set__(self, eobj, value): eobj[self._attrname] = value + class Relation(object): """descriptor that controls schema relation access""" - _role = None # for pylint - def __init__(self, rschema): - self._rschema = rschema + def __init__(self, rschema, role): self._rtype = rschema.type + self._role = role def __get__(self, eobj, eclass): if eobj is None: - raise AttributeError('%s cannot be only be accessed from instances' + raise AttributeError('%s can only be accessed from instances' % self._rtype) return eobj.related(self._rtype, self._role, entities=True) @@ -1131,14 +1164,6 @@ raise NotImplementedError -class SubjectRelation(Relation): - """descriptor that controls schema relation access""" - _role = 'subject' - -class ObjectRelation(Relation): - """descriptor that controls schema relation access""" - _role = 'object' - from logging import getLogger from cubicweb import set_log_methods set_log_methods(Entity, getLogger('cubicweb.entity')) diff -r f4d1d5d9ccbb -r 90f2f20367bc etwist/request.py --- a/etwist/request.py Tue Jul 27 12:36:03 2010 +0200 +++ b/etwist/request.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""Twisted request handler for CubicWeb +"""Twisted request handler for CubicWeb""" -""" __docformat__ = "restructuredtext en" from datetime import datetime @@ -55,9 +54,9 @@ return self._twreq.method def relative_path(self, includeparams=True): - """return the normalized path of the request (ie at least relative - to the instance's root, but some other normalization may be needed - so that the returned path may be used to compare to generated urls + """return the normalized path of the request (ie at least relative to + the instance's root, but some other normalization may be needed so that + the returned path may be used to compare to generated urls :param includeparams: boolean indicating if GET form parameters should be kept in the path @@ -68,8 +67,8 @@ return path def get_header(self, header, default=None, raw=True): - """return the value associated with the given input header, - raise KeyError if the header is not set + """return the value associated with the given input header, raise + KeyError if the header is not set """ if raw: return self._headers_in.getRawHeaders(header, [default])[0] diff -r f4d1d5d9ccbb -r 90f2f20367bc etwist/server.py --- a/etwist/server.py Tue Jul 27 12:36:03 2010 +0200 +++ b/etwist/server.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""twisted server for CubicWeb web instances +"""twisted server for CubicWeb web instances""" -""" __docformat__ = "restructuredtext en" import sys @@ -39,11 +38,11 @@ from twisted.web import static, resource from twisted.web.server import NOT_DONE_YET -from cubicweb.web import dumps from logilab.common.decorators import monkeypatch from cubicweb import AuthenticationError, ConfigurationError, CW_EVENT_MANAGER +from cubicweb.utils import json_dumps from cubicweb.web import Redirect, DirectResponse, StatusResponse, LogOut from cubicweb.web.application import CubicWebPublisher from cubicweb.web.http_headers import generateDateTime @@ -99,12 +98,11 @@ class CubicWebRootResource(resource.Resource): - def __init__(self, config, debug=None): - self.debugmode = debug + def __init__(self, config, vreg=None): self.config = config # instantiate publisher here and not in init_publisher to get some # checks done before daemonization (eg versions consistency) - self.appli = CubicWebPublisher(config, debug=self.debugmode) + self.appli = CubicWebPublisher(config, vreg=vreg) self.base_url = config['base-url'] self.https_url = config['https-url'] self.children = {} @@ -118,8 +116,6 @@ # when we have an in-memory repository, clean unused sessions every XX # seconds and properly shutdown the server if config.repo_method == 'inmemory': - reactor.addSystemEventTrigger('before', 'shutdown', - self.shutdown_event) if config.pyro_enabled(): # if pyro is enabled, we have to register to the pyro name # server, create a pyro daemon, and create a task to handle pyro @@ -127,7 +123,10 @@ self.pyro_daemon = self.appli.repo.pyro_register() self.pyro_listen_timeout = 0.02 self.appli.repo.looping_task(1, self.pyro_loop_event) - self.appli.repo.start_looping_tasks() + if config.mode != 'test': + reactor.addSystemEventTrigger('before', 'shutdown', + self.shutdown_event) + self.appli.repo.start_looping_tasks() self.set_url_rewriter() CW_EVENT_MANAGER.bind('after-registry-reload', self.set_url_rewriter) @@ -156,6 +155,9 @@ pre_path = request.path.split('/')[1:] if pre_path[0] == 'https': pre_path.pop(0) + uiprops = self.config.https_uiprops + 
else: + uiprops = self.config.uiprops directory = pre_path[0] # Anything in data/, static/, fckeditor/ and the generated versioned # data directory is treated as static files @@ -165,7 +167,7 @@ if directory == 'static': return File(self.config.static_directory) if directory == 'fckeditor': - return File(self.config.ext_resources['FCKEDITOR_PATH']) + return File(uiprops['FCKEDITOR_PATH']) if directory != 'data': # versioned directory, use specific file with http cache # headers so their are cached for a very long time @@ -173,10 +175,10 @@ else: cls = File if path == 'fckeditor': - return cls(self.config.ext_resources['FCKEDITOR_PATH']) + return cls(uiprops['FCKEDITOR_PATH']) if path == directory: # recurse return self - datadir = self.config.locate_resource(path) + datadir, path = self.config.locate_resource(path) if datadir is None: return self # recurse self.debug('static file %s from %s', path, datadir) @@ -187,7 +189,10 @@ def render(self, request): """Render a page from the root resource""" # reload modified files in debug mode - if self.debugmode: + if self.config.debugmode: + self.config.uiprops.reload_if_needed() + if self.https_url: + self.config.https_uiprops.reload_if_needed() self.appli.vreg.reload_if_needed() if self.config['profile']: # default profiler don't trace threads return self.render_request(request) @@ -312,12 +317,12 @@ self.setResponseCode(http.BAD_REQUEST) if path in JSON_PATHS: # XXX better json path detection self.setHeader('content-type',"application/json") - body = dumps({'reason': 'request max size exceeded'}) + body = json_dumps({'reason': 'request max size exceeded'}) elif path in FRAME_POST_PATHS: # XXX better frame post path detection self.setHeader('content-type',"text/html") body = ('' % dumps( (False, 'request max size exceeded', None) )) + '' % json_dumps( (False, 'request max size exceeded', None) )) else: self.setHeader('content-type',"text/html") body = ("Processing Failed" @@ -394,20 +399,22 @@ LOGGER = getLogger('cubicweb.twisted') set_log_methods(CubicWebRootResource, LOGGER) -def run(config, debug): +def run(config, vreg=None, debug=None): + if debug is not None: + config.debugmode = debug + config.check_writeable_uid_directory(config.appdatahome) # create the site - root_resource = CubicWebRootResource(config, debug) + root_resource = CubicWebRootResource(config, vreg=vreg) website = server.Site(root_resource) # serve it via standard HTTP on port set in the configuration port = config['port'] or 8080 reactor.listenTCP(port, website) - logger = getLogger('cubicweb.twisted') - if not debug: + if not config.debugmode: if sys.platform == 'win32': raise ConfigurationError("Under windows, you must use the service management " "commands (e.g : 'net start my_instance)'") from logilab.common.daemon import daemonize - print 'instance starting in the background' + LOGGER.info('instance started in the background on %s', root_resource.base_url) if daemonize(config['pid-file']): return # child process root_resource.init_publisher() # before changing uid @@ -419,7 +426,7 @@ uid = getpwnam(config['uid']).pw_uid os.setuid(uid) root_resource.start_service() - logger.info('instance started on %s', root_resource.base_url) + LOGGER.info('instance started on %s', root_resource.base_url) # avoid annoying warnign if not in Main Thread signals = threading.currentThread().getName() == 'MainThread' if config['profile']: diff -r f4d1d5d9ccbb -r 90f2f20367bc etwist/service.py --- a/etwist/service.py Tue Jul 27 12:36:03 2010 +0200 +++ b/etwist/service.py Wed Nov 03 
16:38:28 2010 +0100 @@ -72,8 +72,9 @@ # create the site config = cwcfg.config_for(self.instance) config.init_log(force=True) + config.debugmode = False logger.info('starting cubicweb instance %s ', self.instance) - root_resource = CubicWebRootResource(config, False) + root_resource = CubicWebRootResource(config) website = server.Site(root_resource) # serve it via standard HTTP on port set in the configuration port = config['port'] or 8080 diff -r f4d1d5d9ccbb -r 90f2f20367bc etwist/twctl.py --- a/etwist/twctl.py Tue Jul 27 12:36:03 2010 +0200 +++ b/etwist/twctl.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""cubicweb-clt handlers for twisted - -""" +"""cubicweb-clt handlers for twisted""" from cubicweb.toolsutils import CommandHandler from cubicweb.web.webctl import WebCreateHandler @@ -32,9 +30,9 @@ cmdname = 'start' cfgname = 'twisted' - def start_server(self, config, debug): + def start_server(self, config): from cubicweb.etwist import server - server.run(config, debug) + server.run(config) class TWStopHandler(CommandHandler): cmdname = 'stop' diff -r f4d1d5d9ccbb -r 90f2f20367bc ext/rest.py --- a/ext/rest.py Tue Jul 27 12:36:03 2010 +0200 +++ b/ext/rest.py Wed Nov 03 16:38:28 2010 +0100 @@ -229,7 +229,7 @@ :rtype: unicode :return: - the data formatted as HTML or the original data if an error occured + the data formatted as HTML or the original data if an error occurred """ req = context._cw if isinstance(data, unicode): @@ -242,8 +242,14 @@ data = data.translate(ESC_CAR_TABLE) settings = {'input_encoding': encoding, 'output_encoding': 'unicode', 'warning_stream': StringIO(), + 'traceback': True, # don't sys.exit + 'stylesheet': None, # don't try to embed stylesheet (may cause + # obscure bug due to docutils computing + # relative path according to the directory + # used *at import time* # dunno what's the max, severe is 4, and we never want a crash - # (though try/except may be a better option...) + # (though try/except may be a better option...). May be the + # above traceback option will avoid this? 'halt_level': 10, } if context: diff -r f4d1d5d9ccbb -r 90f2f20367bc ext/test/unittest_rest.py --- a/ext/test/unittest_rest.py Tue Jul 27 12:36:03 2010 +0200 +++ b/ext/test/unittest_rest.py Wed Nov 03 16:38:28 2010 +0100 @@ -29,9 +29,9 @@ def test_eid_role(self): context = self.context() - self.assertEquals(rest_publish(context, ':eid:`%s`' % context.eid), + self.assertEqual(rest_publish(context, ':eid:`%s`' % context.eid), '

#%s \n' % context.eid) - self.assertEquals(rest_publish(context, ':eid:`%s:some text`' % context.eid), ' some text
        \n') def test_bad_rest_no_crash(self): diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/__init__.py --- a/goa/__init__.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,159 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""cubicweb on google appengine - -""" -__docformat__ = "restructuredtext en" - - -try: - # WARNING: do not import the google's db module here since it will take - # precedence over our own db submodule - from google.appengine.api.datastore import Key, Get, Query - from google.appengine.api.datastore_errors import BadKeyError -except ImportError: - # not in google app environment - pass -else: - - import os - _SS = os.environ.get('SERVER_SOFTWARE') - if _SS is None: - MODE = 'test' - elif _SS.startswith('Dev'): - MODE = 'dev' - else: - MODE = 'prod' - - from cubicweb.server import SOURCE_TYPES - from cubicweb.goa.gaesource import GAESource - SOURCE_TYPES['gae'] = GAESource - - - def do_monkey_patch(): - - # monkey patch yams Bytes validator since it should take a bytes string with gae - # and not a StringIO - def check_bytes(eschema, value): - """check value is a bytes string""" - return isinstance(value, str) - from yams import constraints - constraints.BASE_CHECKERS['Bytes'] = check_bytes - - def rql_for_eid(eid): - return 'Any X WHERE X eid "%s"' % eid - from cubicweb import uilib - uilib.rql_for_eid = rql_for_eid - - def typed_eid(eid): - try: - return str(Key(eid)) - except BadKeyError: - raise ValueError(eid) - import cubicweb - cubicweb.typed_eid = typed_eid - - # XXX monkey patch cubicweb.schema.CubicWebSchema to have string eid with - # optional cardinality (since eid is set after the validation) - - import re - from yams import buildobjs as ybo - - def add_entity_type(self, edef): - edef.name = edef.name.encode() - assert re.match(r'[A-Z][A-Za-z0-9]*[a-z]+[0-9]*$', edef.name), repr(edef.name) - eschema = super(CubicWebSchema, self).add_entity_type(edef) - if not eschema.final: - # automatically add the eid relation to non final entity types - rdef = ybo.RelationDefinition(eschema.type, 'eid', 'Bytes', - cardinality='?1', uid=True) - self.add_relation_def(rdef) - rdef = ybo.RelationDefinition(eschema.type, 'identity', eschema.type) - self.add_relation_def(rdef) - self._eid_index[eschema.eid] = eschema - return eschema - - from cubicweb.schema import CubicWebSchema - CubicWebSchema.add_entity_type = add_entity_type - - - # don't reset vreg on repository set_schema - from cubicweb.server import repository - orig_set_schema = repository.Repository.set_schema - def set_schema(self, schema, resetvreg=True): - orig_set_schema(self, schema, False) - repository.Repository.set_schema = set_schema - # deactivate function ensuring relation cardinality consistency - 
repository.del_existing_rel_if_needed = lambda *args: None - - def get_cubes(self): - """return the list of top level cubes used by this instance""" - config = self.config - cubes = config['included-cubes'] + config['included-yams-cubes'] - return config.expand_cubes(cubes) - repository.Repository.get_cubes = get_cubes - - from rql import RQLHelper - RQLHelper.simplify = lambda x, r: None - - # activate entity caching on the server side - - def set_entity_cache(self, entity): - self.transaction_data.setdefault('_eid_cache', {})[entity.eid] = entity - - def entity_cache(self, eid): - return self.transaction_data['_eid_cache'][eid] - - def drop_entity_cache(self, eid=None): - if eid is None: - self.transaction_data['_eid_cache'] = {} - elif '_eid_cache' in self.transaction_data: - self.transaction_data['_eid_cache'].pop(eid, None) - - def datastore_get(self, key): - if isinstance(key, basestring): - key = Key(key) - try: - gentity = self.transaction_data['_key_cache'][key] - #self.critical('cached %s', gentity) - except KeyError: - gentity = Get(key) - #self.critical('Get %s', gentity) - self.transaction_data.setdefault('_key_cache', {})[key] = gentity - return gentity - - def clear_datastore_cache(self, key=None): - if key is None: - self.transaction_data['_key_cache'] = {} - else: - if isinstance(key, basestring): - key = Key(key) - self.transaction_data['_key_cache'].pop(key, None) - - from cubicweb.server.session import Session - Session.set_entity_cache = set_entity_cache - Session.entity_cache = entity_cache - Session.drop_entity_cache = drop_entity_cache - Session.datastore_get = datastore_get - Session.clear_datastore_cache = clear_datastore_cache - - from docutils.frontend import OptionParser - # avoid a call to expanduser which is not available under gae - def get_standard_config_files(self): - return self.standard_config_files - OptionParser.get_standard_config_files = get_standard_config_files diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/appobjects/__init__.py --- a/goa/appobjects/__init__.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,17 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/appobjects/components.py --- a/goa/appobjects/components.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,105 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""overrides some base views for cubicweb on google appengine - -""" -__docformat__ = "restructuredtext en" - -from logilab.mtconverter import xml_escape - -from cubicweb import typed_eid -from cubicweb.selectors import one_line_rset, match_search_state, accept -from cubicweb.schema import display_name -from cubicweb.view import StartupView, EntityView -from cubicweb.web import Redirect -from cubicweb.web.views import vid_from_rset - -from google.appengine.api import mail - - -class SearchForAssociationView(EntityView): - """view called by the edition view when the user asks - to search for something to link to the edited eid - """ - id = 'search-associate' - - __select__ = one_line_rset() & match_search_state('linksearch') & accept - - def cell_call(self, row, col): - entity = self.rset.get_entity(0, 0) - role, eid, rtype, etype = self.req.search_state[1] - assert entity.eid == typed_eid(eid) - rset = entity.unrelated(rtype, etype, role, ordermethod='fetch_order') - vid = vid_from_rset(self.req, rset, self.schema) - self.w(u'
        ') - self.pagination(self.req, rset, w=self.w) - self.wview(vid, rset) - self.w(u'
        ') - - -class SchemaImageView(StartupView): - id = 'schemagraph' - binary = True - content_type = 'image/png' - def call(self): - """display global schema information""" - skipmeta = int(self.req.form.get('skipmeta', 1)) - if skipmeta: - url = self.build_url('data/schema.png') - else: - url = self.build_url('data/metaschema.png') - raise Redirect(url) - - -from cubicweb.web.views.baseviews import MetaDataView - -class GAEMetaDataView(MetaDataView): - show_eid = False - - -from cubicweb.web.views.startup import ManageView - -def entity_types_no_count(self, eschemas): - """return a list of formatted links to get a list of entities of - a each entity's types - """ - req = self.req - for eschema in eschemas: - if eschema.final or not (eschema.has_perm(req, 'read') or - eschema.has_local_role('read')): - continue - etype = eschema.type - label = display_name(req, etype, 'plural') - view = self.vreg.select('views', 'list', req, req.etype_rset(etype)) - url = view.url() - etypelink = u' %s' % (xml_escape(url), label) - yield (label, etypelink, self.add_entity_link(eschema, req)) - -ManageView.entity_types = entity_types_no_count - - -from cubicweb.web.views.basecontrollers import SendMailController - -def sendmail(self, recipient, subject, body): - sender = '%s <%s>' % ( - self.req.user.dc_title() or self.config['sender-name'], - self.req.user.get_email() or self.config['sender-addr']) - mail.send_mail(sender=sender, to=recipient, - subject=subject, body=body) - -SendMailController.sendmail = sendmail diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/appobjects/dbmgmt.py --- a/goa/appobjects/dbmgmt.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,200 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""special management views to manage repository content (initialization and -restoration). 
- -""" -__docformat__ = "restructuredtext en" - -from os.path import exists, join, abspath -from pickle import loads, dumps - -from logilab.common.decorators import cached -from logilab.mtconverter import xml_escape - -from cubicweb.selectors import none_rset, match_user_groups -from cubicweb.view import StartupView -from cubicweb.web import Redirect -from cubicweb.goa.dbinit import fix_entities, init_persistent_schema, insert_versions - -from google.appengine.api.datastore import Entity, Key, Get, Put, Delete -from google.appengine.api.datastore_types import Blob -from google.appengine.api.datastore_errors import EntityNotFoundError - - -def _get_status(name, create=True): - key = Key.from_path('EApplicationStatus', name) - try: - status = Get(key) - except EntityNotFoundError: - if create: - status = Entity('EApplicationStatus', name=name) - else: - status = None - return status - - -class AuthInfo(StartupView): - """special management view to get cookie values to give to laxctl commands - which are doing datastore administration requests - """ - id = 'authinfo' - __select__ = none_rset() & match_user_groups('managers') - - def call(self): - cookie = self.req.get_cookie() - values = [] - if self.config['use-google-auth']: - for param in ('ACSID', 'dev_appserver_login'): - morsel = cookie.get(param) - if morsel: - values.append('%s=%s' % (param, morsel.value)) - break - values.append('__session=%s' % cookie['__session'].value) - self.w(u"

pass this flag to the client: --cookie='%s'
        " - % xml_escape('; '.join(values))) - - - -class ContentInit(StartupView): - """special management view to initialize content of a repository, - step by step to avoid depassing quotas - """ - id = 'contentinit' - __select__ = none_rset() & match_user_groups('managers') - - def server_session(self): - ssession = self.config.repo_session(self.req.cnx.sessionid) - ssession.set_pool() - return ssession - - def end_core_step(self, msg, status, stepid): - status['cpath'] = '' - status['stepid'] = stepid - Put(status) - self.msg(msg) - - def call(self): - status = _get_status('creation') - if status.get('finished'): - self.redirect('process already completed') - config = self.config - # execute cubicweb's post script - #mhandler.exec_event_script('post%s' % event) - # execute cubes'post script if any - paths = [p for p in config.cubes_path() + [config.apphome] - if exists(join(p, 'migration'))] - paths = [abspath(p) for p in (reversed(paths))] - cpath = status.get('cpath') - if cpath is None and status.get('stepid') is None: - init_persistent_schema(self.server_session(), self.schema) - self.end_core_step(u'inserted schema entities', status, 0) - return - if cpath == '' and status.get('stepid') == 0: - fix_entities(self.schema) - self.end_core_step(u'fixed bootstrap groups and users', status, 1) - return - if cpath == '' and status.get('stepid') == 1: - insert_versions(self.server_session(), self.config) - self.end_core_step(u'inserted software versions', status, None) - return - for i, path in enumerate(paths): - if not cpath or cpath == path: - self.info('running %s', path) - stepid = status.get('stepid') - context = status.get('context') - if context is not None: - context = loads(context) - else: - context = {} - stepid = self._migrhandler.exec_event_script( - 'postcreate', path, 'stepable_postcreate', stepid, context) - if stepid is None: # finished for this script - # reset script state - context = stepid = None - # next time, go to the next script - self.msg(u'finished postcreate for %s' % path) - try: - path = paths[i+1] - self.continue_link() - except IndexError: - status['finished'] = True - path = None - self.redirect('process completed') - else: - if context.get('stepidx'): - self.msg(u'created %s entities for step %s of %s' % ( - context['stepidx'], stepid, path)) - else: - self.msg(u'finished postcreate step %s for %s' % ( - stepid, path)) - context = Blob(dumps(context)) - self.continue_link() - status['context'] = context - status['stepid'] = stepid - status['cpath'] = path - break - else: - if not cpath: - # nothing to be done - status['finished'] = True - self.redirect('process completed') - else: - # Note the error: is expected by the laxctl command line tool, - # deal with this if internationalization is introduced - self.msg(u'error: strange creation state, can\'t find %s' - % cpath) - self.w(u'
        click here to ' - 'delete all datastore content so process can be ' - 'reinitialized
        ' % xml_escape(self.req.base_url())) - Put(status) - - @property - @cached - def _migrhandler(self): - return self.config.migration_handler(self.schema, interactive=False, - cnx=self.req.cnx, - repo=self.config.repository()) - - def msg(self, msg): - self.w(u'
        %s
        ' % xml_escape(msg)) - def redirect(self, msg): - raise Redirect(self.req.build_url('', msg)) - def continue_link(self): - self.w(u'continue
        ' % xml_escape(self.req.url())) - - -class ContentClear(StartupView): - id = 'contentclear' - __select__ = none_rset() & match_user_groups('managers') - skip_etypes = ('CWGroup', 'CWUser') - - def call(self): - # XXX should use unsafe execute with all hooks deactivated - # XXX step by catching datastore errors? - for eschema in self.schema.entities(): - if eschema.final or eschema in self.skip_etypes: - continue - self.req.execute('DELETE %s X' % eschema) - self.w(u'deleted all %s entities
        ' % eschema) - status = _get_status('creation', create=False) - if status: - Delete(status) - self.w(u'done
        ') - self.w(u'click here to start the data ' - 'initialization process
        ' % self.req.base_url()) diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/appobjects/gauthservice.py --- a/goa/appobjects/gauthservice.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,44 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""authentication using google authentication service - -""" -__docformat__ = "restructuredtext en" - -from cubicweb.web.views.basecomponents import UserLink -from cubicweb.web.views.actions import LogoutAction - -from google.appengine.api import users - - -class GACWUserLink(UserLink): - - def anon_user_link(self): - self.w(self.req._('anonymous')) - self.w(u' [%s]' - % (users.create_login_url(self.req.url()), self.req._('login'))) - -class GAELogoutAction(LogoutAction): - - def url(self): - return users.create_logout_url(self.req.build_url('logout') ) - -def registration_callback(vreg): - if hasattr(vreg.config, 'has_resource'): - vreg.register(GACWUserLink, clear=True) - vreg.register(GAELogoutAction, clear=True) diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/appobjects/sessions.py --- a/goa/appobjects/sessions.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,291 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""persistent sessions stored in big table - - -XXX TODO: -* cleanup persistent session -* use user as ancestor? 
-""" -__docformat__ = "restructuredtext en" - -from pickle import loads, dumps -from time import localtime, strftime - -from logilab.common.decorators import cached, clear_cache - -from cubicweb import BadConnectionId -from cubicweb.dbapi import Connection, ConnectionProperties, repo_connect -from cubicweb.selectors import none_rset, match_user_groups -from cubicweb.server.session import Session -from cubicweb.web import InvalidSession -from cubicweb.web.application import AbstractSessionManager -from cubicweb.web.application import AbstractAuthenticationManager - -from google.appengine.api.datastore import Key, Entity, Get, Put, Delete, Query -from google.appengine.api.datastore_errors import EntityNotFoundError -from google.appengine.api.datastore_types import Blob - -try: - del Connection.__del__ -except AttributeError: - pass # already deleted - - -class GAEAuthenticationManager(AbstractAuthenticationManager): - """authenticate user associated to a request and check session validity, - using google authentication service - """ - - def __init__(self, *args, **kwargs): - super(GAEAuthenticationManager, self).__init__(*args, **kwargs) - self._repo = self.config.repository(vreg=self.vreg) - - def authenticate(self, req, _login=None, _password=None): - """authenticate user and return an established connection for this user - - :raise ExplicitLogin: if authentication is required (no authentication - info found or wrong user/password) - """ - if _login is not None: - login, password = _login, _password - else: - login, password = req.get_authorization() - # remove possibly cached cursor coming from closed connection - clear_cache(req, 'cursor') - cnxprops = ConnectionProperties(self.vreg.config.repo_method, - close=False, log=False) - cnx = repo_connect(self._repo, login, password=password, cnxprops=cnxprops) - self._init_cnx(cnx, login, password) - # associate the connection to the current request - req.set_connection(cnx) - return cnx - - def _init_cnx(self, cnx, login, password): - cnx.anonymous_connection = self.config.is_anonymous_user(login) - cnx.vreg = self.vreg - cnx.login = login - cnx.password = password - - -class GAEPersistentSessionManager(AbstractSessionManager): - """manage session data associated to a session identifier""" - - def __init__(self, vreg, *args, **kwargs): - super(GAEPersistentSessionManager, self).__init__(vreg, *args, **kwargs) - self._repo = self.config.repository(vreg=vreg) - - def get_session(self, req, sessionid): - """return existing session for the given session identifier""" - # search a record for the given session - key = Key.from_path('CubicWebSession', 'key_' + sessionid, parent=None) - try: - record = Get(key) - except EntityNotFoundError: - raise InvalidSession() - repo = self._repo - if self.has_expired(record): - repo._sessions.pop(sessionid, None) - Delete(record) - raise InvalidSession() - # associate it with a repository session - try: - reposession = repo._get_session(sessionid) - user = reposession.user - # touch session to avoid closing our own session when sessions are - # cleaned (touch is done on commit/rollback on the server side, too - # late in that case) - reposession._touch() - except BadConnectionId: - # can't found session in the repository, this probably mean the - # session is not yet initialized on this server, hijack the repo - # to create it - # use an internal connection - ssession = repo.internal_session() - # try to get a user object - try: - user = repo.authenticate_user(ssession, record['login'], - record['password']) - 
finally: - ssession.close() - reposession = Session(user, self._repo, _id=sessionid) - self._repo._sessions[sessionid] = reposession - cnx = Connection(self._repo, sessionid) - return self._get_proxy(req, record, cnx, user) - - def open_session(self, req): - """open and return a new session for the given request""" - cnx = self.authmanager.authenticate(req) - # avoid rebuilding a user - user = self._repo._get_session(cnx.sessionid).user - # build persistent record for session data - record = Entity('CubicWebSession', name='key_' + cnx.sessionid) - record['login'] = cnx.login - record['password'] = cnx.password - record['anonymous_connection'] = cnx.anonymous_connection - Put(record) - return self._get_proxy(req, record, cnx, user) - - def close_session(self, proxy): - """close session on logout or on invalid session detected (expired out, - corrupted...) - """ - proxy.close() - - def current_sessions(self): - for record in Query('CubicWebSession').Run(): - yield ConnectionProxy(record) - - def _get_proxy(self, req, record, cnx, user): - proxy = ConnectionProxy(record, cnx, user) - user.req = req - req.set_connection(proxy, user) - return proxy - - -class ConnectionProxy(object): - - def __init__(self, record, cnx=None, user=None): - self.__record = record - self.__cnx = cnx - self.__user = user - self.__data = None - self.__is_dirty = False - self.sessionid = record.key().name()[4:] # remove 'key_' prefix - - def __repr__(self): - sstr = '') - self.w(u'%s web sessions closed
        \n' % nbclosed) - # clean repository sessions - repo = self.config.repository(vreg=self.vreg) - nbclosed = repo.clean_sessions() - self.w(u'%s repository sessions closed
        \n' % nbclosed) - self.w(u'%s remaining sessions
        \n' % remaining) - self.w(u'') - - -def registration_callback(vreg): - vreg.register(SessionsCleaner) - vreg.register(GAEAuthenticationManager, clear=True) - vreg.register(GAEPersistentSessionManager, clear=True) diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/bin/laxctl --- a/goa/bin/laxctl Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,20 +0,0 @@ -#!/usr/bin/env python - -import sys -import os.path as osp - -APPLROOT = osp.abspath(osp.join(osp.dirname(osp.abspath(__file__)), '..')) -if APPLROOT not in sys.path: - sys.path.insert(0, APPLROOT) -CUBES_DIR = osp.join(APPLROOT, 'cw-cubes') -if CUBES_DIR not in sys.path: - sys.path.insert(1, CUBES_DIR) - -try: - import custom -except ImportError, exc: - print exc - sys.exit(2) - -from tools.laxctl import run -run() diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/db.py --- a/goa/db.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,469 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""provide replacement classes for gae db module, so that a gae model can be -used as base for a cubicweb application by simply replacing :: - - from google.appengine.ext import db - -by - - from cubicweb.goa import db - -The db.model api should be fully featured by replacement classes, with the -following differences: - -* all methods returning `google.appengine.ext.db.Model` instance(s) will return - `cubicweb.goa.db.Model` instance instead (though you should see almost no - difference since those instances have the same api) - -* class methods returning model instance take a `req` as first argument, unless - they are called through an instance, representing the current request - (accessible through `self.req` on almost all objects) - -* XXX no instance._set attributes, use instance.reverse_ - instead -* XXX reference property always return a list of objects, not the instance -* XXX name/collection_name argument of properties constructor are ignored -* XXX ListProperty - -""" -__docformat__ = "restructuredtext en" - -from copy import deepcopy - -from logilab.common.decorators import cached, iclassmethod - -from cubicweb import Binary, entities -from cubicweb.req import RequestSessionBase -from cubicweb.rset import ResultSet -from cubicweb.entity import metaentity -from cubicweb.server.utils import crypt_password -from cubicweb.goa import MODE -from cubicweb.goa.dbinit import init_relations - -from google.appengine.api.datastore import Get, Put, Key, Entity, Query -from google.appengine.api.datastore import NormalizeAndTypeCheck, RunInTransaction -from google.appengine.api.datastore_types import Text, Blob -from google.appengine.api.datastore_errors import BadKeyError - -# XXX remove this dependancy -from google.appengine.ext import db - - -def 
rset_from_objs(req, objs, attrs=('eid',), rql=None, args=None): - """return a ResultSet instance for list of objects""" - if objs is None: - objs = () - elif isinstance(objs, Entity): - objs = (objs,) - if rql is None: - rql = 'Any X' - rows = [] - description = [] - rset = ResultSet(rows, rql, args, description=description) - vreg = req.vreg - for i, obj in enumerate(objs): - line = [] - linedescr = [] - eschema = vreg.schema.eschema(obj.kind()) - for j, attr in enumerate(attrs): - if attr == 'eid': - value = obj.key() - obj.row, obj.col = i, j - descr = eschema.type - value = str(value) - else: - value = obj[attr] - descr = str(eschema.destination(attr)) - line.append(value) - linedescr.append(descr) - rows.append(line) - description.append(linedescr) - for j, attr in enumerate(attrs): - if attr == 'eid': - entity = vreg.etype_class(eschema.type)(req, rset, i, j) - rset._get_entity_cache_ = {(i, j): entity} - rset.rowcount = len(rows) - rset.req = req - return rset - - -def needrequest(wrapped): - def wrapper(cls, *args, **kwargs): - req = kwargs.pop('req', None) - if req is None and args and isinstance(args[0], RequestSessionBase): - args = list(args) - req = args.pop(0) - if req is None: - req = getattr(cls, 'req', None) - if req is None: - raise Exception('either call this method on an instance or ' - 'specify the req argument') - return wrapped(cls, req, *args, **kwargs) - return iclassmethod(wrapper) - - -class gaedbmetaentity(metaentity): - """metaclass for goa.db.Model classes: filter entity / db model part, - put aside the db model part for later creation of db model class. - """ - def __new__(mcs, name, bases, classdict): - if not 'id' in classdict: - classdict['id'] = name - entitycls = super(gaedbmetaentity, mcs).__new__(mcs, name, bases, classdict) - return entitycls - - -TEST_MODELS = {} - -def extract_dbmodel(entitycls): - if MODE == 'test' and entitycls in TEST_MODELS: - dbclassdict = TEST_MODELS[entitycls] - else: - dbclassdict = {} - for attr, value in entitycls.__dict__.items(): - if isinstance(value, db.Property) or isinstance(value, ReferencePropertyStub): - dbclassdict[attr] = value - # don't remove attr from entitycls, this make tests fail, and it's anyway - # overwritten by descriptor at class initialization time - #delattr(entitycls, attr) - if MODE == 'test': - TEST_MODELS[entitycls] = dbclassdict - dbclassdict = deepcopy(dbclassdict) - for propname, prop in TEST_MODELS[entitycls].iteritems(): - if getattr(prop, 'reference_class', None) is db._SELF_REFERENCE: - dbclassdict[propname].reference_class = db._SELF_REFERENCE - return dbclassdict - - -class Model(entities.AnyEntity): - id = 'Any' - __metaclass__ = gaedbmetaentity - - row = col = 0 - - @classmethod - def __initialize__(cls): - super(Model, cls).__initialize__() - cls._attributes = frozenset(rschema for rschema in cls.e_schema.subject_relations() - if rschema.final) - - def __init__(self, *args, **kwargs): - # db.Model prototype: - # __init__(self, parent=None, key_name=None, **kw) - # - # Entity prototype: - # __init__(self, req, rset, row=None, col=0) - if args and isinstance(args[0], RequestSessionBase) or 'req' in kwargs: - super(Model, self).__init__(*args, **kwargs) - self._gaeinitargs = None - else: - super(Model, self).__init__(None, None) - # if Model instances are given in kwargs, turn them into db model - for key, val in kwargs.iteritems(): - if key in self.e_schema.subject_relations() and not self.e_schema.schema[key].final: - if isinstance(kwargs, (list, tuple)): - val = [isinstance(x, Model) 
and x._dbmodel or x for x in val] - elif isinstance(val, Model): - val = val._dbmodel - kwargs[key] = val.key() - self._gaeinitargs = (args, kwargs) - - def __repr__(self): - return '' % ( - self.e_schema, self.eid, self.keys(), id(self)) - - def _cubicweb_to_datastore(self, attr, value): - attr = attr[2:] # remove 's_' / 'o_' prefix - if attr in self._attributes: - tschema = self.e_schema.destination(attr) - if tschema == 'String': - if len(value) > 500: - value = Text(value) - elif tschema == 'Password': - # if value is a Binary instance, this mean we got it - # from a query result and so it is already encrypted - if isinstance(value, Binary): - value = value.getvalue() - else: - value = crypt_password(value) - elif tschema == 'Bytes': - if isinstance(value, Binary): - value = value.getvalue() - value = Blob(value) - else: - value = Key(value) - return value - - def _to_gae_dict(self, convert=True): - gaedict = {} - for attr, value in self.iteritems(): - attr = 's_' + attr - if value is not None and convert: - value = self._cubicweb_to_datastore(attr, value) - gaedict[attr] = value - return gaedict - - def to_gae_model(self): - dbmodel = self._dbmodel - dbmodel.update(self._to_gae_dict()) - return dbmodel - - @property - @cached - def _dbmodel(self): - if self.has_eid(): - assert self._gaeinitargs is None - try: - return self.req.datastore_get(self.eid) - except AttributeError: # self.req is not a server session - return Get(self.eid) - self.set_defaults() - values = self._to_gae_dict(convert=False) - parent = key_name = _app = None - if self._gaeinitargs is not None: - args, kwargs = self._gaeinitargs - args = list(args) - if args: - parent = args.pop(0) - if args: - key_name = args.pop(0) - if args: - _app = args.pop(0) - assert not args - if 'parent' in kwargs: - assert parent is None - parent = kwargs.pop('parent') - if 'key_name' in kwargs: - assert key_name is None - key_name = kwargs.pop('key_name') - if '_app' in kwargs: - assert _app is None - _app = kwargs.pop('_app') - - for key, value in kwargs.iteritems(): - if key in self._attributes: - values['s_'+key] = value - else: - kwargs = None - if key_name is None: - key_name = self.db_key_name() - if key_name is not None: - key_name = 'key_' + key_name - for key, value in values.iteritems(): - if value is None: - continue - values[key] = self._cubicweb_to_datastore(key, value) - entity = Entity(self.id, parent, _app, key_name) - entity.update(values) - init_relations(entity, self.e_schema) - return entity - - def db_key_name(self): - """override this method to control datastore key name that should be - used at entity creation. - - Note that if this function return something else than None, the returned - value will be prefixed by 'key_' to build the actual key name. 
- """ - return None - - def metainformation(self): - return {'type': self.id, 'source': {'uri': 'system'}, 'extid': None} - - def view(self, vid, __registry='views', **kwargs): - """shortcut to apply a view on this entity""" - return self.vreg[__registry].render(vid, self.req, rset=self.rset, - row=self.row, col=self.col, **kwargs) - - @classmethod - def _rest_attr_info(cls): - mainattr, needcheck = super(Model, cls)._rest_attr_info() - if needcheck: - return 'eid', False - return mainattr, needcheck - - def get_value(self, name): - try: - value = self[name] - except KeyError: - if not self.has_eid(): - return None - value = self._dbmodel.get('s_'+name) - if value is not None: - if isinstance(value, Text): - value = unicode(value) - elif isinstance(value, Blob): - value = Binary(str(value)) - self[name] = value - return value - - def has_eid(self): - if self.eid is None: - return False - try: - Key(self.eid) - return True - except BadKeyError: - return False - - def complete(self, skip_bytes=True): - pass - - def unrelated(self, rtype, targettype, role='subject', limit=None, - ordermethod=None): - # XXX dumb implementation - if limit is not None: - objs = Query(str(targettype)).Get(limit) - else: - objs = Query(str(targettype)).Run() - return rset_from_objs(self.req, objs, ('eid',), - 'Any X WHERE X is %s' % targettype) - - def key(self): - return Key(self.eid) - - def put(self, req=None): - if req is not None and self.req is None: - self.req = req - dbmodel = self.to_gae_model() - key = Put(dbmodel) - self.set_eid(str(key)) - if self.req is not None and self.rset is None: - self.rset = rset_from_objs(self.req, dbmodel, ('eid',), - 'Any X WHERE X eid %(x)s', {'x': self.eid}) - self.row = self.col = 0 - return dbmodel - - @needrequest - def get(cls, req, keys): - # if check if this is a dict.key call - if isinstance(cls, Model) and keys in cls._attributes: - return super(Model, cls).get(keys) - rset = rset_from_objs(req, Get(keys), ('eid',), - 'Any X WHERE X eid IN %(x)s', {'x': keys}) - return list(rset.entities()) - - @needrequest - def get_by_id(cls, req, ids, parent=None): - if isinstance(parent, Model): - parent = parent.key() - ids, multiple = NormalizeAndTypeCheck(ids, (int, long)) - keys = [Key.from_path(cls.kind(), id, parent=parent) - for id in ids] - rset = rset_from_objs(req, Get(keys)) - return list(rset.entities()) - - @classmethod - def get_by_key_name(cls, req, key_names, parent=None): - if isinstance(parent, Model): - parent = parent.key() - key_names, multiple = NormalizeAndTypeCheck(key_names, basestring) - keys = [Key.from_path(cls.kind(), name, parent=parent) - for name in key_names] - rset = rset_from_objs(req, Get(keys)) - return list(rset.entities()) - - @classmethod - def get_or_insert(cls, req, key_name, **kwds): - def txn(): - entity = cls.get_by_key_name(key_name, parent=kwds.get('parent')) - if entity is None: - entity = cls(key_name=key_name, **kwds) - entity.put() - return entity - return RunInTransaction(txn) - - @classmethod - def all(cls, req): - rset = rset_from_objs(req, Query(cls.id).Run()) - return list(rset.entities()) - - @classmethod - def gql(cls, req, query_string, *args, **kwds): - raise NotImplementedError('use rql') - - @classmethod - def kind(cls): - return cls.id - - @classmethod - def properties(cls): - raise NotImplementedError('use eschema') - - def dynamic_properties(self): - raise NotImplementedError('use eschema') - - def is_saved(self): - return self.has_eid() - - def parent(self): - parent = self._dbmodel.parent() - if not parent is 
None: - rset = rset_from_objs(self.req, (parent,), ('eid',), - 'Any X WHERE X eid %(x)s', {'x': parent.key()}) - parent = rset.get_entity(0, 0) - return parent - - def parent_key(self): - return self.parent().key() - - def to_xml(self): - return self._dbmodel.ToXml() - -# hijack AnyEntity class -entities.AnyEntity = Model - -BooleanProperty = db.BooleanProperty -URLProperty = db.URLProperty -DateProperty = db.DateProperty -DateTimeProperty = db.DateTimeProperty -TimeProperty = db.TimeProperty -StringProperty = db.StringProperty -TextProperty = db.TextProperty -BlobProperty = db.BlobProperty -IntegerProperty = db.IntegerProperty -FloatProperty = db.FloatProperty -ListProperty = db.ListProperty -SelfReferenceProperty = db.SelfReferenceProperty -UserProperty = db.UserProperty - - -class ReferencePropertyStub(object): - def __init__(self, cls, args, kwargs): - self.cls = cls - self.args = args - self.kwargs = kwargs - self.required = False - self.__dict__.update(kwargs) - self.creation_counter = db.Property.creation_counter - db.Property.creation_counter += 1 - - @property - def data_type(self): - class FakeDataType(object): - @staticmethod - def kind(): - return self.cls.__name__ - return FakeDataType - -def ReferenceProperty(cls, *args, **kwargs): - if issubclass(cls, db.Model): - cls = db.class_for_kind(cls.__name__) - return db.ReferenceProperty(cls, *args, **kwargs) - return ReferencePropertyStub(cls, args, kwargs) diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/dbinit.py --- a/goa/dbinit.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,120 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""some utility functions for datastore initialization. - -""" -__docformat__ = "restructuredtext en" - -from google.appengine.api.datastore import Key, Entity, Put, Get, Query -from google.appengine.api import datastore_errors - -_GROUP_CACHE = {} # XXX use memcache - -def _get_group(groupname): - try: - return _GROUP_CACHE[groupname] - except KeyError: - key = Key.from_path('CWGroup', 'key_' + groupname, parent=None) - try: - group = Get(key) - except datastore_errors.EntityNotFoundError: - raise Exception('can\'t find required group %s, is your instance ' - 'correctly initialized (eg did you run the ' - 'initialization script) ?' 
% groupname) - _GROUP_CACHE[groupname] = group - return group - - -def create_user(login, password, groups): - """create a cubicweb user""" - from cubicweb.server.utils import crypt_password - user = Entity('CWUser', name=login) - user['s_login'] = unicode(login) - user['s_upassword'] = crypt_password(password) - set_user_groups(user, groups) - Put(user) - return user - -def create_groups(): - """create initial cubicweb groups""" - for groupname in ('managers', 'users', 'guests'): - group = Entity('CWGroup', name='key_' + groupname) - group['s_name'] = unicode(groupname) - Put(group) - _GROUP_CACHE[groupname] = group - -def set_user_groups(user, groups): - """set user in the given groups (as string). The given user entity - (datastore.Entity) is not putted back to the repository, this is the caller - responsability. - """ - groups = [_get_group(g) for g in groups] - user['s_in_group'] = [g.key() for g in groups] or None - for group in groups: - try: - group['o_in_group'].append(user.key()) - except (KeyError, AttributeError): - group['o_in_group'] = [user.key()] - Put(group) - -def init_relations(gaeentity, eschema): - """set None for every subject relations which is not yet defined""" - for rschema in eschema.subject_relations(): - if rschema in ('identity', 'has_text'): - continue - dsrelation = 's_' + rschema.type - if not dsrelation in gaeentity: - gaeentity[dsrelation] = None - for rschema in eschema.object_relations(): - if rschema == 'identity': - continue - dsrelation = 'o_' + rschema.type - if not dsrelation in gaeentity: - gaeentity[dsrelation] = None - -def fix_entities(schema): - for etype in ('CWUser', 'CWGroup'): - eschema = schema.eschema(etype) - for gaeentity in Query(etype).Run(): - init_relations(gaeentity, eschema) - # XXX o_is on CWEType entity - gaeentity['s_is'] = Key.from_path('CWEType', 'key_' + etype, parent=None) - Put(gaeentity) - -def init_persistent_schema(ssession, schema): - execute = ssession.execute - rql = ('INSERT CWEType X: X name %(name)s, X description %(descr)s,' - 'X final FALSE') - eschema = schema.eschema('CWEType') - execute(rql, {'name': u'CWEType', 'descr': unicode(eschema.description)}) - for eschema in schema.entities(): - if eschema.final or eschema == 'CWEType': - continue - execute(rql, {'name': unicode(eschema), - 'descr': unicode(eschema.description)}) - -def insert_versions(ssession, config): - execute = ssession.execute - # insert versions - execute('INSERT CWProperty X: X pkey %(pk)s, X value%(v)s', - {'pk': u'system.version.cubicweb', - 'v': unicode(config.cubicweb_version())}) - for cube in config.cubes(): - execute('INSERT CWProperty X: X pkey %(pk)s, X value%(v)s', - {'pk': u'system.version.%s' % cube, - 'v': unicode(config.cube_version(cube))}) diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/dbmyams.py --- a/goa/dbmyams.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,223 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""extends yams to be able to load google appengine's schemas - -MISSING FEATURES: - - ListProperty, StringList, EmailProperty, etc. (XXX) - - ReferenceProperty.verbose_name, collection_name, etc. (XXX) - -XXX proprify this knowing we'll use goa.db -""" - -from os.path import join -from datetime import datetime, date, time - -from google.appengine.ext import db -from google.appengine.api import datastore_types - -from yams.buildobjs import (String, Int, Float, Boolean, Date, Time, Datetime, - Bytes, SubjectRelation) -from yams.buildobjs import metadefinition, EntityType - -from cubicweb.schema import CubicWebSchemaLoader -from cubicweb.goa import db as goadb - -# db.Model -> yams ############################################################ - -DBM2Y_TYPESMAP = { - basestring: String, - datastore_types.Text: String, - int: Int, - float: Float, - bool: Boolean, - time: Time, - date: Date, - datetime: Datetime, - datastore_types.Blob: Bytes, - } - - -def dbm2y_default_factory(prop, **kwargs): - """just wraps the default types map to set - basic constraints like `required`, `default`, etc. - """ - yamstype = DBM2Y_TYPESMAP[prop.data_type] - if 'default' not in kwargs: - default = prop.default_value() - if default is not None: - kwargs['default'] = default - if prop.required: - kwargs['required'] = True - return yamstype(**kwargs) - -def dbm2y_string_factory(prop): - """like dbm2y_default_factory but also deals with `maxsize` and `vocabulary`""" - kwargs = {} - if prop.data_type is basestring: - kwargs['maxsize'] = 500 - if prop.choices is not None: - kwargs['vocabulary'] = prop.choices - return dbm2y_default_factory(prop, **kwargs) - -def dbm2y_date_factory(prop): - """like dbm2y_default_factory but also deals with today / now definition""" - kwargs = {} - if prop.auto_now_add: - if prop.data_type is datetime: - kwargs['default'] = 'now' - else: - kwargs['default'] = 'today' - # XXX no equivalent to Django's `auto_now` - return dbm2y_default_factory(prop, **kwargs) - - -def dbm2y_relation_factory(etype, prop, multiple=False): - """called if `prop` is a `db.ReferenceProperty`""" - if multiple: - cardinality = '**' - elif prop.required: - cardinality = '1*' - else: - cardinality = '?*' - # XXX deal with potential kwargs of ReferenceProperty.__init__() - try: - return SubjectRelation(prop.data_type.kind(), cardinality=cardinality) - except AttributeError, ex: - # hack, data_type is still _SELF_REFERENCE_MARKER - return SubjectRelation(etype, cardinality=cardinality) - - -DBM2Y_FACTORY = { - basestring: dbm2y_string_factory, - datastore_types.Text: dbm2y_string_factory, - int: dbm2y_default_factory, - float: dbm2y_default_factory, - bool: dbm2y_default_factory, - time: dbm2y_date_factory, - date: dbm2y_date_factory, - datetime: dbm2y_date_factory, - datastore_types.Blob: dbm2y_default_factory, - } - - -class GaeSchemaLoader(CubicWebSchemaLoader): - """Google appengine schema loader class""" - def __init__(self, *args, **kwargs): - self.use_gauthservice = kwargs.pop('use_gauthservice', False) - super(GaeSchemaLoader, self).__init__(*args, **kwargs) - self.defined = {} - self.created = [] - self.loaded_files = [] - self._instantiate_handlers() - - def finalize(self, register_base_types=False): - return self._build_schema('google-appengine', register_base_types) - - def load_dbmodel(self, name, props): - clsdict = {} - 
ordered_props = sorted(props.items(), - key=lambda x: x[1].creation_counter) - for pname, prop in ordered_props: - if isinstance(prop, db.ListProperty): - if not issubclass(prop.item_type, db.Model): - self.error('ignoring list property with %s item type' - % prop.item_type) - continue - rdef = dbm2y_relation_factory(name, prop, multiple=True) - else: - try: - if isinstance(prop, (db.ReferenceProperty, - goadb.ReferencePropertyStub)): - rdef = dbm2y_relation_factory(name, prop) - else: - rdef = DBM2Y_FACTORY[prop.data_type](prop) - except KeyError, ex: - import traceback - traceback.print_exc() - self.error('ignoring property %s (keyerror on %s)' % (pname, ex)) - continue - rdef.creation_rank = prop.creation_counter - clsdict[pname] = rdef - edef = metadefinition(name, (EntityType,), clsdict) - self.add_definition(self, edef()) - - def error(self, msg): - print 'ERROR:', msg - - def import_yams_schema(self, ertype, schemamod): - erdef = self.pyreader.import_erschema(ertype, schemamod) - - def import_yams_cube_schema(self, templpath): - for filepath in self.get_schema_files(templpath): - self.handle_file(filepath) - - @property - def pyreader(self): - return self._live_handlers['.py'] - -import os -from cubicweb import CW_SOFTWARE_ROOT - -def load_schema(config, schemaclasses=None, extrahook=None): - """high level method to load all the schema for a lax instance""" - # IMPORTANT NOTE: dbmodel schemas must be imported **BEFORE** - # the loader is instantiated because this is where the dbmodels - # are registered in the yams schema - for compname in config['included-cubes']: - __import__('%s.schema' % compname) - loader = GaeSchemaLoader(use_gauthservice=config['use-google-auth'], db=db) - if schemaclasses is not None: - for cls in schemaclasses: - loader.load_dbmodel(cls.__name__, goadb.extract_dbmodel(cls)) - elif config['schema-type'] == 'dbmodel': - import schema as appschema - for obj in vars(appschema).values(): - if isinstance(obj, type) and issubclass(obj, goadb.Model) and obj.__module__ == appschema.__name__: - loader.load_dbmodel(obj.__name__, goadb.extract_dbmodel(obj)) - for erschema in ('CWGroup', 'CWEType', 'CWRType', 'RQLExpression', - 'is_', 'is_instance_of', - 'read_permission', 'add_permission', - 'delete_permission', 'update_permission'): - loader.import_yams_schema(erschema, 'bootstrap') - loader.handle_file(join(CW_SOFTWARE_ROOT, 'schemas', 'base.py')) - cubes = config['included-yams-cubes'] - for cube in reversed(config.expand_cubes(cubes)): - config.info('loading cube %s', cube) - loader.import_yams_cube_schema(config.cube_dir(cube)) - if config['schema-type'] == 'yams': - loader.import_yams_cube_schema('.') - if extrahook is not None: - extrahook(loader) - if config['use-google-auth']: - loader.defined['CWUser'].remove_relation('upassword') - loader.defined['CWUser'].permissions['add'] = () - loader.defined['CWUser'].permissions['delete'] = () - for etype in ('CWGroup', 'RQLExpression'): - read_perm_rel = loader.defined[etype].get_relations('read_permission').next() - read_perm_rel.cardinality = '**' - # XXX not yet ready for CWUser workflow - loader.defined['CWUser'].remove_relation('in_state') - loader.defined['CWUser'].remove_relation('wf_info_for') - # remove RQLConstraint('NOT O name "owners"') on CWUser in_group CWGroup - # since "owners" group is not persistent with gae - loader.defined['CWUser'].get_relations('in_group').next().constraints = [] - # return the full schema including the cubes' schema - for ertype in loader.defined.values(): - if getattr(ertype, 
'inlined', False): - ertype.inlined = False - return loader.finalize() diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/doc/FAQ.en.txt --- a/goa/doc/FAQ.en.txt Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +0,0 @@ -============================== -LAX Frequently Asked Questions -============================== - -[WRITE ME] \ No newline at end of file diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/doc/README_LAX.fr.txt --- a/goa/doc/README_LAX.fr.txt Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,75 +0,0 @@ -Qu'est-ce que ``LAX`` ? -======================= - -``LAX`` (Logilab Application engine eXtension) est un framework -d'application web qui facilite les développements faits pour -``Google AppEngine``. - -``LAX`` est un portage de la partie web de la plate-forme -applicative développée par Logilab depuis 2001. Cette plate-forme -publie des données que la partie stockage tire de bases SQL, -d'annuaires LDAP et de systèmes de gestion de version. Depuis mai -2008, elle fonctionne sur le "datastore" de ``Google AppEngine``. - -``LAX`` est pour le moment en version alpha. - -Django/GAE vs. LAX/GAE -======================= - -NotImplementedError() - - -Téléchargement des sources -========================== - -- Les sources de ``Google AppEngine`` peuvent être obtenues à l'adresse - suivante : http://code.google.com/appengine/downloads.html - -- Les sources de ``LAX`` se trouvent à l'adresse suivante : - http://lax.logilab.org/ - - -Installation -============ - -Les sources de ``Google AppEngine`` doivent être décompressées et le -répertoire `google` qui s'y trouve doit être accessible par la variable -d'environnement ``PYTHONPATH``. Correctement définir le ``PYTHONPATH`` -n'est pas nécessaire pour le lancement de l'application elle-même mais -pour l'utilisation des scripts fournis par ``LAX`` ou pour l'exécution -des tests unitaires. - -Une fois décompactée, l'archive ``lax-0.1.0-alpha.tar.gz``, on obtient -l'arborescence suivante:: - - . - |-- app.yaml - |-- custom.py - |-- data - |-- cubicweb/ - |-- i18n/ - |-- logilab/ - |-- main.py - |-- mx/ - |-- rql/ - |-- schema.py - |-- simplejson/ - |-- tools/ - | |-- generate_schema_img.py - | `-- i18ncompile.py - |-- views.py - |-- yams/ - `-- yapps/ - - -On retrouve le squelette d'une application web de ``Google AppEngine`` -(fichiers ``app.yaml``, ``main.py``en particulier) avec les dépendances -supplémentaires nécessaires à l'utilisation du framework ``LAX`` - - -Lancement de l'application de base -================================== - -python /path/to/google_appengine/dev_appserver.py /path/to/lax - - diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/doc/devmanual_fr/advanced_notes.txt --- a/goa/doc/devmanual_fr/advanced_notes.txt Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +0,0 @@ - -La différence entre la classe `AppRsetObject` et la classe `AppObject` est que -les instances de la premières sont séléctionnées pour une requête et un "result -set" et alors que les secondes ne sont séléctionnées qu'en fonction de leur -identifiant. 
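To make the distinction above concrete, here is a minimal sketch of an rset-selected object, reusing the `accepts`/`cell_call` registration idiom that appears in the removed goa quickstart; the class name, the WineMaker entity and the HTML emitted through `self.w` are illustrative assumptions only, not part of the removed code::

    from cubicweb.web.views import baseviews

    class WineMakerPrimaryView(baseviews.PrimaryView):
        # selected for a request *and* a result set, as described in the note above
        accepts = ('WineMaker',)

        def cell_call(self, row, col):
            # fetch the entity sitting at the given cell of the result set
            entity = self.rset.get_entity(row, col)
            self.w(u'<h1>%s</h1>' % entity.name)

An object registered and looked up purely by its identifier, by contrast, never receives a result set at selection time.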
diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/doc/devmanual_fr/archi_globale.dia Binary file goa/doc/devmanual_fr/archi_globale.dia has changed diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/doc/devmanual_fr/archi_globale.png Binary file goa/doc/devmanual_fr/archi_globale.png has changed diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/doc/devmanual_fr/chap_autres_composants_ui.txt --- a/goa/doc/devmanual_fr/chap_autres_composants_ui.txt Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,14 +0,0 @@ -Autres composants de l'interface web -==================================== - -Actions -------- -XXXFILLME - -Component, VComponent ---------------------- -XXXFILLME - -CWProperty ---------- -XXXFILLME diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/doc/devmanual_fr/chap_bases_framework_erudi.txt --- a/goa/doc/devmanual_fr/chap_bases_framework_erudi.txt Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,226 +0,0 @@ -Fondements du framework CubicWeb -============================= - -Le moteur web d'cubicweb consiste en quelques classes gérant un ensemble d'objets -chargés dynamiquement au lancement d'cubicweb. Ce sont ces objets dynamiques, issus -du modèle ou de la librairie, qui construisent le site web final. Les différents -composants dynamiques sont par exemple : - -* coté client et serveur - - - les définitions d'entités, contenant la logique permettant la manipulation des - données de l'application - -* coté client - - - les *vues* , ou encore plus spécifiquement - - - les boites - - l'en-tête et le pied de page - - les formulaires - - les gabarits de pages - - - les *actions* - - les *controleurs* - -* coté serveur - - - les crochets de notification - - les vues de notification - -Les différents composants du moteur sont : - -* un frontal web (seul twisted disponible pour le moment), transparent du point - de vue des objets dynamiques -* un objet encapsulant la configuration -* un `vregistry` (`cubicweb.cwvreg`) contenant les objets chargés dynamiquements - - -Détail de la procédure d'enregistrement ---------------------------------------- -Au démarage le `vregistry` ou base de registres inspecte un certain nombre de -répertoires à la recherche de définition de classes "compatible". Après une -procédure d'enregistrement les objets sont affectés dans différents registres -afin d'être ensuite séléctionné dynamiquement pendant le fonctionnement de -l'application. - -La classe de base de tout ces objets est la classe `AppRsetObject` (module -`cubicweb.common.appobject`). - - -API Python/RQL --------------- - -Inspiré de la db-api standard, avec un object Connection possédant les méthodes -cursor, rollback et commit principalement. La méthode importante est la méthode -`execute` du curseur : - -`execute(rqlstring, args=None, eid_key=None, build_descr=True)` - -:rqlstring: la requête rql à éxécuter (unicode) -:args: si la requête contient des substitutions, un dictionnaire contenant les - valeurs à utiliser -:eid_key: - un détail d'implémentation du cache de requêtes RQL fait que si une substitution est - utilisée pour introduire un eid *levant des ambiguités dans la résolution de - type de la requête*, il faut spécifier par cet argument la clé correspondante - dans le dictionnaire - -C'est l'objet Connection qui possède les méthodes classiques `commit` et -`rollback`. 
Vous ne *devriez jamais avoir à les utiliser* lors du développement -d'interface web sur la base du framework CubicWeb étant donné que la fin de la -transaction est déterminée par celui-ci en fonction du succès d'éxécution de la -requête. - -NOTE : lors de l'éxécution de requêtes de modification (SET,INSERT,DELETE), si une -requête génère une erreur liée à la sécurité, un rollback est systématiquement -effectuée sur la transaction courante. - - -La classe `Request` (`cubicweb.web`) ---------------------------------- -Une instance de requête est créée lorsque une requête HTTP est transmise au -serveur web. Elle contient des informations telles que les paramètres de -formulaires, l'utilisateur connecté, etc. - -**De manière plus générale une requête représente une demande d'un utilisateur, -que se soit par HTTP ou non (on parle également de requête rql coté serveur par -exemple)** - -Une instance de la classe `Request` possède les attributs : - -* `user`, instance de`cubicweb.common.utils.User` correspondant à l'utilisateur - connecté -* `form`, dictionaire contenant les valeurs de formulaire web -* `encoding`, l'encodage de caractère à utiliser dans la réponse - -Mais encore : - -:Gestion des données de session: - * `session_data()`, retourne un dictionaire contenant l'intégralité des - données de la session - * `get_session_data(key, default=None)`, retourne la valeur associée à - la clé ou la valeur `default` si la clé n'est pas définie - * `set_session_data(key, value)`, associe une valeur à une clé - * `del_session_data(key)`, supprime la valeur associé à une clé - - -:Gestion de cookie: - * `get_cookie()`, retourne un dictionnaire contenant la valeur de l'entête - HTTP 'Cookie' - * `set_cookie(cookie, key, maxage=300)`, ajoute un en-tête HTTP `Set-Cookie`, - avec une durée de vie 5 minutes par défault (`maxage` = None donne un cooke - *de session"* expirant quand l'utilisateur ferme son navigateur - * `remove_cookie(cookie, key)`, fait expirer une valeur - -:Gestion d'URL: - * `url()`, retourne l'url complète de la requête HTTP - * `base_url()`, retourne l'url de la racine de l'application - * `relative_path()`, retourne chemin relatif de la requête - -:Et encore...: - * `set_content_type(content_type, filename=None)`, place l'en-tête HTTP - 'Content-Type' - * `get_header(header)`, retourne la valeur associé à un en-tête HTTP - arbitraire de la requête - * `set_header(header, value)`, ajoute un en-tête HTTP arbitraire dans la - réponse - * `cursor()` retourne un curseur RQL sur la session - * `execute(*args, **kwargs)`, raccourci vers .cursor().execute() - * `property_value(key)`, gestion des propriétés (`CWProperty`) - * le dictionaire `data` pour stocker des données pour partager de - l'information entre les composants *durant l'éxécution de la requête*. - -A noter que cette classe est en réalité abstraite et qu'une implémentation -concrète sera fournie par le *frontend* web utilisé (en l'occurent *twisted* -aujourd'hui). Enfin pour les vues ou autres qui sont éxécutés coté serveur, -la majeure partie de l'interface de `Request` est définie sur la session -associée au client. - - -La classe `AppObject` ---------------------- - -En général : - -* on n'hérite pas directement des cette classe mais plutôt d'une classe - plus spécifique comme par exemple `AnyEntity`, `EntityView`, `AnyRsetView`, - `Action`... - -* pour être enregistrable, un classe fille doit définir son registre (attribut - `__registry__`) et son identifiant (attribut `id`). 
Généralement on n'a pas à - s'occuper du registre, uniquement de l'identifiant `id` :) - -On trouve un certain nombre d'attributs et de méthodes définis dans cette classe -et donc commune à tous les objets de l'application : - -A l'enregistrement, les attributs suivants sont ajoutés dynamiquement aux -*classes* filles: - -* `vreg`, le `vregistry` de l'application -* `schema`, le schéma de l'application -* `config`, la configuration de l'application - -On trouve également sur les instances les attributs : - -* `req`, instance de `Request` -* `rset`, le "result set" associé à l'objet le cas échéant -* `cursor`, curseur rql sur la session - - -:Gestion d'URL: - * `build_url(method=None, **kwargs)`, retourne une URL absolue construites à - partir des arguments donnés. Le *controleur* devant gérer la réponse - peut-être spécifié via l'argument spécial `method` (le branchement est - théoriquement bien effectué automatiquement :). - - * `datadir_url()`, retourne l'url du répertoire de données de l'application - (contenant les fichiers statiques tels que les images, css, js...) - - * `base_url()`, raccourci sur `req.base_url()` - - * `url_quote(value)`, version *unicode safe* de de la fonction `urllib.quote` - -:Manipulation de données: - - * `etype_rset(etype, size=1)`, raccourci vers `vreg.etype_rset()` - - * `eid_rset(eid, rql=None, descr=True)`, retourne un objet result set pour - l'eid donné - * `entity(row, col=0)`, retourne l'entité correspondant à la position données - du "result set" associé à l'objet - - * `complete_entity(row, col=0, skip_bytes=True)`, équivalent à `entity` mais - appelle également la méthode `complete()` sur l'entité avant de la retourner - -:Formattage de données: - * `format_date(date, date_format=None, time=False)` - * `format_time(time)`, - -:Et encore...: - - * `external_resource(rid, default=_MARKER)`, accède à une valeur définie dans - le fichier de configuration `external_resource` - - * `tal_render(template, variables)`, - - -**NOTE IMPORTANTE** -Lorsqu'on hérite d'`AppObject` (même indirectement), il faut **toujours** -utiliser **super()** pour récupérer les méthodes et attributs des classes -parentes, et pas passer par l'identifiant de classe parente directement. -(sous peine de tomber sur des bugs bizarres lors du rechargement automatique -des vues). Par exemple, plutôt que d'écrire:: - - class Truc(PrimaryView): - def f(self, arg1): - PrimaryView.f(self, arg1) - -Il faut écrire:: - - class Truc(PrimaryView): - def f(self, arg1): - super(Truc, self).f(arg1) - - -XXX FILLME diagramme interaction application/controller/template/view diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/doc/devmanual_fr/chap_configuration_instance.txt --- a/goa/doc/devmanual_fr/chap_configuration_instance.txt Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,160 +0,0 @@ -Configuration d'une instance -============================ - -À la création d'une instance, un fichier de configuration est généré dans :: - - $(CW_REGISTRY)//.conf - -par exemple :: - - /etc/cubicweb.d/jpl/all-in-one.conf - -C'est un simple fichier texte au format INI. Dans la description suivante, -chaque nom d'option est préfixé de sa section et suivi de sa valeur par défaut -le cas échéant, e.g. "`
        .
        ) et appelle la vue `cell` pour chaque cellule - du résultat. Appelable sur n'importe quel result set. -:cell: - par défaut redirige sur la vue `final` si c'est une entité finale - ou sur la vue `outofcontext` sinon -:null: - vue toujours appelable et ne retournant rien diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/doc/quickstart.txt --- a/goa/doc/quickstart.txt Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,128 +0,0 @@ -.. -*- coding: utf-8 -*- - -Introduction -============= - - -Concepts et vocabulaire ------------------------ - -*schéma* - le schéma définit le modèle de données d'une application sous forme - d'entités et de relations. C'est l'élément central d'une - application. - -*result set* - objet qui encaspule les résultats d'une requête adressée à l'entrepôt - de données et des informations sur cette requête. - -*vue* - une vue est une manière de représenter les données d'un `result set` - sous forme HTML, CSV, JSON, etc. - - - -Définition d'une application de Blog -==================================== - -La première chose à faire est de copier le squelette depuis le répertoire -``lax/skel`` vers un nouveau répertoire qui sera votre application -``Google AppEngine``:: - - $ cp -r lax/skel myapp - -Définition du schéma --------------------- - -Ouvrir le fichier ``myapp/schema.py`` afin de définir le schéma des -données manipulées. La syntaxe de la définition est la même que celle -proposée par `Google AppEngine`_ mais il faut remplacer la ligne -d'import:: - - from google.appengine.ext import db - -par celle-ci:: - - from cubicweb.goa import db - - -Un exemple de schéma de données pour un ``Blog`` pourrait être:: - - from cubicweb.goa import db - - class Blog(db.Model): - # un titre à donner à l'entrée - title = db.StringProperty(required=True) - # la date à laquelle le blog est créé - diem = db.DateProperty(required=True, auto_now_add=True) - # le contenu de l'entrée - content = db.TextProperty() - # une entrée peut en citer une autre - cites = db.SelfReferenceProperty() - - -Personnalisation des vues -------------------------- - -``LAX`` permet de générer directement, à partir de la définition -du schéma, des vues de consultation, d'ajout et de modification -pour tous les types de donées manipulés. Il est toutefois -généralement souhaitable de personnaliser les vues de consultations. - -Dans ``LAX``, les vues sont représentées par des classes Python. - -Une vue se caractérise par : - -- un identifiant (tous les objets dans ``LAX`` sont enregistrés - dans un registre et cet identifiant sert de clé pour y retrouver - la vue) - -- une description des types de données auxquels elle s'applique - -Il existe dans ``LAX`` des vues prédéfinies et utilisées par le moteur -d'affichage. Pour avoir une liste exhaustive de ces vues prédéfinies, -vous pouvez consulter cette page. (XXX mettre le lien vers la liste). -Par exemple, la vue ``primary`` est la vue utilisée pour générer la -page principale de consultation d'un objet. - -Par exemple, si on souhaite modifier la page principale d'une entrée de -blog, il faut surcharger la vue ``primary`` des objets ``Blog`` dans -le fichier ``myapp/views.py``:: - - from cubicweb.web.views import baseviews - - class BlogPrimaryView(baseviews.PrimaryView): - accepts = ('Blog',) - - def cell_call(self, row, col): - entity = self.rset.get_entity(row, col) - self.w(u'

%s' % entity.title) - self.w(u'%s
        ' entity.content) - - -Génération du graphique de schéma ---------------------------------- - -Il existe une vue ``schema`` qui permet d'afficher un graphique -représantant les différents types d'entités définis dans le schéma -ainsi que les relations entre ces types. Ce graphique doit être généré -statiquement. Le script à utiliser pour générer ce schéma est -dans ``myapp/tools``. Ce script nécessite d'avoir accès aux -bibliothèques fournies par le SDK de ``Google AppEngine``. Il faut -donc modifier son PYTHONPATH:: - - $ export PYTHONPATH=GAE_ROOT/google:GAE_ROOT/lib/yaml - $ python tools/generate_schema_img.py - - -Génération des fichiers de traduction -------------------------------------- - -Des catalogues de traduction se trouvent dans `myapp/i18n`. Il faut -pour l'instant les mettre à jour à la main (et/ou avec les outils -``GNU`` comme ``xgettext``) et ensuite les compiler grâce au script -``myapp/tools/i18ncompile.py``:: - - $ python tools/i18ncompile.py - -.. _`Google AppEngine` :: http://code.google.com/appengine/docs/datastore/overview.html diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/doc/tutorial-wine.txt --- a/goa/doc/tutorial-wine.txt Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,158 +0,0 @@ -.. -*- coding: utf-8 -*- - -============= -LAX Tutorial -============= - -Introduction ------------- - -LAX is a web framework on top of the Google AppEngine datastore. - - -features: schema/data-model at core of app, selection/view mechanism, -reuseable components, very fast development - - -Since we are french, let us develop an example application that deals -with wine and will allow any wine enthusiast to track the content of -its cellar and share his tasting experiences. - -Schema ------- - -With LAX, the core of the application is the schema/datamodel. - -laxctl newapp ? XXX - -We will start by something simple and define three entities: WineMaker, -Wine and Bottle. - -:: - - class WineMaker(EntityType): - name = String(maxsize=50, required=True) - - class Wine(EntityType): - name = String(required=True, maxsize=100, fulltextindexed=True) - vintage = Int(required=True, constraints=[IntervalBoundConstraint(1850,2100)]) - grown_by = SubjectRelation('WineMaker', cardinality='?*', - description=_('Winemaker who grew the wine')) - - class Bottle(EntityType): - buy_date = Date(description=_('Date when the bottle was bought.'), - default='TODAY') - bottle_of = SubjectRelation('Wine', cardinality='?*') - -A WineMaker only has a name which is a string that is required and -must be less than 50 characters. - -A Wine has a name, which is a string that is required, must be less -than 100 characters and will be indexed in the full-text index XXX -fulltextindex marche pas encore. A Wine -also has a vintage year which is an integer that is required and must -be between 1850 and 2100. A Wine also has a relationship ``grown_by`` -that link it to a WineMaker. Cardinality ``?*`` means that a Wine can -have zero or one WineMaker (``?`` means `zero or one`) and that a -WineMaker can have any number of Wine entities (``*`` means `any number -including zero`). - -A Bottle has a buy_date attribute, which is a date with a default -value of TODAY, meaning that when a new bottle is created, it will -have its creation date as buy_date unless the user changes it to some -other date. A Bottle also has a relationship ``bottle_of`` that link -it to a Wine. 
The cardinality of that relationship implies that a -Bottle can be linked to zero or one Wine and that a Wine can by linked -to any number of Bottle entities. - - -Defining this simple schema is enough to get us started, launch the -application with the command:: - - laxctl start Winopedia - -and point your browser at localhost:8080 - -You will see the home page of your application. It lists the entity -types: WineMaker, Wine, Bottle. - -Let us create a few of these. Click on the [+] at the right of the -link WineMaker. Call this new WineMaker ``Domaine du château`` and -validate the form by clicking on ``button_ok``. - -Click on the logo at top left to get back to the home page, then -follow the WineMaker link. You should be seeing a list with a single -item ``Domaine du château``. Clicking on this item will get you to -its detailed description except that in this case, there is not much -to display besides the name. - -Now get back to the home page by clicking on the top-left logo, then -create a new WineMaker called ``Vallon de la Dame`` and get back to the -home page again to follow the WineMaker link for the second time. The -list now has two items. - -Get back to the home page and click on [+] at the right of the link -Wine. Call this new wine ``Cuvée du Roi`` and enter 2008 as vintage, -then click on ``button_ok``. You added a new wine without saying who -made it. There is a box on the left entitled "actions", click on the -menu item `modify`. You are back to the form to edit the wine entity -you just created, except that the form now has another section with a -combobox titled "add a relationship". Chose "grown_by" in this -menu and a second combobox appears where you pick ``Domaine du -château``. Validate the changes by clicking ``button_ok``. The entity -Wine that is displayed now includes a link to the entity WineMaker -named ``Domaine du château``. - -Exercise -~~~~~~~~ - -Create new entities Wine and Bottle. - -What we learned -~~~~~~~~~~~~~~~ - -Creating a simple schema was enough to set up a new application that -can store WineMaker, Wine, Bottle. - -What is next ? --------------- - -Althought the application is fully functionnal, its look is very -basic. We will now improve how information is displayed by writing -views. - - -Views -====== - -... - -Defining views with selection/views - -implementing interfaces, calendar for bottles bought and for tasting. -calendar with export icalput attribute drink_date on bottle - -add attribute wine color - -create view "bottle table" with color, buy_date, drink_date. - -in view wine, select Wine.bottles and apply view "bottle table" - -demo ajax with filter on bottle table - -Components -=========== - -... - - - -customize MainTemplate - -rss channel of new bottles or wines - -use URLRewriting for nice urls - -talk about security access rights - -talk about rql \ No newline at end of file diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/doc/tutorial.en.txt --- a/goa/doc/tutorial.en.txt Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,227 +0,0 @@ -.. -*- coding: utf-8 -*- - -============= -LAX Tutorial -============= - -Introduction -============ - -LAX stands for Logilab App engine eXtension. It is a web framework -running on top of the Google AppEngine datastore. - -Distinctive features include a data-model driven engine, a query -language, a selection/view mechanism for HTML/XML/text generation, -reuseable components, etc. It all sums up to very fast and efficient -development. 
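Before diving in, a short illustration of the query language mentioned above; this is only a sketch based on the `execute(rqlstring, args=None, eid_key=None, build_descr=True)` signature documented in the removed devmanual chapter, and `self.req`, the `blog` variable and the displayed attribute are assumed context rather than part of the tutorial::

    # run an RQL query through the current request; 'b' is given as eid_key so
    # the RQL query cache knows the substituted value is an eid
    rset = self.req.execute(
        'Any E WHERE E is BlogEntry, E entry_of B, B eid %(b)s',
        {'b': blog.eid}, eid_key='b')
    for entity in rset.entities():
        self.w(u'%s' % entity.title)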
- -This tutorial will guide you to build a blog application step by step -to discover the unique features of LAX. It assumes that you followed -the installation guidelines and that both the AppEngine SDK and the -LAX framework are setup on your computer. - -Creating a very simple application -================================== - -Creating a new application --------------------------- - -When you installed lax, you saw a directory named skel. Make a copy of -this directory and call it BlogDemo. - -Defining a schema ------------------ - -With LAX, the schema/datamodel is the core of the application. - -Let us start with something simple and improve on it later. First, we -make sure that in appconfig.py we have a line :: - - schema_type = 'yams' - -Then, in schema.py, we define two entities : ``Blog`` and ``BlogEntry``. - -:: - - class Blog(EntityType): - title = String(maxsize=50, required=True) - description = String() - - class BlogEntry(EntityType): - title = String(maxsize=100, required=True) - publish_date = Date(default='TODAY') - text = String(fulltextindexed=True) - category = String(vocabulary=('important','business')) - entry_of = SubjectRelation('Blog', cardinality='?*') - -A Blog has a title and a description. The title is a string that is -required and must be less than 50 characters. The description is a -string that is not constrained. - -A BlogEntry has a title, a publish_date and a text. The title is a -string that is required and must be less than 100 characters. The -publish_date is a Date with a default value of TODAY, meaning that -when a BlogEntry is created, its publish_date will be the current day -unless it is modified. The text is a string that will be indexed in -the full-text index and has no constraint. - -A BlogEntry also has a relationship ``entry_of`` that link it to a -Blog. The cardinality ``?*`` means that a BlogEntry can be part of -zero or one Blog (``?`` means `zero or one`) and that a Blog can -have any number of BlogEntry (``*`` means `any number including -zero`). For completeness, remember that ``+`` means `one or more`. - -:note: in lax-0.3.0, cardinality checking is not fully ported to -AppEngine, so cardinality limits are not enforced. This should be -fixed in lax-0.4.0 available at the beginning of June. - -Using the application ---------------------- - -Defining this simple schema is enough to get us started. Launch the -application with the command:: - - python dev_appserver.py BlogDemo - -and point your browser at localhost:8080 - -You will see the home page of your application. It lists the entity -types: Blog and BlogEntry. - -Let us create a few of these. Click on the [+] at the right of the -link Blog. Call this new Blog ``Tech-blog`` and type in -``everything about technology`` as the description, then validate the -form by clicking on ``button_ok``. - -Click on the logo at top left to get back to the home page, then -follow the Blog link. If this link reads ``blog_plural`` it is because -i18n is not working for you yet. Let us ignore this for a while. After -following the link, you should be seeing a list with a single item -``Tech-blog``. Clicking on this item will get you to its detailed -description except that in this case, there is not much to display -besides the name and the phrase ``everything about technology``. - -Now get back to the home page by clicking on the top-left logo, then -create a new Blog called ``MyLife`` and get back to the home page -again to follow the Blog link for the second time. The list now -has two items. 
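The same two blogs could also be created programmatically with RQL write queries; the following is a sketch only, reusing the INSERT form visible in the removed goa/dbinit.py helpers and assuming `execute` is bound to an open server-side session::

    # mirror the creations done through the web interface above
    execute('INSERT Blog B: B title %(t)s, B description %(d)s',
            {'t': u'Tech-blog', 'd': u'everything about technology'})
    execute('INSERT Blog B: B title %(t)s', {'t': u'MyLife'})

Either way the entities end up in the datastore and appear in the Blog list exactly as when created through the forms.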
- -Get back to the home page and click on [+] at the right of the link -BlogEntry. Call this new entry ``Hello World`` and type in some text -before clicking on ``button_ok``. You added a new blog entry without -saying to what blog it belongs. There is a box on the left entitled -``actions``, click on the menu item ``modify``. You are back to the form -to edit the blog entry you just created, except that the form now has -another section with a combobox titled ``add relation``. Chose -``entry_of`` in this menu and a second combobox appears where you pick -``MyLife``. Validate the changes by clicking -``button_ok``. The entity BlogEntry that is displayed now includes a link -to the entity Blog named ``MyLife``. - -Conclusion ----------- - -Exercise -~~~~~~~~ - -Create new blog entries in ``Tech-blog``. - -What we learned -~~~~~~~~~~~~~~~ - -Creating a simple schema was enough to set up a new application that -can store blogs and blog entries. - -What is next ? --------------- - -Althought the application is fully functionnal, its look is very -basic. We will now improve how information is displayed by writing -views. - - -Developing the user interface with Views -======================================== - -[WRITE ME] - -* Defining views with selection/views - -* implementing interfaces, calendar for blog entries. - -* show that a calendar view can export data to ical. - -* create view "blogentry table" with title, publish_date, category. - -* in view blog, select blogentries and apply view "blogentry table" - -* demo ajax by filtering blogentry table on category - -Components -=========== - -[WRITE ME] - -* explain the component architecture - -* add comments to the blog by importing the comments component - -Boxes -====== - -[WRITE ME] - -* explain how to build a box - -* add an blogentry archives box - -Preferences -============ - -[WRITE ME] - -* talk about the user preferences - -* add an example on how to hide / display / move a component or a box - -MainTemplate -============ - -[WRITE ME] - -* customize MainTemplate and show that everything in the user - interface can be changed - - -RSS Channel -=========== - -[WRITE ME] - -* show that the RSS view can be used to display an ordered selection - of blog entries, thus providing a RSS channel - -* show that a different selection (by category) means a different channel - -RQL -==== - -[WRITE ME] - -* talk about the Relation Query Language - -URL Rewriting -============= - -[WRITE ME] - -* show how urls are mapped to selections and views and explain URLRewriting - -Security -========= - -[WRITE ME] - -* talk about security access rights and show that security is defined - using RQL - diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/gaesource.py --- a/goa/gaesource.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,331 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""Adapter for google appengine source. - -""" -__docformat__ = "restructuredtext en" - -from cubicweb import AuthenticationError, UnknownEid -from cubicweb.server.sources import AbstractSource, ConnectionWrapper -from cubicweb.server.pool import SingleOperation -from cubicweb.server.utils import crypt_password -from cubicweb.goa.dbinit import set_user_groups -from cubicweb.goa.rqlinterpreter import RQLInterpreter - -from google.appengine.api.datastore import Key, Entity, Put, Delete -from google.appengine.api import datastore_errors, users - -def _init_groups(guser, euser): - # set default groups - if guser is None: - groups = ['guests'] - else: - groups = ['users'] - if users.is_current_user_admin(): - groups.append('managers') - set_user_groups(euser, groups) - -def _clear_related_cache(session, gaesubject, rtype, gaeobject): - subject, object = str(gaesubject.key()), str(gaeobject.key()) - for eid, role in ((subject, 'subject'), (object, 'object')): - # clear related cache if necessary - try: - entity = session.entity_cache(eid) - except KeyError: - pass - else: - entity.clear_related_cache(rtype, role) - if gaesubject.kind() == 'CWUser': - for asession in session.repo._sessions.itervalues(): - if asession.user.eid == subject: - asession.user.clear_related_cache(rtype, 'subject') - if gaeobject.kind() == 'CWUser': - for asession in session.repo._sessions.itervalues(): - if asession.user.eid == object: - asession.user.clear_related_cache(rtype, 'object') - -def _mark_modified(session, gaeentity): - modified = session.transaction_data.setdefault('modifiedentities', {}) - modified[str(gaeentity.key())] = gaeentity - DatastorePutOp(session) - -def _rinfo(session, subject, rtype, object): - gaesubj = session.datastore_get(subject) - gaeobj = session.datastore_get(object) - rschema = session.vreg.schema.rschema(rtype) - cards = rschema.rproperty(gaesubj.kind(), gaeobj.kind(), 'cardinality') - return gaesubj, gaeobj, cards - -def _radd(session, gaeentity, targetkey, relation, card): - if card in '?1': - gaeentity[relation] = targetkey - else: - try: - related = gaeentity[relation] - except KeyError: - related = [] - else: - if related is None: - related = [] - related.append(targetkey) - gaeentity[relation] = related - _mark_modified(session, gaeentity) - -def _rdel(session, gaeentity, targetkey, relation, card): - if card in '?1': - gaeentity[relation] = None - else: - related = gaeentity[relation] - if related is not None: - related = [key for key in related if not key == targetkey] - gaeentity[relation] = related or None - _mark_modified(session, gaeentity) - - -class DatastorePutOp(SingleOperation): - """delayed put of entities to have less datastore write api calls - - * save all modified entities at precommit (should be the first operation - processed, hence the 0 returned by insert_index()) - - * in case others precommit operations modify some entities, resave modified - entities at commit. This suppose that no db changes will occurs during - commit event but it should be the case. 
- """ - def insert_index(self): - return 0 - - def _put_entities(self): - pending = self.session.transaction_data.get('pendingeids', ()) - modified = self.session.transaction_data.get('modifiedentities', {}) - for eid, gaeentity in modified.iteritems(): - assert not eid in pending - Put(gaeentity) - modified.clear() - - def commit_event(self): - self._put_entities() - - def precommit_event(self): - self._put_entities() - - -class GAESource(AbstractSource): - """adapter for a system source on top of google appengine datastore""" - - passwd_rql = "Any P WHERE X is CWUser, X login %(login)s, X upassword P" - auth_rql = "Any X WHERE X is CWUser, X login %(login)s, X upassword %(pwd)s" - _sols = ({'X': 'CWUser', 'P': 'Password'},) - - options = () - - def __init__(self, repo, appschema, source_config, *args, **kwargs): - AbstractSource.__init__(self, repo, appschema, source_config, - *args, **kwargs) - if repo.config['use-google-auth']: - self.info('using google authentication service') - self.authenticate = self.authenticate_gauth - else: - self.authenticate = self.authenticate_local - - def reset_caches(self): - """method called during test to reset potential source caches""" - pass - - def init_creating(self): - pass - - def init(self): - # XXX unregister unsupported hooks - from cubicweb.server.hooks import sync_owner_after_add_composite_relation - self.repo.hm.unregister_hook(sync_owner_after_add_composite_relation, - 'after_add_relation', '') - - def get_connection(self): - return ConnectionWrapper() - - # ISource interface ####################################################### - - def compile_rql(self, rql): - rqlst = self.repo.vreg.parse(rql) - rqlst.restricted_vars = () - rqlst.children[0].solutions = self._sols - return rqlst - - def set_schema(self, schema): - """set the instance'schema""" - self.interpreter = RQLInterpreter(schema) - self.schema = schema - if 'CWUser' in schema and not self.repo.config['use-google-auth']: - # rql syntax trees used to authenticate users - self._passwd_rqlst = self.compile_rql(self.passwd_rql) - self._auth_rqlst = self.compile_rql(self.auth_rql) - - def support_entity(self, etype, write=False): - """return true if the given entity's type is handled by this adapter - if write is true, return true only if it's a RW support - """ - return True - - def support_relation(self, rtype, write=False): - """return true if the given relation's type is handled by this adapter - if write is true, return true only if it's a RW support - """ - return True - - def authenticate_gauth(self, session, login, password): - guser = users.get_current_user() - # allowing or not anonymous connection should be done in the app.yaml - # file, suppose it's authorized if we are there - if guser is None: - login = u'anonymous' - else: - login = unicode(guser.nickname()) - # XXX http://code.google.com/appengine/docs/users/userobjects.html - # use a reference property to automatically work with email address - # changes after the propagation feature is implemented - key = Key.from_path('CWUser', 'key_' + login, parent=None) - try: - euser = session.datastore_get(key) - # XXX fix user. 
Required until we find a better way to fix broken records - if not euser.get('s_in_group'): - _init_groups(guser, euser) - Put(euser) - return str(key) - except datastore_errors.EntityNotFoundError: - # create a record for this user - euser = Entity('CWUser', name='key_' + login) - euser['s_login'] = login - _init_groups(guser, euser) - Put(euser) - return str(euser.key()) - - def authenticate_local(self, session, login, password): - """return CWUser eid for the given login/password if this account is - defined in this source, else raise `AuthenticationError` - - two queries are needed since passwords are stored crypted, so we have - to fetch the salt first - """ - args = {'login': login, 'pwd' : password} - if password is not None: - rset = self.syntax_tree_search(session, self._passwd_rqlst, args) - try: - pwd = rset[0][0] - except IndexError: - raise AuthenticationError('bad login') - # passwords are stored using the bytea type, so we get a StringIO - if pwd is not None: - args['pwd'] = crypt_password(password, pwd[:2]) - # get eid from login and (crypted) password - rset = self.syntax_tree_search(session, self._auth_rqlst, args) - try: - return rset[0][0] - except IndexError: - raise AuthenticationError('bad password') - - def syntax_tree_search(self, session, union, args=None, cachekey=None, - varmap=None): - """return result from this source for a rql query (actually from a rql - syntax tree and a solution dictionary mapping each used variable to a - possible type). If cachekey is given, the query necessary to fetch the - results (but not the results themselves) may be cached using this key. - """ - results, description = self.interpreter.interpret(union, args, - session.datastore_get) - return results # XXX description - - def flying_insert(self, table, session, union, args=None, varmap=None): - raise NotImplementedError - - def add_entity(self, session, entity): - """add a new entity to the source""" - # do not delay add_entity as other modifications, new created entity - # needs an eid - entity.put() - - def update_entity(self, session, entity): - """replace an entity in the source""" - gaeentity = entity.to_gae_model() - _mark_modified(session, entity.to_gae_model()) - if gaeentity.kind() == 'CWUser': - for asession in self.repo._sessions.itervalues(): - if asession.user.eid == entity.eid: - asession.user.update(dict(gaeentity)) - - def delete_entity(self, session, entity): - """delete an entity from the source""" - # do not delay delete_entity as other modifications to ensure - # consistency - eid = entity.eid - key = Key(eid) - Delete(key) - session.clear_datastore_cache(key) - session.drop_entity_cache(eid) - session.transaction_data.get('modifiedentities', {}).pop(eid, None) - - def add_relation(self, session, subject, rtype, object): - """add a relation to the source""" - gaesubj, gaeobj, cards = _rinfo(session, subject, rtype, object) - _radd(session, gaesubj, gaeobj.key(), 's_' + rtype, cards[0]) - _radd(session, gaeobj, gaesubj.key(), 'o_' + rtype, cards[1]) - _clear_related_cache(session, gaesubj, rtype, gaeobj) - - def delete_relation(self, session, subject, rtype, object): - """delete a relation from the source""" - gaesubj, gaeobj, cards = _rinfo(session, subject, rtype, object) - pending = session.transaction_data.setdefault('pendingeids', set()) - if not subject in pending: - _rdel(session, gaesubj, gaeobj.key(), 's_' + rtype, cards[0]) - if not object in pending: - _rdel(session, gaeobj, gaesubj.key(), 'o_' + rtype, cards[1]) - _clear_related_cache(session, 
gaesubj, rtype, gaeobj) - - # system source interface ################################################# - - def eid_type_source(self, session, eid): - """return a tuple (type, source, extid) for the entity with id """ - try: - key = Key(eid) - except datastore_errors.BadKeyError: - raise UnknownEid(eid) - return key.kind(), 'system', None - - def create_eid(self, session): - return None # let the datastore generating key - - def add_info(self, session, entity, source, extid=None): - """add type and source info for an eid into the system table""" - pass - - def delete_info(self, session, eid, etype, uri, extid): - """delete system information on deletion of an entity by transfering - record from the entities table to the deleted_entities table - """ - pass - - def fti_unindex_entity(self, session, eid): - """remove text content for entity with the given eid from the full text - index - """ - pass - - def fti_index_entity(self, session, entity): - """add text content of a created/modified entity to the full text index - """ - pass diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/goaconfig.py --- a/goa/goaconfig.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,179 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""google appengine configuration - -""" -__docformat__ = "restructuredtext en" - -import os -from os.path import join - -from cubicweb import CW_SOFTWARE_ROOT -from cubicweb.cwconfig import CubicWebConfiguration -from cubicweb.web.webconfig import WebConfiguration, merge_options -from cubicweb.server.serverconfig import ServerConfiguration -from cubicweb.goa.dbmyams import load_schema - -UNSUPPORTED_OPTIONS = set(('connections-pool-size', - 'pyro-host', 'pyro-instance-id', - 'pyro-ns-host', 'pyro-ns-group', - 'https-url', 'host', 'pid-file', 'uid', 'base-url', 'log-file', - 'smtp-host', 'smtp-port', - 'embed-allowed', - )) - -# XXX fix: -# * default sender-name / sender-addr value -# * what about *session-time -# * check auth-mode=http + fix doc (eg require use-google-auth = False) - -class GAEConfiguration(ServerConfiguration, WebConfiguration): - """repository and web instance in Google AppEngine environment""" - name = 'app' - repo_method = 'inmemory' - options = merge_options(( - ('included-cubes', - {'type' : 'csv', - 'default': [], - 'help': 'list of db model based cubes used by the instance.', - 'group': 'main', 'level': 1, - }), - ('included-yams-cubes', - {'type' : 'csv', - 'default': [], - 'help': 'list of yams based cubes used by the instance.', - 'group': 'main', 'level': 1, - }), - ('use-google-auth', - {'type' : 'yn', - 'default': True, - 'help': 'does this instance rely on google authentication service or not.', - 'group': 'main', 'level': 1, - }), - ('schema-type', - {'type' : 'choice', 'choices': ('yams', 'dbmodel'), - 'default': 'yams', - 'help': 'does this instance is defining its schema using yams or db model.', - 'group': 'main', 'level': 1, - }), - # overriden options - ('query-log-file', - {'type' : 'string', - 'default': None, - 'help': 'web instance query log file: DON\'T SET A VALUE HERE WHEN ' - 'UPLOADING YOUR INSTANCE. This should only be used to analyse ' - 'queries issued by your instance in the development environment.', - 'group': 'main', 'level': 2, - }), - ('anonymous-user', - {'type' : 'string', - 'default': None, - 'help': 'login of the CubicWeb user account to use for anonymous user ' - '(if you want to allow anonymous). 
This option will be ignored if ' - 'use-google-auth option is set (in which case you should control ' - 'anonymous access using the app.yaml file)', - 'group': 'main', 'level': 1, - }), - - ) + WebConfiguration.options + ServerConfiguration.options) - options = [(optname, optdict) for optname, optdict in options - if not optname in UNSUPPORTED_OPTIONS] - - cubicweb_appobject_path = WebConfiguration.cubicweb_appobject_path | ServerConfiguration.cubicweb_appobject_path - cubicweb_appobject_path = list(cubicweb_appobject_path) + ['goa/appobjects'] - cube_appobject_path = WebConfiguration.cube_appobject_path | ServerConfiguration.cube_appobject_path - - # use file system schema - read_instance_schema = False - # schema is not persistent, don't load schema hooks (unavailable) - schema_hooks = False - # no user workflow for now - consider_user_state = False - - # deactivate some hooks during [pre|post]create scripts execution - # (unique values check, owned_by/created_by relations setup) - free_wheel = True - - if not os.environ.get('APYCOT_ROOT'): - CUBES_DIR = join(CW_SOFTWARE_ROOT, '../cubes') - - def __init__(self, appid, apphome=None): - if apphome is None: - apphome = 'data' - self._apphome = apphome - self._base_url = None - CubicWebConfiguration.__init__(self, appid) - - def __getitem__(self, key): - if key == 'connections-pool-size': - return 4 # > 1 to allow multiple user sessions in tests - if key == 'base-url': - return self._base_url - return super(GAEConfiguration, self).__getitem__(key) - - # overriden from cubicweb base configuration - - @property - def apphome(self): - return self._apphome - - def cubes(self): - """return the list of top level cubes used by this instance (eg - without dependencies) - """ - if self._cubes is None: - cubes = self['included-cubes'] + self['included-yams-cubes'] - cubes = self.expand_cubes(cubes) - return self.reorder_cubes(cubes) - return self._cubes - - def vc_config(self): - """return CubicWeb's engine and instance's cube versions number""" - return {} - - # overriden from cubicweb web configuration - - def instance_md5_version(self): - return '' - - def _init_base_url(self): - pass - - # overriden from cubicweb server configuration - - def sources(self): - return {'system': {'adapter': 'gae'}} - - def load_schema(self, schemaclasses=None, extrahook=None): - try: - return self._schema - except AttributeError: - self._schema = load_schema(self, schemaclasses, extrahook) - return self._schema - - # goa specific - def repo_session(self, sessionid): - return self.repository()._sessions[sessionid] - - def is_anonymous_user(self, login): - if self['use-google-auth']: - from google.appengine.api import users - return users.get_current_user() is None - else: - return login == self.anonymous_user()[0] - diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/goactl.py --- a/goa/goactl.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,254 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""cubicweb on appengine plugins for cubicweb-ctl - -""" -__docformat__ = "restructuredtext en" - -from os.path import exists, join, split, basename, normpath, abspath -from logilab.common.clcommands import register_commands - -from cubicweb import CW_SOFTWARE_ROOT, BadCommandUsage -from cubicweb.toolsutils import (Command, copy_skeleton, create_symlink, - create_dir) -from cubicweb.cwconfig import CubicWebConfiguration - - -def slink_directories(): - import rql, yams, yapps, docutils, roman - try: - import json as simplejson - except ImportError: - import simplejson - from logilab import common as lgc - from logilab import constraint as lgcstr - from logilab import mtconverter as lgmtc - dirs = [ - (lgc.__path__[0], 'logilab/common'), - (lgmtc.__path__[0], 'logilab/mtconverter'), - (lgcstr.__path__[0], 'logilab/constraint'), - (rql.__path__[0], 'rql'), - (simplejson.__path__[0], 'simplejson'), - (yams.__path__[0], 'yams'), - (yapps.__path__[0], 'yapps'), - (docutils.__path__[0], 'docutils'), - (roman.__file__.replace('.pyc', '.py'), 'roman.py'), - - ('/usr/share/fckeditor/', 'fckeditor'), - - (join(CW_SOFTWARE_ROOT, 'web', 'data'), join('cubes', 'shared', 'data')), - (join(CW_SOFTWARE_ROOT, 'web', 'wdoc'), join('cubes', 'shared', 'wdoc')), - (join(CW_SOFTWARE_ROOT, 'i18n'), join('cubes', 'shared', 'i18n')), - (join(CW_SOFTWARE_ROOT, 'goa', 'tools'), 'tools'), - (join(CW_SOFTWARE_ROOT, 'goa', 'bin'), 'bin'), - ] - - try: - import dateutil - import vobject - dirs.extend([ (dateutil.__path__[0], 'dateutil'), - (vobject.__path__[0], 'vobject') ] ) - except ImportError: - pass - return dirs - -COPY_CW_FILES = ( - '__init__.py', - '__pkginfo__.py', - '_exceptions.py', - 'appobject.py', - 'dbapi.py', - 'cwvreg.py', - 'cwconfig.py', - 'entity.py', - 'interfaces.py', - 'i18n.py', - 'mail.py', - 'migration.py', - 'mixins.py', - 'mttransforms.py', - 'rqlrewrite.py', - 'rset.py', - 'schema.py', - 'schemaviewer.py', - 'selectors.py', - 'uilib.py', - 'utils.py', - 'vregistry.py', - 'view.py', - - 'ext/html4zope.py', - 'ext/rest.py', - - 'server/hookhelper.py', - 'server/hooksmanager.py', - 'server/hooks.py', - 'server/migractions.py', - 'server/pool.py', - 'server/querier.py', - 'server/repository.py', - 'server/securityhooks.py', - 'server/session.py', - 'server/serverconfig.py', - 'server/ssplanner.py', - 'server/utils.py', - 'server/sources/__init__.py', - - 'entities/__init__.py', - 'entities/authobjs.py', - 'entities/lib.py', - 'entities/schemaobjs.py', - 'entities/wfobjs.py', - - 'sobjects/__init__.py', - 'sobjects/notification.py', - -# XXX would be necessary for goa.testlib but require more stuff to be added -# such as server.serverconfig and so on (check devtools.__init__) -# 'devtools/__init__.py', -# 'devtools/fake.py', - - 'web/__init__.py', - 'web/_exceptions.py', - 'web/action.py', - 'web/application.py', - 'web/box.py', - 'web/component.py', - 'web/controller.py', - 'web/form.py', - 'web/htmlwidgets.py', - 'web/httpcache.py', - 'web/request.py', - 'web/webconfig.py', - - 'web/views/__init__.py', - 'web/views/actions.py', - 'web/views/basecomponents.py', - 'web/views/basecontrollers.py', - 'web/views/baseforms.py', - 
'web/views/basetemplates.py', - 'web/views/baseviews.py', - 'web/views/boxes.py', - 'web/views/calendar.py', - 'web/views/error.py', - 'web/views/editcontroller.py', - 'web/views/ibreadcrumbs.py', - 'web/views/idownloadable.py', - 'web/views/magicsearch.py', - 'web/views/management.py', - 'web/views/navigation.py', - 'web/views/startup.py', - 'web/views/vcard.py', - 'web/views/wdoc.py', - 'web/views/urlpublishing.py', - 'web/views/urlrewrite.py', - 'web/views/xbel.py', - - 'wsgi/__init__.py', - 'wsgi/handler.py', - 'wsgi/request.py', - - 'goa/__init__.py', - 'goa/db.py', - 'goa/dbinit.py', - 'goa/dbmyams.py', - 'goa/goaconfig.py', - 'goa/goavreg.py', - 'goa/gaesource.py', - 'goa/rqlinterpreter.py', - 'goa/appobjects/__init__.py', - 'goa/appobjects/components.py', - 'goa/appobjects/dbmgmt.py', - 'goa/appobjects/gauthservice.py', - 'goa/appobjects/sessions.py', - - 'schemas/bootstrap.py', - 'schemas/base.py', - ) - -OVERRIDEN_FILES = ( - ('toolsutils.py', 'toolsutils.py'), - ('mttransforms.py', 'mttransforms.py'), - ('server__init__.py', 'server/__init__.py'), - ('rqlannotation.py', 'server/rqlannotation.py'), - ) - - -def create_init_file(pkgdir, pkgname): - open(join(pkgdir, '__init__.py'), 'w').write('"""%s pkg"""' % pkgname) - - -class NewGoogleAppCommand(Command): - """Create a new google appengine instance. - - - the path to the appengine instance directory - """ - name = 'newgapp' - arguments = '' - - def run(self, args): - if len(args) != 1: - raise BadCommandUsage("exactly one argument is expected") - appldir, = args - appldir = normpath(abspath(appldir)) - appid = basename(appldir) - context = {'appname': appid} - # goa instance'skeleton - copy_skeleton(join(CW_SOFTWARE_ROOT, 'goa', 'skel'), - appldir, context, askconfirm=True) - # cubicweb core dependencies - for directory, subdirectory in slink_directories(): - subdirectory = join(appldir, subdirectory) - if not exists(split(subdirectory)[0]): - create_dir(split(subdirectory)[0]) - create_symlink(directory, join(appldir, subdirectory)) - create_init_file(join(appldir, 'logilab'), 'logilab') - # copy supported part of cubicweb - create_dir(join(appldir, 'cubicweb')) - for fpath in COPY_CW_FILES: - target = join(appldir, 'cubicweb', fpath) - if not exists(split(target)[0]): - create_dir(split(target)[0]) - create_symlink(join(CW_SOFTWARE_ROOT, fpath), target) - # overriden files - for fpath, subfpath in OVERRIDEN_FILES: - create_symlink(join(CW_SOFTWARE_ROOT, 'goa', 'overrides', fpath), - join(appldir, 'cubicweb', subfpath)) - # link every supported components - packagesdir = join(appldir, 'cubes') - create_init_file(join(appldir, 'cubes'), 'cubes') - for include in ('addressbook','basket', 'blog','folder', - 'tag', 'comment', 'file', 'link', - 'mailinglist', 'person', 'task', 'zone', - ): - create_symlink(CubicWebConfiguration.cube_dir(include), - join(packagesdir, include)) - # generate sample config - from cubicweb.goa.goaconfig import GAEConfiguration - from cubicweb.migration import MigrationHelper - config = GAEConfiguration(appid, appldir) - if exists(config.main_config_file()): - mih = MigrationHelper(config) - mih.rewrite_configuration() - else: - config.save() - - -register_commands((NewGoogleAppCommand, - )) diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/goavreg.py --- a/goa/goavreg.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,86 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""goa specific registry - -""" -__docformat__ = "restructuredtext en" - -from os import listdir -from os.path import join, isdir - -from cubicweb import CW_SOFTWARE_ROOT -from cubicweb.cwvreg import CubicWebVRegistry - - -def _pkg_name(cube, module): - if cube is None: - return module - return 'cubes.%s.%s' % (cube, module) - -class GAEVRegistry(CubicWebVRegistry): - - def set_schema(self, schema): - """disable reload hooks of cubicweb registry set_schema method""" - self.schema = schema - - def load(self, applroot): - from cubicweb.goa import db - self.load_module(db) # AnyEntity class - # explicit loading, we don't want to load __init__.py - self.load_directory(join(CW_SOFTWARE_ROOT, 'entities'), - 'cubicweb.entities', skip=('__init__.py',)) - self.load_directory(join(CW_SOFTWARE_ROOT, 'web', 'views'), - 'cubicweb.web.views') - self.load_directory(join(CW_SOFTWARE_ROOT, 'goa', 'appobjects'), - 'cubicweb.goa.appobjects') - for cube in reversed(self.config.cubes()): - self.load_cube(cube) - self.load_instance(applroot) - - def load_directory(self, directory, cube, skip=()): - for filename in listdir(directory): - if filename[-3:] == '.py' and not filename in skip: - self._import('%s.%s' % (cube, filename[:-3])) - - def load_cube(self, cube): - self._auto_load(self.config.cube_dir(cube), - cube in self.config['included-cubes'], - cube) - - def load_instance(self, applroot): - self._auto_load(applroot, self.config['schema-type'] == 'dbmodel') - - def _import(self, modname): - obj = __import__(modname) - for attr in modname.split('.')[1:]: - obj = getattr(obj, attr) - self.load_module(obj) - - def _auto_load(self, path, loadschema, cube=None): - vobjpath = self.config.cube_appobject_path - for filename in listdir(path): - if filename[-3:] == '.py' and filename[:-3] in vobjpath: - self._import(_pkg_name(cube, filename[:-3])) - else: - abspath = join(path, filename) - if isdir(abspath) and filename in vobjpath: - self.load_directory(abspath, _pkg_name(cube, filename)) - if loadschema: - # when using db.Model defined schema, the defined class is used as - # entity class as well and so have to be registered - self._import(_pkg_name(cube, 'schema')) diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/overrides/__init__.py --- a/goa/overrides/__init__.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,21 +0,0 @@ -# server.__init__ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/overrides/mttransforms.py --- a/goa/overrides/mttransforms.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,61 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""mime type transformation engine for cubicweb, based on mtconverter - -""" -__docformat__ = "restructuredtext en" - -from logilab import mtconverter - -from logilab.mtconverter.engine import TransformEngine -from logilab.mtconverter.transform import Transform -from cubicweb.uilib import rest_publish, html_publish, remove_html_tags - -HTML_MIMETYPES = ('text/html', 'text/xhtml', 'application/xhtml+xml') -# CubicWeb specific transformations - -class rest_to_html(Transform): - inputs = ('text/rest', 'text/x-rst') - output = 'text/html' - def _convert(self, trdata): - return rest_publish(trdata.appobject, trdata.decode()) - -class html_to_html(Transform): - inputs = HTML_MIMETYPES - output = 'text/html' - def _convert(self, trdata): - return html_publish(trdata.appobject, trdata.data) - - -# Instantiate and configure the transformation engine - -mtconverter.UNICODE_POLICY = 'replace' - -ENGINE = TransformEngine() -ENGINE.add_transform(rest_to_html()) -ENGINE.add_transform(html_to_html()) - -HAS_PIL_TRANSFORMS = False -HAS_PYGMENTS_TRANSFORMS = False - -class html_to_text(Transform): - inputs = HTML_MIMETYPES - output = 'text/plain' - def _convert(self, trdata): - return remove_html_tags(trdata.data) -ENGINE.add_transform(html_to_text()) diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/overrides/rqlannotation.py --- a/goa/overrides/rqlannotation.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,34 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" -""" -__docformat__ = "restructuredtext en" - -def set_qdata(getrschema, union, noinvariant): - pass - -class SQLGenAnnotator(object): - def __init__(self, schema): - self.schema = schema - self.nfdomain = frozenset(eschema.type for eschema in schema.entities() - if not eschema.final) - def annotate(self, rqlst): - rqlst.has_text_query = False - rqlst.need_distinct = False - - diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/overrides/server__init__.py --- a/goa/overrides/server__init__.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,36 +0,0 @@ -# server debugging flag -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -DEBUG = False - -# sqlite'stored procedures have to be registered at connexion opening time -SQL_CONNECT_HOOKS = {} - -# add to this set relations which should have their add security checking done -# *BEFORE* adding the actual relation (done after by default) -BEFORE_ADD_RELATIONS = set(('owned_by',)) - -# add to this set relations which should have their add security checking done -# *at COMMIT TIME* (done after by default) -ON_COMMIT_ADD_RELATIONS = set(()) - -# available sources registry -SOURCE_TYPES = {} diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/overrides/server_utils.py --- a/goa/overrides/server_utils.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,32 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" - -class RepoThread(object): - def __init__(self, *args): - pass # XXX raise - def start(self): - pass - def join(self): - pass - -class LoopTask(RepoThread): - def cancel(self): - pass diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/overrides/toolsutils.py --- a/goa/overrides/toolsutils.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,70 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -import sys -from cubicweb import warning - -def lines(path, comments=None): - result = [] - for line in open(path, 'U'): - line = line.strip() - if line and (comments is None or not line.startswith(comments)): - result.append(line) - return result - -def read_config(config_file): - """read the instance configuration from a file and return it as a - dictionnary - - :type config_file: str - :param config_file: path to the configuration file - - :rtype: dict - :return: a dictionary with specified values associated to option names - """ - config = current = {} - try: - for line in lines(config_file, comments='#'): - try: - option, value = line.split('=', 1) - except ValueError: - option = line.strip().lower() - if option[0] == '[': - # start a section - section = option[1:-1] - assert not config.has_key(section), \ - 'Section %s is defined more than once' % section - config[section] = current = {} - continue - print >> sys.stderr, 'ignoring malformed line\n%r' % line - continue - option = option.strip().replace(' ', '_') - value = value.strip() - current[option] = value or None - except IOError, ex: - warning('missing or non readable configuration file %s (%s)', - config_file, ex) - return config - -def env_path(env_var, default, name): - return default - -def create_dir(*args): - raise RuntimeError() diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/rqlinterpreter.py --- a/goa/rqlinterpreter.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,684 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""provide a minimal RQL support for google appengine dbmodel - -""" -__docformat__ = "restructuredtext en" - -from datetime import datetime - -from rql import RQLHelper, nodes - -from cubicweb import Binary -from cubicweb.rset import ResultSet -from cubicweb.server import SQL_CONNECT_HOOKS - -from google.appengine.api.datastore import Key, Get, Query, Entity -from google.appengine.api.datastore_types import Text, Blob -from google.appengine.api.datastore_errors import EntityNotFoundError, BadKeyError - - -def etype_from_key(key): - return Key(key).kind() - -def poss_var_types(myvar, ovar, kind, solutions): - return frozenset(etypes[myvar] for etypes in solutions - if etypes[ovar] == kind) - -def expand_result(results, result, myvar, values, dsget=None): - values = map(dsget, values) - if values: - result[myvar] = values.pop(0) - for value in values: - newresult = result.copy() - newresult[myvar] = value - results.append(newresult) - else: - results.remove(result) - -def _resolve(restrictions, solutions, fixed): - varname = restrictions[0].searched_var - objs = [] - for etype in frozenset(etypes[varname] for etypes in solutions): - gqlargs = {} - query = Query(etype) - for restriction in restrictions: - restriction.fill_query(fixed, query) - pobjs = query.Run() - if varname in fixed: - value = fixed[varname] - objs += (x for x in pobjs if x == value) - else: - objs += pobjs - if varname in fixed and not objs: - raise EidMismatch(varname, value) - return objs - -def _resolve_not(restrictions, solutions, fixed): - restr = restrictions[0] - constrvarname = restr.constraint_var - if len(restrictions) > 1 or not constrvarname in fixed: - raise NotImplementedError() - varname = restr.searched_var - objs = [] - for etype in frozenset(etypes[varname] for etypes in solutions): - gqlargs = {} - for operator in ('<', '>'): - query = Query(etype) - restr.fill_query(fixed, query, operator) - objs += query.Run() - return objs - -def _print_results(rlist): - return '[%s]' % ', '.join(_print_result(r) for r in rlist) - -def _print_result(rdict): - string = [] - for k, v in rdict.iteritems(): - if isinstance(v, Entity): - string.append('%s: %s' % (k, v.key()))#_print_result(v))) - elif isinstance(v, list): - string.append('%s: [%s]' % (k, ', '.join(str(i) for i in v))) - else: - string.append('%s: %s' % (k, v)) - return '{%s}' % ', '.join(string) - - -class EidMismatch(Exception): - def __init__(self, varname, value): - self.varname = varname - self.value = value - - -class Restriction(object): - supported_operators = ('=',) - def __init__(self, rel): - operator = rel.children[1].operator - if not operator in self.supported_operators: - raise NotImplementedError('unsupported operator') - self.rel = rel - self.operator = operator - self.rtype = rel.r_type - self.var = rel.children[0] - - def __repr__(self): - return '<%s for %s>' % (self.__class__.__name__, self.rel) - - @property - def rhs(self): - return self.rel.children[1].children[0] - - -class MultipleRestriction(object): - def __init__(self, restrictions): - self.restrictions = restrictions - - def resolve(self, solutions, fixed): - return _resolve(self.restrictions, solutions, fixed) - - -class VariableSelection(Restriction): - def __init__(self, rel, dsget, prefix='s'): - Restriction.__init__(self, rel) - self._dsget = dsget - self._not = self.rel.neged(strict=True) - self._prefix = prefix + '_' - - def __repr__(self): - return '<%s%s for %s>' % (self._prefix[0], self.__class__.__name__, self.rel) - - @property - def searched_var(self): - if 
self._prefix == 's_': - return self.var.name - return self.rhs.name - - @property - def constraint_var(self): - if self._prefix == 's_': - return self.rhs.name - return self.var.name - - def _possible_values(self, myvar, ovar, entity, solutions, dsprefix): - if self.rtype == 'identity': - return (entity.key(),) - value = entity.get(dsprefix + self.rtype) - if value is None: - return [] - if not isinstance(value, list): - value = [value] - vartypes = poss_var_types(myvar, ovar, entity.kind(), solutions) - return (v for v in value if v.kind() in vartypes) - - def complete_and_filter(self, solutions, results): - myvar = self.rhs.name - ovar = self.var.name - rtype = self.rtype - if self.schema.rschema(rtype).final: - # should be detected by rql.stcheck: "Any C WHERE NOT X attr C" doesn't make sense - #if self._not: - # raise NotImplementedError() - for result in results: - result[myvar] = result[ovar].get('s_'+rtype) - elif self.var.name in results[0]: - if self.rhs.name in results[0]: - self.filter(solutions, results) - else: - if self._not: - raise NotImplementedError() - for result in results[:]: - values = self._possible_values(myvar, ovar, result[ovar], - solutions, 's_') - expand_result(results, result, myvar, values, self._dsget) - else: - assert self.rhs.name in results[0] - self.object_complete_and_filter(solutions, results) - - def filter(self, solutions, results): - myvar = self.rhs.name - ovar = self.var.name - newsols = {} - for result in results[:]: - entity = result[ovar] - key = entity.key() - if not key in newsols: - values = self._possible_values(myvar, ovar, entity, solutions, 's_') - newsols[key] = frozenset(v for v in values) - if self._not: - if result[myvar].key() in newsols[key]: - results.remove(result) - elif not result[myvar].key() in newsols[key]: - results.remove(result) - - def object_complete_and_filter(self, solutions, results): - if self._not: - raise NotImplementedError() - myvar = self.var.name - ovar = self.rhs.name - for result in results[:]: - values = self._possible_values(myvar, ovar, result[ovar], - solutions, 'o_') - expand_result(results, result, myvar, values, self._dsget) - - -class EidRestriction(Restriction): - def __init__(self, rel, dsget): - Restriction.__init__(self, rel) - self._dsget = dsget - - def resolve(self, kwargs): - value = self.rel.children[1].children[0].eval(kwargs) - return self._dsget(value) - - -class RelationRestriction(VariableSelection): - - def _get_value(self, fixed): - return fixed[self.constraint_var].key() - - def fill_query(self, fixed, query, operator=None): - restr = '%s%s %s' % (self._prefix, self.rtype, operator or self.operator) - query[restr] = self._get_value(fixed) - - def resolve(self, solutions, fixed): - if self.rtype == 'identity': - if self._not: - raise NotImplementedError() - return [fixed[self.constraint_var]] - if self._not: - return _resolve_not([self], solutions, fixed) - return _resolve([self], solutions, fixed) - - -class NotRelationRestriction(RelationRestriction): - - def _get_value(self, fixed): - return None - - def resolve(self, solutions, fixed): - if self.rtype == 'identity': - raise NotImplementedError() - return _resolve([self], solutions, fixed) - - -class AttributeRestriction(RelationRestriction): - supported_operators = ('=', '>', '>=', '<', '<=', 'ILIKE') - def __init__(self, rel, kwargs): - RelationRestriction.__init__(self, rel, None) - value = self.rhs.eval(kwargs) - self.value = value - if self.operator == 'ILIKE': - if value.startswith('%'): - raise NotImplementedError('LIKE is 
only supported for prefix search') - if not value.endswith('%'): - raise NotImplementedError('LIKE is only supported for prefix search') - self.operator = '>' - self.value = value[:-1] - - def complete_and_filter(self, solutions, results): - # check lhs var first in case this is a restriction - assert self._not - myvar, rtype, value = self.var.name, self.rtype, self.value - for result in results[:]: - if result[myvar].get('s_'+rtype) == value: - results.remove(result) - - def _get_value(self, fixed): - return self.value - - -class DateAttributeRestriction(AttributeRestriction): - """just a thin layer on top af `AttributeRestriction` that - tries to convert date strings such as in : - Any X WHERE X creation_date >= '2008-03-04' - """ - def __init__(self, rel, kwargs): - super(DateAttributeRestriction, self).__init__(rel, kwargs) - if isinstance(self.value, basestring): -# try: - self.value = datetime.strptime(self.value, '%Y-%m-%d') -# except Exception, exc: -# from logging import error -# error('unable to parse date %s with format %%Y-%%m-%%d (exc=%s)', value, exc) - - -class AttributeInRestriction(AttributeRestriction): - def __init__(self, rel, kwargs): - RelationRestriction.__init__(self, rel, None) - values = [] - for c in self.rel.children[1].iget_nodes(nodes.Constant): - values.append(c.eval(kwargs)) - self.value = values - - @property - def operator(self): - return 'in' - - -class TypeRestriction(AttributeRestriction): - def __init__(self, var): - self.var = var - - def __repr__(self): - return '<%s for %s>' % (self.__class__.__name__, self.var) - - def resolve(self, solutions, fixed): - objs = [] - for etype in frozenset(etypes[self.var.name] for etypes in solutions): - objs += Query(etype).Run() - return objs - - -def append_result(res, descr, i, j, value, etype): - if value is not None: - if isinstance(value, Text): - value = unicode(value) - elif isinstance(value, Blob): - value = Binary(str(value)) - if j == 0: - res.append([value]) - descr.append([etype]) - else: - res[i].append(value) - descr[i].append(etype) - - -class ValueResolver(object): - def __init__(self, functions, args, term): - self.functions = functions - self.args = args - self.term = term - self._solution = self.term.stmt.solutions[0] - - def compute(self, result): - """return (entity type, value) to which self.term is evaluated according - to the given result dictionnary and to query arguments (self.args) - """ - return self.term.accept(self, result) - - def visit_function(self, node, result): - args = tuple(n.accept(self, result)[1] for n in node.children) - value = self.functions[node.name](*args) - return node.get_type(self._solution, self.args), value - - def visit_variableref(self, node, result): - value = result[node.name] - try: - etype = value.kind() - value = str(value.key()) - except AttributeError: - etype = self._solution[node.name] - return etype, value - - def visit_constant(self, node, result): - return node.get_type(kwargs=self.args), node.eval(self.args) - - -class RQLInterpreter(object): - """algorithm: - 1. visit the restriction clauses and collect restriction for each subject - of a relation. Different restriction types are: - * EidRestriction - * AttributeRestriction - * RelationRestriction - * VariableSelection (not really a restriction) - -> dictionary {: [restriction...], ...} - 2. resolve eid restrictions - 3. for each select in union: - for each solution in select'solutions: - 1. resolve variables which have attribute restriction - 2. resolve relation restriction - 3. 
resolve selection and add to global results - """ - def __init__(self, schema): - self.schema = schema - Restriction.schema = schema # yalta! - self.rqlhelper = RQLHelper(schema, {'eid': etype_from_key}) - self._stored_proc = {'LOWER': lambda x: x.lower(), - 'UPPER': lambda x: x.upper()} - for cb in SQL_CONNECT_HOOKS.get('sqlite', []): - cb(self) - - # emulate sqlite connection interface so we can reuse stored procedures - def create_function(self, name, nbargs, func): - self._stored_proc[name] = func - - def create_aggregate(self, name, nbargs, func): - self._stored_proc[name] = func - - - def execute(self, operation, parameters=None, eid_key=None, build_descr=True): - rqlst = self.rqlhelper.parse(operation, annotate=True) - try: - self.rqlhelper.compute_solutions(rqlst, kwargs=parameters) - except BadKeyError: - results, description = [], [] - else: - results, description = self.interpret(rqlst, parameters) - return ResultSet(results, operation, parameters, description, rqlst=rqlst) - - def interpret(self, node, kwargs, dsget=None): - if dsget is None: - self._dsget = Get - else: - self._dsget = dsget - try: - return node.accept(self, kwargs) - except NotImplementedError: - self.critical('support for query not implemented: %s', node) - raise - - def visit_union(self, node, kwargs): - results, description = [], [] - extra = {'kwargs': kwargs} - for child in node.children: - pres, pdescr = self.visit_select(child, extra) - results += pres - description += pdescr - return results, description - - def visit_select(self, node, extra): - constraints = {} - if node.where is not None: - node.where.accept(self, constraints, extra) - fixed, toresolve, postresolve, postfilters = {}, {}, {}, [] - # extract NOT filters - for vname, restrictions in constraints.items(): - for restr in restrictions[:]: - if isinstance(restr, AttributeRestriction) and restr._not: - postfilters.append(restr) - restrictions.remove(restr) - if not restrictions: - del constraints[vname] - # add TypeRestriction for variable which have no restrictions at all - for varname, var in node.defined_vars.iteritems(): - if not varname in constraints: - constraints[varname] = [TypeRestriction(var)] - #print node, constraints - # compute eid restrictions - kwargs = extra['kwargs'] - for varname, restrictions in constraints.iteritems(): - for restr in restrictions[:]: - if isinstance(restr, EidRestriction): - assert not varname in fixed - try: - value = restr.resolve(kwargs) - fixed[varname] = value - except EntityNotFoundError: - return [], [] - restrictions.remove(restr) - #print 'fixed', fixed.keys() - # combine remaining restrictions - for varname, restrictions in constraints.iteritems(): - for restr in restrictions: - if isinstance(restr, AttributeRestriction): - toresolve.setdefault(varname, []).append(restr) - elif isinstance(restr, NotRelationRestriction) or ( - isinstance(restr, RelationRestriction) and - not restr.searched_var in fixed and restr.constraint_var in fixed): - toresolve.setdefault(varname, []).append(restr) - else: - postresolve.setdefault(varname, []).append(restr) - try: - if len(toresolve[varname]) > 1: - toresolve[varname] = MultipleRestriction(toresolve[varname]) - else: - toresolve[varname] = toresolve[varname][0] - except KeyError: - pass - #print 'toresolve %s' % toresolve - #print 'postresolve %s' % postresolve - # resolve additional restrictions - if fixed: - partres = [fixed.copy()] - else: - partres = [] - for varname, restr in toresolve.iteritems(): - varpartres = partres[:] - try: - values = 
tuple(restr.resolve(node.solutions, fixed)) - except EidMismatch, ex: - varname = ex.varname - value = ex.value - partres = [res for res in partres if res[varname] != value] - if partres: - continue - # some join failed, no possible results - return [], [] - if not values: - # some join failed, no possible results - return [], [] - if not varpartres: - # init results - for value in values: - partres.append({varname: value}) - elif not varname in partres[0]: - # cartesian product - for res in partres: - res[varname] = values[0] - for res in partres[:]: - for value in values[1:]: - res = res.copy() - res[varname] = value - partres.append(res) - else: - # union - for res in varpartres: - for value in values: - res = res.copy() - res[varname] = value - partres.append(res) - #print 'partres', len(partres) - #print partres - # Note: don't check for empty partres since constant selection may still - # produce result at this point - # sort to get RelationRestriction before AttributeSelection - restrictions = sorted((restr for restrictions in postresolve.itervalues() - for restr in restrictions), - key=lambda x: not isinstance(x, RelationRestriction)) - # compute stuff not doable in the previous step using datastore queries - for restr in restrictions + postfilters: - restr.complete_and_filter(node.solutions, partres) - if not partres: - # some join failed, no possible results - return [], [] - if extra.pop('has_exists', False): - # remove potential duplicates introduced by exists - toremovevars = [v.name for v in node.defined_vars.itervalues() - if not v.scope is node] - if toremovevars: - newpartres = [] - for result in partres: - for var in toremovevars: - del result[var] - if not result in newpartres: - newpartres.append(result) - if not newpartres: - # some join failed, no possible results - return [], [] - partres = newpartres - if node.orderby: - for sortterm in reversed(node.orderby): - resolver = ValueResolver(self._stored_proc, kwargs, sortterm.term) - partres.sort(reverse=not sortterm.asc, - key=lambda x: resolver.compute(x)[1]) - if partres: - if node.offset: - partres = partres[node.offset:] - if node.limit: - partres = partres[:node.limit] - if not partres: - return [], [] - #print 'completed partres', _print_results(partres) - # compute results - res, descr = [], [] - for j, term in enumerate(node.selection): - resolver = ValueResolver(self._stored_proc, kwargs, term) - if not partres: - etype, value = resolver.compute({}) - # only constant selected - if not res: - res.append([]) - descr.append([]) - res[0].append(value) - descr[0].append(etype) - else: - for i, sol in enumerate(partres): - etype, value = resolver.compute(sol) - append_result(res, descr, i, j, value, etype) - #print '--------->', res - return res, descr - - def visit_and(self, node, constraints, extra): - for child in node.children: - child.accept(self, constraints, extra) - def visit_exists(self, node, constraints, extra): - extra['has_exists'] = True - self.visit_and(node, constraints, extra) - - def visit_not(self, node, constraints, extra): - for child in node.children: - child.accept(self, constraints, extra) - try: - extra.pop(node) - except KeyError: - raise NotImplementedError() - - def visit_relation(self, node, constraints, extra): - if node.is_types_restriction(): - return - rschema = self.schema.rschema(node.r_type) - neged = node.neged(strict=True) - if neged: - # ok, we *may* process this Not node (not implemented error will be - # raised later if we can't) - extra[node.parent] = True - if 
rschema.final: - self._visit_final_relation(rschema, node, constraints, extra) - elif neged: - self._visit_non_final_neged_relation(rschema, node, constraints) - else: - self._visit_non_final_relation(rschema, node, constraints) - - def _visit_non_final_relation(self, rschema, node, constraints, not_=False): - lhs, rhs = node.get_variable_parts() - for v1, v2, prefix in ((lhs, rhs, 's'), (rhs, lhs, 'o')): - #if not_: - nbrels = len(v2.variable.stinfo['relations']) - #else: - # nbrels = len(v2.variable.stinfo['relations']) - len(v2.variable.stinfo['uidrels']) - if nbrels > 1: - constraints.setdefault(v1.name, []).append( - RelationRestriction(node, self._dsget, prefix)) - # just init an empty list for v2 variable to avoid a - # TypeRestriction being added for it - constraints.setdefault(v2.name, []) - break - else: - constraints.setdefault(rhs.name, []).append( - VariableSelection(node, self._dsget, 's')) - - def _visit_non_final_neged_relation(self, rschema, node, constraints): - lhs, rhs = node.get_variable_parts() - for v1, v2, prefix in ((lhs, rhs, 's'), (rhs, lhs, 'o')): - stinfo = v2.variable.stinfo - if not stinfo['selected'] and len(stinfo['relations']) == 1: - constraints.setdefault(v1.name, []).append( - NotRelationRestriction(node, self._dsget, prefix)) - constraints.setdefault(v2.name, []) - break - else: - self._visit_non_final_relation(rschema, node, constraints, True) - - def _visit_final_relation(self, rschema, node, constraints, extra): - varname = node.children[0].name - if rschema.type == 'eid': - constraints.setdefault(varname, []).append( - EidRestriction(node, self._dsget)) - else: - rhs = node.children[1].children[0] - if isinstance(rhs, nodes.VariableRef): - constraints.setdefault(rhs.name, []).append( - VariableSelection(node, self._dsget)) - elif isinstance(rhs, nodes.Constant): - if rschema.objects()[0] in ('Datetime', 'Date'): # XXX - constraints.setdefault(varname, []).append( - DateAttributeRestriction(node, extra['kwargs'])) - else: - constraints.setdefault(varname, []).append( - AttributeRestriction(node, extra['kwargs'])) - elif isinstance(rhs, nodes.Function) and rhs.name == 'IN': - constraints.setdefault(varname, []).append( - AttributeInRestriction(node, extra['kwargs'])) - else: - raise NotImplementedError() - - def _not_implemented(self, *args, **kwargs): - raise NotImplementedError() - - visit_or = _not_implemented - # shouldn't occurs - visit_set = _not_implemented - visit_insert = _not_implemented - visit_delete = _not_implemented - - -from logging import getLogger -from cubicweb import set_log_methods -set_log_methods(RQLInterpreter, getLogger('cubicweb.goa.rqlinterpreter')) -set_log_methods(Restriction, getLogger('cubicweb.goa.rqlinterpreter')) diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/skel/app.yaml.tmpl --- a/goa/skel/app.yaml.tmpl Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,72 +0,0 @@ -application: %(appname)s -version: 0-1 -runtime: python -api_version: 1 - -handlers: -- url: /admin/.* - script: $PYTHON_LIB/google/appengine/ext/admin - login: admin -- url: /data - static_dir: cubes/shared/data -- url: /fckeditor - static_dir: fckeditor -- url: /_load - script: loader.py - login: admin -- url: .* - script: main.py - # comment the line below to allow anonymous access or if you don't want to use - # google authentication service - login: required - -skip_files: | - ^(.*/)?( - (app\.yaml)| - (app\.yml)| - (index\.yaml)| - (index\.yml)| - (#.*#)| - (.*~)| - (.*\.py[co])| - (.*\.xcf)| - (.*\.asp)| - (.*\.aspx)| - 
(.*\.cfm)| - (.*\.po)| - (.*/RCS/.*)| - (\..*)| - (.*ChangeLog)| - (.*README)| - (.*TODO)| - (.*DEPENDS)| - (.*MANIFEST)| - (.*MANIFEST.in)| - (.*setup\.py)| - (.*,cover)| - (.*\.orig)| - (.*/test/.*)| - (.*/tests/.*)| - (.*/bin/.*)| - (.*/build/.*)| - (.*/debian/.*)| - (.*/doc/.*)| - (.*/skins/office2003/.*)| - (.*/editor/skins/silver/.*)| - (.*/editor/filemanager/.*)| - (.*/editor/plugins/.*)| - (.*/editor/images/smiley/.*)| - (.*/editor/.*spellerpages.*)| - (.*/docutils/writers/s5_html/.*)| - (.*/docutils/writers/latex2e/.*)| - (.*/docutils/writers/newlatex2e/.*)| - (.*/docutils/writers/pep_html/.*)| - (bin/.*)| - (tools/.*)| - (cubicweb.*/data/.*\.js)| - (cubicweb.*/data/.*\.css)| - (cubicweb.*/data/.*\.png)| - (cubicweb.*/data/.*\.gif)| - (cubicweb.*/data/.*\.gif)| - )$ - diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/skel/bootstrap_cubes diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/skel/custom.py --- a/goa/skel/custom.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,30 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -def postinit(vreg): - """this callback is called at the end of initialization process - and can be used to load explicit modules (views or entities). - - For instance : - import someviews - vreg.load_module(someviws) - """ - # from migration import migrate - # migrate(vreg) diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/skel/cw-cubes/README.txt --- a/goa/skel/cw-cubes/README.txt Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,7 +0,0 @@ -This directory is where you should put your lax components. - -For your application to actually use a component, you also -have to modify the ``INCLUDED_COMPONENTS`` variable in -the ``custom.py`` module. 
- - diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/skel/i18n/en.po --- a/goa/skel/i18n/en.po Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,14 +0,0 @@ -# LAX application po file - -msgid "" -msgstr "" -"Project-Id-Version: erudi 2.48.2\n" -"PO-Revision-Date: 2008-03-28 18:14+0100\n" -"Last-Translator: Logilab Team \n" -"Language-Team: fr \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: ginco-devtools\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" - diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/skel/i18n/fr.po --- a/goa/skel/i18n/fr.po Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,15 +0,0 @@ -# LAX application po file - -msgid "" -msgstr "" -"Project-Id-Version: erudi 2.48.2\n" -"PO-Revision-Date: 2008-03-28 18:14+0100\n" -"Last-Translator: Logilab Team \n" -"Language-Team: fr \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: ginco-devtools\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" - - diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/skel/loader.py --- a/goa/skel/loader.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,44 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -if __name__ == '__main__': - - from os.path import dirname, abspath - from cubicweb import goa - from cubicweb.goa.goaconfig import GAEConfiguration - from cubicweb.goa.dbinit import create_user, create_groups - - # compute instance's root directory - APPLROOT = dirname(abspath(__file__)) - # apply monkey patches first - goa.do_monkey_patch() - # get instance's configuration (will be loaded from app.conf file) - GAEConfiguration.ext_resources['JAVASCRIPTS'].append('DATADIR/goa.js') - config = GAEConfiguration('toto', APPLROOT) - # create default groups - create_groups() - if not config['use-google-auth']: - # create default admin - create_user('admin', 'admin', ('managers', 'users')) - # create anonymous user if specified - anonlogin = config['anonymous-user'] - if anonlogin: - create_user(anonlogin, config['anonymous-password'], ('guests',)) - print 'content initialized' diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/skel/main.py --- a/goa/skel/main.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,63 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. 
-# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""module defining the root handler for a lax instance. You should not have -to change anything here. - -""" -__docformat__ = "restructuredtext en" - -# compute instance's root directory -from os.path import dirname, abspath -APPLROOT = dirname(abspath(__file__)) - -# apply monkey patches first -from cubicweb import goa -goa.do_monkey_patch() - -# get instance's configuration (will be loaded from app.conf file) -from cubicweb.goa.goaconfig import GAEConfiguration -GAEConfiguration.ext_resources['JAVASCRIPTS'].append('DATADIR/goa.js') -config = GAEConfiguration('toto', APPLROOT) - -# dynamic objects registry -from cubicweb.goa.goavreg import GAEVregistry -vreg = GAEVregistry(config, debug=goa.MODE == 'dev') - -# trigger automatic classes registration (metaclass magic), should be done -# before schema loading -import custom - -# load instance'schema -vreg.schema = config.load_schema() - -# load dynamic objects -vreg.load(APPLROOT) - -# call the postinit so custom get a chance to do instance specific stuff -custom.postinit(vreg) - -from cubicweb.wsgi.handler import CubicWebWSGIApplication -application = CubicWebWSGIApplication(config, vreg=vreg) - -# main function so this handler module is cached -def main(): - from wsgiref.handlers import CGIHandler - CGIHandler().run(application) - -if __name__ == "__main__": - main() diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/skel/schema.py --- a/goa/skel/schema.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,31 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-""" - -""" - -class Blog(EntityType): - title = String(maxsize=50, required=True) - description = String() - -class BlogEntry(EntityType): - title = String(maxsize=100, required=True) - publish_date = Date(default='TODAY') - text = RichString(fulltextindexed=True) - category = String(vocabulary=('important','business')) - entry_of = SubjectRelation('Blog', cardinality='?*') diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/skel/views.py --- a/goa/skel/views.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,70 +0,0 @@ -# custom application views -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -from datetime import date - -from logilab.common.date import last_day - -from cubicweb.web.views import baseviews, boxes, calendar -from cubicweb.web.htmlwidgets import BoxLink, BoxWidget - -_ = unicode - - -class BlogEntryPrimaryView(baseviews.PrimaryView): - accepts = ('BlogEntry',) - - def cell_call(self, row, col): - entity = self.rset.get_entity(row, col) - self.w(u'

        <h1>%s</h1>
        ' % entity.dc_title()) - entity.view('metadata', w=self.w) - self.w(entity.printable_value('text')) - - -class BlogArchiveBox(boxes.BoxTemplate): - """side box usually displaying some related entities in a primary view""" - id = 'blog_archives_box' - title = _('blog archives') - - def call(self, **kwargs): - """display a list of entities by calling their view - """ - _ = self.req._ - rset = self.req.execute('Any CD ORDERBY CD DESC WHERE B is Blog, B creation_date CD') - blogmonths = [] - for (blogdate,) in rset: - year, month = blogdate.year, blogdate.month - if (year, month) not in blogmonths: - blogmonths.append( (year, month) ) - box = BoxWidget(_('Blog archives'), id=self.id) - for year, month in blogmonths: - firstday = date(year, month, 1) - lastday = last_day(firstday) - rql = ('Any B WHERE B is BlogEntry, B creation_date >= "%s", B creation_date <= "%s"' - % (firstday.strftime('%Y-%m-%d'), lastday.strftime('%Y-%m-%d'))) - url = self.build_url(rql=rql) - label = u'%s %s' % (_(calendar.MONTHNAMES[month-1]), year) - box.append( BoxLink(url, label) ) - box.render(self.w) - - - - diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/test/data/__init__.py --- a/goa/test/data/__init__.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,20 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""zou - -""" diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/test/data/bootstrap_cubes diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/test/data/schema.py --- a/goa/test/data/schema.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,31 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-""" - -""" - - -class YamsEntity(EntityType): - if 'Blog' in defined_types and 'Article' in defined_types: - ambiguous_relation = SubjectRelation(('Blog', 'Article')) - if 'Blog' in defined_types: - inlined_relation = SubjectRelation('Blog', cardinality='?*') - -class inlined_relation(RelationType): - inlined = True - diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/test/data/settings.py --- a/goa/test/data/settings.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,21 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -TEMPLATE_DEBUG = False diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/test/data/views.py --- a/goa/test/data/views.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,47 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -import os -os.environ["DJANGO_SETTINGS_MODULE"] = 'data.settings' - -from django import template - - -def encode_output(self, output): - # Check type so that we don't run str() on a Unicode object - if not isinstance(output, basestring): - return unicode(output) - return output - -template.VariableNode.encode_output = encode_output - -from cubicweb.view import StartupView - -INDEX_TEMPLATE = template.Template(u''' -

        <h1>hellô {{ user.login }}</h1>
        -''') - -class MyIndex(StartupView): - id = 'index' - - def call(self): - ctx = template.Context({'user': self.req.user}) - return INDEX_TEMPLATE.render(ctx) diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/test/unittest_db.py --- a/goa/test/unittest_db.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,85 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -from cubicweb.goa.testlib import * - -from cubicweb import Binary -from cubicweb.goa.goaconfig import GAEConfiguration -from cubicweb.server.utils import crypt_password - -from google.appengine.api.datastore_types import Text, Blob - - -class Blog(db.Model): - data = db.BlobProperty() - -class DBTest(GAEBasedTC): - config = GAEConfiguration('toto') - config.global_set_option('use-google-auth', False) - - MODEL_CLASSES = (Blog,) - - def test_set_none_relation(self): - eprop = self.add_entity('CWProperty', pkey=u'ui.language', value=u'en') - self.failUnless('s_for_user' in eprop._dbmodel) - self.assertEquals(eprop._dbmodel['s_for_user'], None) - - def test_euser_key(self): - euser = self.add_entity('CWUser', login=u'toto', upassword='toto') - self.assertEquals(euser.key().name(), 'key_toto') - - def test_egroup_key(self): - egroup = self.add_entity('CWGroup', name=u'toto') - self.assertEquals(egroup.key().name(), 'key_toto') - - def test_password_encryption(self): - euser = self.add_entity('CWUser', login=u'toto', upassword='toto') - self.failUnless(euser.upassword != 'toto', euser.upassword) - self.assertEquals(crypt_password('toto', euser.upassword[:2]), euser.upassword) - - def test_long_text(self): - # datastore string type is limited to 500 bytes - text = u'e'*501 - entity = self.add_entity('State', name=u'test', description=text) - self.assertIsInstance(entity.description, unicode) - self.failIf(isinstance(entity.description, Text)) - self.assertEquals(entity.description, text) - - def test_long_accentued_text(self): - # datastore string type is limited to 500 bytes - text = u'é'*500 - entity = self.add_entity('State', name=u'test', description=text) - self.assertIsInstance(entity.description, unicode) - self.failIf(isinstance(entity.description, Text)) - self.assertEquals(entity.description, text) - - def test_blob(self): - data = 'e'*501 - entity = self.add_entity('Blog', data=data) - self.assertIsInstance(entity.data, Binary) - value = entity.data.getvalue() - self.failIf(isinstance(value, Blob)) - self.assertEquals(value, data) - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/test/unittest_editcontroller.py --- a/goa/test/unittest_editcontroller.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 
@@ -1,430 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -from cubicweb.goa.testlib import * - -from urllib import unquote - -from cubicweb import ValidationError -from cubicweb.uilib import rql_for_eid - -from cubicweb.web import INTERNAL_FIELD_VALUE, Redirect - -from cubicweb.goa.goaconfig import GAEConfiguration -from cubicweb.entities.authobjs import CWUser - - -class EditControllerTC(GAEBasedTC): - - config = GAEConfiguration('toto') - config.global_set_option('use-google-auth', False) - config.global_set_option('schema-type', 'yams') - config.global_set_option('included-cubes', ()) - config.global_set_option('included-yams-cubes', ('blog',)) - - MODEL_CLASSES = () - from cubicweb.web.views import editcontroller - from cubicweb.entities import lib - LOAD_APP_MODULES = (editcontroller, lib) - - def setUp(self): - GAEBasedTC.setUp(self) - self.req = self.request() - self.ctrl = self.get_ctrl(self.req) - - def get_ctrl(self, req): - return self.vreg.select('controllers', 'edit', req=req, appli=self) - - def publish(self, req): - assert req is self.ctrl.req - try: - result = self.ctrl.publish() - req.cnx.commit() - except Redirect: - req.cnx.commit() - raise - except: - req.cnx.rollback() - raise - return result - - def expect_redirect_publish(self, req=None): - if req is not None: - self.ctrl = self.get_ctrl(req) - else: - req = self.req - try: - res = self.publish(req) - except Redirect, ex: - try: - path, params = ex.location.split('?', 1) - except: - path, params = ex.location, "" - req._url = path - cleanup = lambda p: (p[0], unquote(p[1])) - params = dict(cleanup(p.split('=', 1)) for p in params.split('&') if p) - return req.relative_path(False), params # path.rsplit('/', 1)[-1], params - else: - self.fail('expected a Redirect exception') - - def test_noparam_edit(self): - """check behaviour of this controller without any form parameter""" - self.req.form = {} - self.assertRaises(ValidationError, self.publish, self.req) - - def test_validation_unique(self): - """test creation of two linked entities""" - user = self.user - self.req.form = {'eid': 'X', '__type:X': 'CWUser', - 'login:X': self.user.login, 'edits-login:X': u'', - 'upassword:X': u'toto', 'upassword-confirm:X': u'toto', 'edits-upassword:X': u'', - } - self.assertRaises(ValidationError, self.publish, self.req) - - - def test_user_editing_itself(self): - """checking that a manager user can edit itself""" - self.skip('missing actual gae support, retry latter') - user = self.user - basegroups = [str(eid) for eid, in self.req.execute('CWGroup G WHERE X in_group G, X eid %(x)s', {'x': user.eid})] - groupeids = [eid for eid, in self.req.execute('CWGroup G WHERE G name in ("managers", "users")')] - groups = [str(eid) for eid in groupeids] - stateeid = [eid for eid, in 
self.req.execute('State S WHERE S name "activated"')][0] - self.req.form = { - 'eid': user.eid, - '__type:'+user.eid: 'CWUser', - 'login:'+user.eid: unicode(user.login), - 'firstname:'+user.eid: u'Th\xe9nault', - 'surname:'+user.eid: u'Sylvain', - 'in_group:'+user.eid: groups, - 'in_state:'+user.eid: stateeid, - # - 'edits-login:'+user.eid: unicode(user.login), - 'edits-firstname:'+user.eid: u'', - 'edits-surname:'+user.eid: u'', - 'edits-in_group:'+user.eid: basegroups, - 'edits-in_state:'+user.eid: stateeid, - } - path, params = self.expect_redirect_publish() - e = self.req.execute('Any X WHERE X eid %(x)s', {'x': user.eid}, 'x').get_entity(0, 0) - self.assertEquals(e.firstname, u'Th\xe9nault') - self.assertEquals(e.surname, u'Sylvain') - self.assertEquals(e.login, user.login) - self.assertEquals([g.eid for g in e.in_group], groupeids) - self.assertEquals(e.in_state[0].eid, stateeid) - - def test_user_can_change_its_password(self): - user = self.create_user('user') - cnx = self.login('user') - req = self.request() - #self.assertEquals(self.ctrl.schema['CWUser']._groups['read'], - # ('managers', 'users')) - req.form = { - 'eid': user.eid, '__type:'+user.eid: 'CWUser', - '__maineid' : str(user.eid), - 'upassword:'+user.eid: 'tournicoton', - 'upassword-confirm:'+user.eid: 'tournicoton', - 'edits-upassword:'+user.eid: '', - } - path, params = self.expect_redirect_publish(req) - cnx.commit() # commit to check we don't get late validation error for instance - self.assertEquals(path, 'euser/user') - self.failIf('vid' in params) - - def test_user_editing_itself_no_relation(self): - """checking we can edit an entity without specifying some required - relations (meaning no changes) - """ - user = self.user - groupeids = [eid for eid, in self.req.execute('CWGroup G WHERE X in_group G, X eid %(x)s', {'x': user.eid})] - self.req.form = { - 'eid': user.eid, - '__type:'+user.eid: 'CWUser', - 'login:'+user.eid: unicode(user.login), - 'firstname:'+user.eid: u'Th\xe9nault', - 'surname:'+user.eid: u'Sylvain', - # - 'edits-login:'+user.eid: unicode(user.login), - 'edits-firstname:'+user.eid: u'', - 'edits-surname:'+user.eid: u'', - } - path, params = self.expect_redirect_publish() - self.req.drop_entity_cache(user.eid) - e = self.req.execute('Any X WHERE X eid %(x)s', {'x': user.eid}, 'x').get_entity(0, 0) - self.assertEquals(e.login, user.login) - self.assertEquals(e.firstname, u'Th\xe9nault') - self.assertEquals(e.surname, u'Sylvain') - self.assertUnorderedIterableEquals([g.eid for g in e.in_group], groupeids) - #stateeids = [eid for eid, in self.req.execute('State S WHERE S name "activated"')] - #self.assertEquals([s.eid for s in e.in_state], stateeids) - - - def test_create_multiple_linked(self): - gueid = self.req.execute('CWGroup G WHERE G name "users"')[0][0] - self.req.form = {'eid': ['X', 'Y'], - - '__type:X': 'CWUser', - '__maineid' : 'X', - 'login:X': u'adim', 'edits-login:X': u'', - 'upassword:X': u'toto', 'upassword-confirm:X': u'toto', 'edits-upassword:X': u'', - 'surname:X': u'Di Mascio', 'edits-surname:X': '', - - 'in_group:X': gueid, 'edits-in_group:X': INTERNAL_FIELD_VALUE, - - '__type:Y': 'EmailAddress', - 'address:Y': u'dima@logilab.fr', 'edits-address:Y': '', - 'use_email:X': 'Y', 'edits-use_email:X': INTERNAL_FIELD_VALUE, - } - path, params = self.expect_redirect_publish() - # should be redirected on the created person - self.assertEquals(path, 'euser/adim') - e = self.req.execute('Any P WHERE P surname "Di Mascio"').get_entity(0, 0) - self.assertEquals(e.surname, 'Di Mascio') - email 
= e.use_email[0] - self.assertEquals(email.address, 'dima@logilab.fr') - - def test_edit_multiple_linked(self): - peid = self.create_user('adim').eid - self.req.form = {'eid': [peid, 'Y'], - '__type:%s'%peid: 'CWUser', - 'surname:%s'%peid: u'Di Masci', 'edits-surname:%s'%peid: '', - - '__type:Y': 'EmailAddress', - 'address:Y': u'dima@logilab.fr', 'edits-address:Y': '', - 'use_email:%s'%peid: 'Y', 'edits-use_email:%s'%peid: INTERNAL_FIELD_VALUE, - - '__redirectrql': 'Any X WHERE X eid %s'%peid, - } - path, params = self.expect_redirect_publish() - # should be redirected on the created person - eid = params['rql'].split()[-1] - e = self.req.execute('Any X WHERE X eid %(x)s', {'x': eid}, 'x').get_entity(0, 0) - self.assertEquals(e.surname, 'Di Masci') - email = e.use_email[0] - self.assertEquals(email.address, 'dima@logilab.fr') - - emaileid = email.eid - self.req.form = {'eid': [peid, emaileid], - '__type:%s'%peid: 'CWUser', - 'surname:%s'%peid: u'Di Masci', 'edits-surname:%s'%peid: 'Di Masci', - '__type:%s'%emaileid: 'EmailAddress', - 'address:%s'%emaileid: u'adim@logilab.fr', 'edits-address:%s'%emaileid: 'dima@logilab.fr', - 'use_email:%s'%peid: emaileid, 'edits-use_email:%s'%peid: emaileid, - '__redirectrql': 'Any X WHERE X eid %s'%peid, - } - path, params = self.expect_redirect_publish() - # should be redirected on the created person - eid = params['rql'].split()[-1] - # XXX this should not be necessary, it isn't with regular cubicweb - self.req._eid_cache = {} - e = self.req.execute('Any X WHERE X eid %(x)s', {'x': eid}, 'x').get_entity(0, 0) - self.assertEquals(e.surname, 'Di Masci') - email = e.use_email[0] - self.assertEquals(email.address, 'adim@logilab.fr') - - - def test_password_confirm(self): - """test creation of two linked entities - """ - user = self.user - self.req.form = {'__cloned_eid:X': user.eid, - 'eid': 'X', '__type:X': 'CWUser', - 'login:X': u'toto', 'edits-login:X': u'', - 'upassword:X': u'toto', 'edits-upassword:X': u'', - } - self.assertRaises(ValidationError, self.publish, self.req) - self.req.form = {'__cloned_eid:X': user.eid, - 'eid': 'X', '__type:X': 'CWUser', - 'login:X': u'toto', 'edits-login:X': u'', - 'upassword:X': u'toto', 'upassword-confirm:X': u'tutu', 'edits-upassword:X': u'', - } - self.assertRaises(ValidationError, self.publish, self.req) - - - def test_req_pending_insert(self): - """make sure req's pending insertions are taken into account""" - tmpgroup = self.add_entity('CWGroup', name=u"test") - user = self.user - self.req.set_session_data('pending_insert', set([(user.eid, 'in_group', tmpgroup.eid)])) - path, params = self.expect_redirect_publish() - usergroups = [gname for gname, in - self.req.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', {'u': user.eid})] - self.assertUnorderedIterableEquals(usergroups, ['managers', 'users', 'test']) - self.assertEquals(self.req.get_pending_inserts(), []) - - - def test_req_pending_delete(self): - """make sure req's pending deletions are taken into account""" - user = self.user - groupeid = self.req.execute('INSERT CWGroup G: G name "test", U in_group G WHERE U eid %(x)s', - {'x': user.eid})[0][0] - usergroups = [gname for gname, in - self.req.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', {'u': user.eid})] - # just make sure everything was set correctly - self.assertUnorderedIterableEquals(usergroups, ['managers', 'users', 'test']) - # now try to delete the relation - self.req.set_session_data('pending_delete', set([(user.eid, 'in_group', groupeid)])) - path, params = 
self.expect_redirect_publish() - usergroups = [gname for gname, in - self.req.execute('Any N WHERE G name N, U in_group G, U eid %(u)s', {'u': user.eid})] - self.assertUnorderedIterableEquals(usergroups, ['managers', 'users']) - #self.assertUnorderedIterableEquals(usergroups, ['managers']) - self.assertEquals(self.req.get_pending_deletes(), []) - - def test_custom_attribute_handler(self): - def custom_login_edit(self, formparams, value, relations): - formparams['login'] = value.upper() - relations.append('X login %(login)s') - CWUser.custom_login_edit = custom_login_edit - try: - user = self.user - eid = repr(user.eid) - self.req.form = { - 'eid': eid, - '__type:'+eid: 'CWUser', - 'login:'+eid: u'foo', - 'edits-login:'+eid: unicode(user.login), - } - path, params = self.expect_redirect_publish() - rset = self.req.execute('Any L WHERE X eid %(x)s, X login L', {'x': user.eid}, 'x') - self.assertEquals(rset[0][0], 'FOO') - finally: - del CWUser.custom_login_edit - - def test_redirect_apply_button(self): - redirectrql = rql_for_eid(4012) # whatever - self.req.form = { - 'eid': 'A', '__type:A': 'BlogEntry', - '__maineid' : 'A', - 'content:A': u'"13:03:43"', 'edits-content:A': '', - 'title:A': u'huuu', 'edits-title:A': '', - '__redirectrql': redirectrql, - '__redirectvid': 'primary', - '__redirectparams': 'toto=tutu&tata=titi', - '__form_id': 'edition', - '__action_apply': '', - } - path, params = self.expect_redirect_publish() - self.failUnless(path.startswith('blogentry/')) - eid = path.split('/')[1] - self.assertEquals(params['vid'], 'edition') - self.assertNotEquals(eid, '4012') - self.assertEquals(params['__redirectrql'], redirectrql) - self.assertEquals(params['__redirectvid'], 'primary') - self.assertEquals(params['__redirectparams'], 'toto=tutu&tata=titi') - - def test_redirect_ok_button(self): - redirectrql = rql_for_eid(4012) # whatever - self.req.form = { - 'eid': 'A', '__type:A': 'BlogEntry', - '__maineid' : 'A', - 'content:A': u'"13:03:43"', 'edits-content:A': '', - 'title:A': u'huuu', 'edits-title:A': '', - '__redirectrql': redirectrql, - '__redirectvid': 'primary', - '__redirectparams': 'toto=tutu&tata=titi', - '__form_id': 'edition', - } - path, params = self.expect_redirect_publish() - self.assertEquals(path, 'view') - self.assertEquals(params['rql'], redirectrql) - self.assertEquals(params['vid'], 'primary') - self.assertEquals(params['tata'], 'titi') - self.assertEquals(params['toto'], 'tutu') - - def test_redirect_delete_button(self): - eid = self.add_entity('BlogEntry', title=u'hop', content=u'hop').eid - self.req.form = {'eid': str(eid), '__type:%s'%eid: 'BlogEntry', - '__action_delete': ''} - path, params = self.expect_redirect_publish() - self.assertEquals(path, 'blogentry') - self.assertEquals(params, {u'__message': u'entity deleted'}) - eid = self.add_entity('EmailAddress', address=u'hop@logilab.fr').eid - self.req.execute('SET X use_email E WHERE E eid %(e)s, X eid %(x)s', - {'x': self.user.eid, 'e': eid}, 'x') - self.commit() - self.req.form = {'eid': str(eid), '__type:%s'%eid: 'EmailAddress', - '__action_delete': ''} - path, params = self.expect_redirect_publish() - self.assertEquals(unquote(path), 'euser/'+self.user.login) - self.assertEquals(params, {u'__message': u'entity deleted'}) - eid1 = self.add_entity('BlogEntry', title=u'hop', content=u'hop').eid - eid2 = self.add_entity('EmailAddress', address=u'hop@logilab.fr').eid - self.req.form = {'eid': [str(eid1), str(eid2)], - '__type:%s'%eid1: 'BlogEntry', - '__type:%s'%eid2: 'EmailAddress', - '__action_delete': 
''} - path, params = self.expect_redirect_publish() - self.assertEquals(path, 'view') - self.assertEquals(params, {u'__message': u'entities deleted'}) - - - def test_nonregr_multiple_empty_email_addr(self): - gueid = self.req.execute('CWGroup G WHERE G name "users"')[0][0] - self.req.form = {'eid': ['X', 'Y'], - - '__type:X': 'CWUser', - 'login:X': u'adim', 'edits-login:X': u'', - 'upassword:X': u'toto', 'upassword-confirm:X': u'toto', 'edits-upassword:X': u'', - 'in_group:X': gueid, 'edits-in_group:X': INTERNAL_FIELD_VALUE, - - '__type:Y': 'EmailAddress', - 'address:Y': u'', 'edits-address:Y': '', - 'alias:Y': u'', 'edits-alias:Y': '', - 'use_email:X': 'Y', 'edits-use_email:X': INTERNAL_FIELD_VALUE, - } - self.assertRaises(ValidationError, self.publish, self.req) - - - def test_nonregr_rollback_on_validation_error(self): - self.skip('lax fix me') - p = self.create_user("doe") - # do not try to skip 'primary_email' for this test - old_skips = p.__class__.skip_copy_for - p.__class__.skip_copy_for = () - try: - e = self.add_entity('EmailAddress', address=u'doe@doe.com') - self.req.execute('SET P use_email E, P primary_email E WHERE P eid %(p)s, E eid %(e)s', - {'p' : p.eid, 'e' : e.eid}) - self.req.form = {'__cloned_eid:X': p.eid, - 'eid': 'X', '__type:X': 'CWUser', - 'login': u'dodo', 'edits-login': u'dodo', - 'surname:X': u'Boom', 'edits-surname:X': u'', - '__errorurl' : "whatever but required", - } - # try to emulate what really happens in the web application - # 1/ validate form => EditController.publish raises a ValidationError - # which fires a Redirect - # 2/ When re-publishing the copy form, the publisher implicitly commits - try: - self.app.publish('edit', self.req) - except Redirect: - self.req.form['rql'] = 'Any X WHERE X eid %s' % p.eid - self.req.form['vid'] = 'copy' - self.app.publish('view', self.req) - rset = self.req.execute('CWUser P WHERE P surname "Boom"') - self.assertEquals(len(rset), 0) - finally: - p.__class__.skip_copy_for = old_skips - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/test/unittest_metadata.py --- a/goa/test/unittest_metadata.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,126 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-""" - -""" -from cubicweb.goa.testlib import * - -import time -from mx.DateTime import DateTimeType -from datetime import datetime -from cubicweb.goa import db - -from google.appengine.api import datastore - -class Article(db.Model): - content = db.TextProperty() - synopsis = db.StringProperty(default='hello') - -class Blog(db.Model): - diem = db.DateProperty(required=True, auto_now_add=True) - title = db.StringProperty(required=True) - content = db.TextProperty() - talks_about = db.ReferenceProperty(Article) - cites = db.SelfReferenceProperty() - - -class MetaDataTC(GAEBasedTC): - MODEL_CLASSES = (Article, Blog) - - def setUp(self): - GAEBasedTC.setUp(self) - self.req = self.request() - self.a = self.add_entity('Article') - self.p = self.add_entity('CWProperty', pkey=u'ui.language', value=u'en') - self.session.commit() - - def _test_timestamp(self, entity, attr, sleep=0.1): - timestamp = getattr(entity, attr) - self.failUnless(timestamp) - self.assertIsInstance(timestamp, DateTimeType) - self.assertIsInstance(entity.to_gae_model()['s_'+attr], datetime) - time.sleep(sleep) - if entity.id == 'Article': - entity.set_attributes(content=u'zou') - else: - entity.set_attributes(value=u'en') - self.session.commit() - return timestamp - - def test_creation_date_dbmodel(self): - cdate = self._test_timestamp(self.a, 'creation_date') - self.assertEquals(cdate, self.a.creation_date) - - def test_creation_date_yams(self): - cdate = self._test_timestamp(self.p, 'creation_date') - self.assertEquals(cdate, self.p.creation_date) - - def test_modification_date_dbmodel(self): - mdate = self._test_timestamp(self.a, 'modification_date', sleep=1) - a = self.execute('Any X WHERE X eid %(x)s', {'x': self.a.eid}, 'x').get_entity(0, 0) - self.failUnless(mdate < a.modification_date, (mdate, a.modification_date)) - - def test_modification_date_yams(self): - mdate = self._test_timestamp(self.p, 'modification_date', sleep=1) - p = self.execute('Any X WHERE X eid %(x)s', {'x': self.p.eid}, 'x').get_entity(0, 0) - self.failUnless(mdate < p.modification_date, (mdate, p.modification_date)) - - def _test_owned_by(self, entity): - self.assertEquals(len(entity.owned_by), 1) - owner = entity.owned_by[0] - self.assertIsInstance(owner, db.Model) - dbmodel = entity.to_gae_model() - self.assertEquals(len(dbmodel['s_owned_by']), 1) - self.assertIsInstance(dbmodel['s_owned_by'][0], datastore.Key) - - def test_owned_by_dbmodel(self): - self._test_owned_by(self.a) - - def test_owned_by_yams(self): - self._test_owned_by(self.p) - - def _test_created_by(self, entity): - self.assertEquals(len(entity.created_by), 1) - creator = entity.created_by[0] - self.assertIsInstance(creator, db.Model) - self.assertIsInstance(entity.to_gae_model()['s_created_by'], datastore.Key) - - def test_created_by_dbmodel(self): - self._test_created_by(self.a) - - def test_created_by_dbmodel(self): - self._test_created_by(self.p) - - def test_user_owns_dbmodel(self): - self.failUnless(self.req.user.owns(self.a.eid)) - - def test_user_owns_yams(self): - self.failUnless(self.req.user.owns(self.p.eid)) - - def test_is_relation(self): - en = self.execute('Any EN WHERE E name EN, X is E, X eid %(x)s', {'x': self.a.eid}, 'x')[0][0] - self.assertEquals(en, 'Article') - en = self.execute('Any EN WHERE E name EN, X is E, X eid %(x)s', {'x': self.p.eid}, 'x')[0][0] - self.assertEquals(en, 'CWProperty') - en = self.execute('Any EN WHERE E name EN, X is E, X eid %(x)s', {'x': self.req.user.eid}, 'x')[0][0] - self.assertEquals(en, 'CWUser') - - -if __name__ == '__main__': - 
from logilab.common.testlib import unittest_main - unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/test/unittest_rql.py --- a/goa/test/unittest_rql.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,628 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -from cubicweb.goa.testlib import * - -from cubicweb import Binary - -from logilab.common.testlib import unittest_main -from mx.DateTime import now, today, DateTimeType -import rql - -from google.appengine.api.datastore_types import Blob, Text - -# stored procedure definition ################################################# - -from rql.utils import register_function, FunctionDescr - -class itemtype_sort_value(FunctionDescr): - supported_backends = ('sqlite',) - rtype = 'Int' - -try: - register_function(itemtype_sort_value) -except AssertionError: - pass - -def init_sqlite_connexion(cnx): - def itemtype_sort_value(text): - return {"personal":2, "business":1}[text] - cnx.create_function("ITEMTYPE_SORT_VALUE", 1, itemtype_sort_value) - -from cubicweb.server import SQL_CONNECT_HOOKS -sqlite_hooks = SQL_CONNECT_HOOKS.setdefault('sqlite', []) -sqlite_hooks.append(init_sqlite_connexion) - -# end stored procedure definition ############################################# - -class Article(db.Model): - content = db.TextProperty() - synopsis = db.StringProperty(default=u'hello') - -class Blog(db.Model): - diem = db.DateProperty(required=True, auto_now_add=True) - content = db.TextProperty() - itemtype = db.StringProperty(required=True, choices=(u'personal', u'business')) - talks_about = db.ReferenceProperty(Article) - cites = db.SelfReferenceProperty() - data = db.BlobProperty() - - -class RQLTest(GAEBasedTC): - MODEL_CLASSES = (Article, Blog) - - def setUp(self): - GAEBasedTC.setUp(self) - # hack to make talks_about cardinality to ** instead of ?* - self.schema.rschema('talks_about').set_rproperty('Blog', 'Article', - 'cardinality', '**') - self.req = self.request() - self.article = self.add_entity('Article', content=u'very interesting') - self.blog = self.add_entity('Blog', itemtype=u'personal', content=u'hop') - self.execute('SET X talks_about Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': self.blog.eid, 'y': self.article.eid}) - self.commit() - - def _check_rset_size(self, rset, row, col): - self.assertEquals(len(rset), row) - self.assertEquals(len(rset[0]), col) - self.assertEquals(len(rset.description), row) - self.assertEquals(len(rset.description[0]), col) - - def _check_blog_rset(self, rset): - self._check_rset_size(rset, 1, 1) - self.assertEquals(rset.description[0][0], 'Blog') - self.assertEquals(rset[0][0], self.blog.eid) - self.assertEquals(rset.get_entity(0, 0).eid, self.blog.eid) - - def test_0_const(self): - rset = self.req.execute('Any 1') - 
self._check_rset_size(rset, 1, 1) - self.assertEquals(rset[0][0], 1) - self.assertEquals(rset.description[0][0], 'Int') - - def test_0_now_const(self): - rset = self.req.execute('Any NOW') - self._check_rset_size(rset, 1, 1) - self.assertIsInstance(rset[0][0], DateTimeType) - self.assertEquals(rset.description[0][0], 'Datetime') - - def test_0_today_const(self): - rset = self.req.execute('Any TODAY') - self._check_rset_size(rset, 1, 1) - self.assertIsInstance(rset[0][0], DateTimeType) - self.assertEquals(rset[0][0], today()) - self.assertEquals(rset.description[0][0], 'Date') - - - def test_1_eid(self): - rset = self.req.execute('Any X WHERE X eid %(x)s', {'x': self.blog.eid}) - self._check_blog_rset(rset) - rset = self.req.execute('Any X WHERE X eid "%s"' % self.blog.eid) - self._check_blog_rset(rset) - - def test_1_eid_eid(self): - rset = self.req.execute('Any X,Y WHERE X eid %(x)s, Y eid %(y)s', {'x': self.blog.eid, - 'y': self.article.eid}) - self._check_rset_size(rset, 1, 2) - self.assertEquals(rset.description[0], ('Blog', 'Article')) - self.assertEquals(rset[0][0], self.blog.eid) - self.assertEquals(rset[0][1], self.article.eid) - - def test_1_eid_with_is(self): - self.assertRaises(rql.TypeResolverException, - self.req.execute, 'Any X WHERE X eid %(x)s, X is Article', {'x': self.blog.eid}) - rset = self.req.execute('Any X WHERE X eid %(x)s, X is Blog', {'x': self.blog.eid}) - self._check_blog_rset(rset) - - def test_1_is(self): - rset = self.req.execute('Any X WHERE X is Blog') - self._check_blog_rset(rset) - blog2 = Blog(itemtype=u'personal', content=u'hop') - blog2.put() - rset = self.req.execute('Any X WHERE X is Blog') - self.assertEquals(len(rset), 2) - self.assertEquals(rset.description, [('Blog',), ('Blog',)]) - - - def test_2_attribute_selection_1(self): - rset = self.req.execute('Any X,D,C WHERE X is Blog, X diem D, X content C') - self._check_rset_size(rset, 1, 3) - self.assertEquals(rset[0], [self.blog.eid, today(), u'hop']) - self.assertEquals(rset.description[0], ('Blog', 'Date', 'String')) - self.assertIsInstance(rset[0][1], DateTimeType) - - def test_2_attribute_selection_2(self): - rset = self.req.execute('Any D,C WHERE X is Blog, X diem D, X content C') - self._check_rset_size(rset, 1, 2) - self.assertEquals(rset[0], [today(), u'hop']) - self.assertEquals(rset.description[0], ('Date', 'String')) - - def test_2_attribute_selection_binary(self): - rset = self.req.execute('Any D WHERE X is Blog, X data D') - self._check_rset_size(rset, 1, 1) - self.assertEquals(rset[0], [None]) - self.assertEquals(rset.description[0], ('Bytes',)) - self.blog['data'] = Binary('raw data') - self.blog.put() - rset = self.req.execute('Any D WHERE X is Blog, X data D') - self._check_rset_size(rset, 1, 1) - self.assertIsInstance(rset[0][0], Binary) - value = rset[0][0].getvalue() - self.assertIsInstance(value, str) - self.failIf(isinstance(value, Blob)) - self.assertEquals(value, 'raw data') - self.assertEquals(rset.description[0], ('Bytes',)) - - def test_2_attribute_selection_long_text(self): - self.blog['content'] = text = 'a'*501 - self.blog.put() - rset = self.req.execute('Any C WHERE X is Blog, X content C') - self._check_rset_size(rset, 1, 1) - self.assertIsInstance(rset[0][0], unicode) - self.failIf(isinstance(rset[0][0], Text)) - self.assertEquals(rset[0][0], text) - - def test_2_attribute_selection_transformation(self): - rset = self.req.execute('Any X,UPPER(C) WHERE X is Blog, X content C') - self._check_rset_size(rset, 1, 2) - self.assertEquals(rset[0], [self.blog.eid, u'HOP']) - 
self.assertEquals(rset.description[0], ('Blog', 'String',)) - - - def test_3_attribute_restriction(self): - rset = self.req.execute('Any X WHERE X itemtype "personal"') - self._check_blog_rset(rset) - rset = self.req.execute('Any X WHERE X itemtype "business"') - self.assertEquals(len(rset), 0) - - def test_3_ambigous_attribute_restriction_1(self): - rset = self.req.execute('Any X WHERE X content "hello"') - self.assertEquals(len(rset), 0) - - def test_3_ambigous_attribute_restriction_2(self): - rset = self.req.execute('Any X WHERE X content "hop"') - self._check_blog_rset(rset) - - def test_3_ambigous_attribute_restriction_3(self): - article = Article(content=u'hop') - article.put() - rset = self.req.execute('Any X WHERE X content "hop"') - self._check_rset_size(rset, 2, 1) - self.assertUnorderedIterableEquals([r[0] for r in rset], [self.blog.eid, article.eid]) - self.assertUnorderedIterableEquals([r[0] for r in rset.description], ['Blog', 'Article']) - - def test_3_incoherant_attribute_restriction(self): - rset = self.req.execute('Any X WHERE X eid %(x)s, X content "hola"', - {'x': self.blog.eid}) - self.assertEquals(len(rset), 0) - - def test_3_multiple_attribute_restriction(self): - rset = self.req.execute('Any X WHERE X content "hop", X itemtype "personal"') - self._check_blog_rset(rset) - - def test_3_incoherant_multiple_attribute_restriction(self): - rset = self.req.execute('Any X WHERE X content "hip", X itemtype "personal"') - self.assertEquals(len(rset), 0) - - def test_3_today_attribute_restriction(self): - rset = self.req.execute('Any X WHERE X diem < TODAY') - self.assertEquals(len(rset), 0) - rset = self.req.execute('Any X WHERE X diem <= TODAY') - self._check_blog_rset(rset) - rset = self.req.execute('Any X WHERE X diem > TODAY') - self.assertEquals(len(rset), 0) - rset = self.req.execute('Any X WHERE X diem >= TODAY') - self._check_blog_rset(rset) - - def test_3_now_attribute_restriction(self): - rset = self.req.execute('Any X WHERE X diem < NOW') - self._check_blog_rset(rset) - rset = self.req.execute('Any X WHERE X diem <= NOW') - self._check_blog_rset(rset) - rset = self.req.execute('Any X WHERE X diem > NOW') - self.assertEquals(len(rset), 0) - rset = self.req.execute('Any X WHERE X diem >= NOW') - self.assertEquals(len(rset), 0) - - def test_3_in_attribute_restriction(self): - self.skip('missing actual gae support, retry latter') - article2 = Article(content=u'hip') - rset = self.req.execute('Any X WHERE X content IN ("hop", "hip")') - self._check_rset_size(rset, 2, 1) - self.assertUnorderedIterableEquals([r[0] for r in rset], [self.blog.eid, article.eid]) - self.assertUnorderedIterableEquals([r[0] for r in rset.description], ['Blog', 'Article']) - - def test_3_like(self): - repo = self.config.repository() - versions = repo.get_versions() - self.assertEquals(versions.keys(), ['cubicweb']) - - def _setup_relation_description(self): - self.article2 = self.add_entity('Article', content=u'hop') - self.blog2 = self.add_entity('Blog', itemtype=u'personal', content=u'hip') - self.execute('SET X talks_about Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': self.blog2.eid, 'y': self.article2.eid}) - self.blog3 = self.add_entity('Blog', itemtype=u'business', content=u'hep') - self.commit() - - def test_4_relation_restriction_1(self): - self._setup_relation_description() - rset = self.req.execute('Any X WHERE X talks_about Y') - self._check_rset_size(rset, 2, 1) - self.assertUnorderedIterableEquals([r[0] for r in rset], - [self.blog.eid, self.blog2.eid]) - 
self.assertUnorderedIterableEquals([r[0] for r in rset.description], ['Blog', 'Blog']) - - def test_4_relation_restriction_2(self): - self._setup_relation_description() - rset = self.req.execute('Any Y WHERE X talks_about Y') - self._check_rset_size(rset, 2, 1) - self.assertUnorderedIterableEquals([r[0] for r in rset], - [self.article.eid, self.article2.eid]) - self.assertUnorderedIterableEquals([r[0] for r in rset.description], - ['Article', 'Article']) - - def test_4_relation_restriction_3(self): - self._setup_relation_description() - rset = self.req.execute('Any X,Y WHERE X talks_about Y') - self._check_rset_size(rset, 2, 2) - self.assertUnorderedIterableEquals([tuple(r) for r in rset], - [(self.blog.eid, self.article.eid), - (self.blog2.eid, self.article2.eid)]) - self.assertUnorderedIterableEquals([tuple(r) for r in rset.description], - [('Blog', 'Article'), - ('Blog', 'Article')]) - - def test_4_relation_restriction_4(self): - self._setup_relation_description() - rset = self.req.execute('Any X,Y WHERE X talks_about Y, X eid %(x)s', - {'x': self.blog.eid}) - self._check_rset_size(rset, 1, 2) - self.assertEquals(rset[0], [self.blog.eid, self.article.eid]) - self.assertUnorderedIterableEquals(rset.description[0], ['Blog', 'Article']) - - def test_4_relation_restriction_5(self): - self._setup_relation_description() - rset = self.req.execute('Any X,Y WHERE X talks_about Y, Y eid %(x)s', - {'x': self.article.eid}) - self._check_rset_size(rset, 1, 2) - self.assertEquals(rset[0], [self.blog.eid, self.article.eid]) - self.assertUnorderedIterableEquals(rset.description[0], ['Blog', 'Article']) - - def test_4_relation_subject_restriction(self): - self._setup_relation_description() - rset = self.req.execute('Any X,Y WHERE X talks_about Y, X content %(c)s', - {'c': 'hop'}) - self._check_rset_size(rset, 1, 2) - self.assertEquals(rset[0], [self.blog.eid, self.article.eid]) - self.assertUnorderedIterableEquals(rset.description[0], ['Blog', 'Article']) - - def test_4_relation_object_restriction(self): - self._setup_relation_description() - rset = self.req.execute('Any X WHERE X is Blog, X talks_about Y, Y content %(c)s', - {'c': 'very interesting'}) - self._check_rset_size(rset, 1, 1) - self.assertEquals(rset[0], [self.blog.eid]) - self.assertUnorderedIterableEquals(rset.description[0], ['Blog']) - - def test_4_relation_subject_object_restriction(self): - article2 = self.add_entity('Article', content=u'very interesting') - rset = self.req.execute('Any X,XC WHERE X is Blog, X content XC, X content %(xc)s, ' - 'X talks_about Y, Y content %(c)s', - {'xc': 'hop', 'c': 'very interesting'}) - self._check_rset_size(rset, 1, 2) - self.assertEquals(rset[0], [self.blog.eid, self.blog.content]) - self.assertUnorderedIterableEquals(rset.description[0], ['Blog', 'String']) - - def test_4_relation_subject_object_restriction_no_res(self): - article2 = self.add_entity('Article', content=u'very interesting') - rset = self.req.execute('Any X,XC WHERE X is Blog, X content XC, X content %(xc)s, ' - 'X talks_about Y, Y content %(c)s', - {'xc': 'hip', 'c': 'very interesting'}) - self.assertEquals(len(rset), 0) - - def test_4_relation_subject_object_restriction_no_res_2(self): - rset = self.req.execute('Any X,XC WHERE X is Blog, X content XC, X content %(xc)s, ' - 'X talks_about Y, Y content %(c)s', - {'xc': 'hop', 'c': 'not interesting'}) - self.assertEquals(len(rset), 0) - - def test_4_relation_restriction_7(self): - self._setup_relation_description() - rset = self.req.execute('Any XC,XD,YC WHERE X talks_about Y, Y eid 
%(x)s,' - 'X content XC, X diem XD, Y content YC', - {'x': self.article.eid}) - self._check_rset_size(rset, 1, 3) - self.assertEquals(rset[0], [self.blog.content, self.blog.diem, self.article.content]) - self.assertUnorderedIterableEquals(rset.description[0], ['String', 'Date', 'String']) - - def test_4_relation_restriction_8(self): - self._setup_relation_description() - rset = self.req.execute('Any X,Y WHERE X cites Y, Y eid %(x)s', {'x': self.blog.eid}) - self.assertEquals(len(rset), 0) - - def test_4_relation_restriction_9(self): - article2 = self.add_entity('Article', content=u'hop') - self.req.execute('SET X talks_about Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': self.blog.eid, 'y': article2.eid}) - rset = self.req.execute('Any X,Y WHERE X talks_about Y, X eid %(x)s, Y eid %(y)s', - {'x': self.blog.eid, 'y': article2.eid}) - self._check_rset_size(rset, 1, 2) - - def test_4_ambiguous_subject_relation(self): - ye = self.add_entity('YamsEntity') - self.req.execute('SET X ambiguous_relation Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': ye.eid, 'y': self.blog.eid}) - self.req.execute('SET X ambiguous_relation Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': ye.eid, 'y': self.article.eid}) - self.commit() - #ye = self.vreg.etype_class('YamsEntity ')(req, None) - #ye.to_gae_model()['s_ambiguous_relation'] = [self.blog.key(), self.article.key()] - #ye.put() - rset = self.req.execute('Any X WHERE Y ambiguous_relation X') - self._check_rset_size(rset, 2, 1) - self.assertUnorderedIterableEquals([r[0] for r in rset], [self.blog.eid, self.article.eid]) - self.assertUnorderedIterableEquals([r[0] for r in rset.description], ['Blog', 'Article']) - rset = self.req.execute('Any X WHERE Y ambiguous_relation X, Y eid %(x)s', {'x': ye.eid}) - self._check_rset_size(rset, 2, 1) - self.assertUnorderedIterableEquals([r[0] for r in rset], [self.blog.eid, self.article.eid]) - self.assertUnorderedIterableEquals([r[0] for r in rset.description], ['Blog', 'Article']) - - def test_4_relation_selection(self): - req = self.request() - rset = req.execute('Any N WHERE G content N, U talks_about G, U eid %(u)s', {'u': self.blog.eid}) - self._check_rset_size(rset, 1, 1) - self.assertEquals(rset[0][0], 'very interesting') - - - def test_5_orderby(self): - self._setup_relation_description() - rset = self.req.execute('Any X,XC ORDERBY XC WHERE X is Blog, X content XC') - self._check_rset_size(rset, 3, 2) - self.assertEquals(rset.rows, - [[self.blog3.eid, 'hep'], - [self.blog2.eid, 'hip'], - [self.blog.eid, 'hop']]) - - def test_5_orderby_desc(self): - self._setup_relation_description() - rset = self.req.execute('Any X,XC ORDERBY XC DESC WHERE X is Blog, X content XC') - self._check_rset_size(rset, 3, 2) - self.assertEquals(rset.rows, - [[self.blog.eid, 'hop'], - [self.blog2.eid, 'hip'], - [self.blog3.eid, 'hep']]) - - def test_5_orderby_several_terms(self): - self._setup_relation_description() - rset = self.req.execute('Any X,XC,XI ORDERBY XI,XC WHERE X is Blog, X content XC, X itemtype XI') - self._check_rset_size(rset, 3, 3) - self.assertEquals(rset.rows, - [[self.blog3.eid, 'hep', 'business'], - [self.blog2.eid, 'hip', 'personal'], - [self.blog.eid, 'hop', 'personal']]) - - def test_5_orderby_several_terms_mixed_implicit(self): - self._setup_relation_description() - rset = self.req.execute('Any X,XC,XI ORDERBY XI,XC DESC WHERE X is Blog, X content XC, X itemtype XI') - self._check_rset_size(rset, 3, 3) - self.assertEquals(rset.rows, - [[self.blog3.eid, 'hep', 'business'], - [self.blog.eid, 'hop', 'personal'], - [self.blog2.eid, 
'hip', 'personal']]) - - def test_5_orderby_several_terms_explicit_order(self): - self._setup_relation_description() - rset = self.req.execute('Any X,XC,XI ORDERBY XI DESC,XC DESC WHERE X is Blog, X content XC, X itemtype XI') - self._check_rset_size(rset, 3, 3) - self.assertEquals(rset.rows, - [[self.blog.eid, 'hop', 'personal'], - [self.blog2.eid, 'hip', 'personal'], - [self.blog3.eid, 'hep', 'business']]) - - def test_5_orderby_several_terms_mixed_order(self): - self._setup_relation_description() - rset = self.req.execute('Any X,XC,XI ORDERBY XI ASC,XC DESC WHERE X is Blog, X content XC, X itemtype XI') - self._check_rset_size(rset, 3, 3) - self.assertEquals(rset.rows, - [[self.blog3.eid, 'hep', 'business'], - [self.blog.eid, 'hop', 'personal'], - [self.blog2.eid, 'hip', 'personal']]) - - - def test_5_orderby_lower(self): - blog2 = self.add_entity('Blog', itemtype=u'business', content=u'Hup') - rset = self.req.execute('Any X ORDERBY LOWER(XC) ' - 'WHERE X is Blog, X content XC') - self._check_rset_size(rset, 2, 1) - self.assertEquals(rset.rows, [[self.blog.eid], [blog2.eid]]) - rset = self.req.execute('Any X ORDERBY LOWER(XC) DESC' - 'WHERE X is Blog, X content XC') - self._check_rset_size(rset, 2, 1) - self.assertEquals(rset.rows, [[blog2.eid], [self.blog.eid]]) - - def test_5_orderby_stored_proc(self): - blog2 = self.add_entity('Blog', itemtype=u'business', content=u'hop') - rset = self.req.execute('Any X ORDERBY ITEMTYPE_SORT_VALUE(XIT) ' - 'WHERE X is Blog, X itemtype XIT') - self._check_rset_size(rset, 2, 1) - self.assertEquals(rset.rows, [[blog2.eid], [self.blog.eid]]) - rset = self.req.execute('Any X ORDERBY ITEMTYPE_SORT_VALUE(XIT) DESC' - 'WHERE X is Blog, X itemtype XIT') - self._check_rset_size(rset, 2, 1) - self.assertEquals(rset.rows, [[self.blog.eid], [blog2.eid]]) - - - def test_6_limit(self): - self._setup_relation_description() - rset = self.req.execute('Any X LIMIT 2 WHERE X is Blog') - self._check_rset_size(rset, 2, 1) - - def test_6_offset(self): - self._setup_relation_description() - rset = self.req.execute('Any XC ORDERBY XC DESC OFFSET 1 WHERE X is Blog, X content XC') - self._check_rset_size(rset, 2, 1) - self.assertEquals(rset.rows, [['hip'], ['hep']]) - - def test_6_limit_and_orderby(self): - self._setup_relation_description() - rset = self.req.execute('Any XC ORDERBY XC LIMIT 2 WHERE X is Blog, X content XC') - self._check_rset_size(rset, 2, 1) - self.assertEquals(rset.rows, [['hep'], ['hip']]) - - def test_6_limit_offset_and_orderby(self): - self._setup_relation_description() - rset = self.req.execute('Any XC ORDERBY XC LIMIT 2 OFFSET 0 WHERE X is Blog, X content XC') - self._check_rset_size(rset, 2, 1) - self.assertEquals(rset.rows, [['hep'], ['hip']]) - rset = self.req.execute('Any XC ORDERBY XC LIMIT 2 OFFSET 1 WHERE X is Blog, X content XC') - self._check_rset_size(rset, 2, 1) - self.assertEquals(rset.rows, [['hip'], ['hop']]) - rset = self.req.execute('Any XC ORDERBY XC LIMIT 2 OFFSET 2 WHERE X is Blog, X content XC') - self._check_rset_size(rset, 1, 1) - self.assertEquals(rset.rows, [['hop']]) - rset = self.req.execute('Any XC ORDERBY XC LIMIT 2 OFFSET 3 WHERE X is Blog, X content XC') - self.failIf(rset) - - - def test_7_simple_datetimecast(self): - self._setup_relation_description() - _today = today() - _tomorrow = _today + 1 - rset = self.req.execute('Any X WHERE X is Blog, X creation_date >= "%s"' - % _tomorrow.strftime('%Y-%m-%d')) - self.failUnless(len(rset) == 0) - rset = self.req.execute('Any X WHERE X is Blog, X creation_date >= "%s"' - % 
_today.strftime('%Y-%m-%d')) - self._check_rset_size(rset, 3, 1) - rset = self.req.execute('Any X WHERE X is Blog, X creation_date <= "%s"' - % _tomorrow.strftime('%Y-%m-%d')) - self._check_rset_size(rset, 3, 1) - - def test_7_identity_relation(self): - rset = self.req.execute('Any X WHERE X identity Y, X eid %(x)s, Y eid %(y)s', - {'x': self.user.eid, 'y': self.user.eid}) - self._check_rset_size(rset, 1, 1) - rset = self.req.execute('Any Y WHERE X identity Y, X eid %(x)s', - {'x': self.user.eid}) - self._check_rset_size(rset, 1, 1) - self.assertEquals(rset.rows, [[self.user.eid]]) - blog2 = self.add_entity('Blog', itemtype=u'personal', content=u'hip') - rset = self.req.execute('Any X WHERE X identity Y, X eid %(x)s, Y eid %(y)s', - {'x': self.blog.eid, 'y': blog2.eid}) - self.failIf(rset) - - def test_8_not_relation_1(self): - rset = self.req.execute('Any X WHERE X identity U, NOT U in_group G, ' - 'G name "guests", X eid %(x)s, U eid %(u)s', - {'x': self.user.eid, 'u': self.user.eid}) - self._check_rset_size(rset, 1, 1) - self.assertEquals(rset.rows, [[self.user.eid]]) - - def test_8_not_relation_linked_subject(self): - rset = self.req.execute('Any X WHERE NOT X talks_about Y, Y eid %(y)s', - {'y': self.article.eid}) - self.failIf(rset) - blog2 = self.add_entity('Blog', content=u'hop', itemtype=u'personal') - self.commit() - rset = self.req.execute('Any X WHERE NOT X talks_about Y, Y eid %(y)s', - {'y': self.article.eid}) - self._check_rset_size(rset, 1, 1) - self.assertEquals(rset.rows, [[blog2.eid]]) - - def test_8_not_relation_linked_object(self): - rset = self.req.execute('Any Y WHERE NOT X talks_about Y, X eid %(x)s', - {'x': self.blog.eid}) - self.failIf(rset) - article2 = self.add_entity('Article', content=u'hop') - self.commit() - rset = self.req.execute('Any Y WHERE NOT X talks_about Y, X eid %(x)s', - {'x': self.blog.eid}) - self._check_rset_size(rset, 1, 1) - self.assertEquals(rset.rows, [[article2.eid]]) - - def test_8_not_relation_linked_attr(self): - self.skip('not yet implemented') - # TODO: this should generated - # Query(X)[s_talks_about] > "hop" || Query(X)[s_talks_about] < "hop" - article2 = self.add_entity('Article', content=u'hop') - self.req.execute('SET X talks_about Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': self.blog.eid, 'y': article2.eid}) - self.commit() - rset = self.req.execute('Any X WHERE NOT X talks_about Y, Y content "hop"') - self._check_rset_size(rset, 1, 2) - self.assertEquals(rset.rows, [[self.blog.eid, self.article.eid]]) - - def test_8_not_relation_unlinked_subject(self): - blog2 = self.add_entity('Blog', content=u'hop', itemtype=u'personal') - self.commit() - rset = self.req.execute('Any X WHERE NOT X talks_about Y') - self._check_rset_size(rset, 1, 1) - self.assertEquals(rset.rows, [[blog2.eid]]) - - def test_8_not_relation_unlinked_object(self): - article2 = self.add_entity('Article', content=u'hop') - self.commit() - rset = self.req.execute('Any Y WHERE NOT X talks_about Y') - self._check_rset_size(rset, 1, 1) - self.assertEquals(rset.rows, [[article2.eid]]) - - def test_8_not_relation_final_1(self): - rset = self.req.execute('Any G WHERE G is CWGroup, NOT G name "guests"') - self._check_rset_size(rset, 2, 1) - self.assertUnorderedIterableEquals([g.name for g in rset.entities()], - ['users', 'managers']) - - def test_8_not_relation_final_2(self): - rset = self.req.execute('Any GN WHERE G is CWGroup, NOT G name "guests", G name GN') - self._check_rset_size(rset, 2, 1) - self.assertUnorderedIterableEquals([gn for gn, in rset.rows], - ['users', 
'managers']) - - - def test_9_exists(self): - blog2 = self.add_entity('Article', content=u'hop') - article2 = self.add_entity('Article', content=u'hop') - self.req.execute('SET X talks_about Y WHERE X eid %(x)s, Y eid %(y)s', - {'x': self.blog.eid, 'y': article2.eid}) - self.commit() - rset = self.req.execute('Any X WHERE X is Blog, EXISTS(X talks_about Y)') - self._check_rset_size(rset, 1, 1) - self.assertEquals(rset.rows, [[self.blog.eid]]) - - - def test_error_unknown_eid(self): - rset = self.req.execute('Any X WHERE X eid %(x)s', {'x': '1234'}) - self.assertEquals(len(rset), 0) - self.blog.delete() - rset = self.req.execute('Any X WHERE X eid %(x)s', {'x': self.blog.eid}) - self.assertEquals(len(rset), 0) - - def test_nonregr_inlined_relation(self): - eid = self.execute('INSERT YamsEntity X: X inlined_relation Y WHERE Y eid %(y)s', - {'y': self.blog.eid})[0][0] - self.commit() - rset = self.execute('Any X WHERE Y inlined_relation X, Y eid %(y)s', {'y': eid}) - self._check_rset_size(rset, 1, 1) - self.assertEquals(rset[0][0], self.blog.eid) - -if __name__ == '__main__': - unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/test/unittest_schema.py --- a/goa/test/unittest_schema.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,129 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
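The querier tests removed above exercise RQL ordering, paging and EXISTS. A minimal sketch of the same query patterns, assuming an open CubicWeb session `session` and the `Blog`/`talks_about` schema used by those tests (assumptions for illustration, not a real instance)::

    # sketch only: 'session' and the Blog schema (content attribute,
    # talks_about relation) are assumed, as in the removed goa fixtures
    ordered = session.execute('Any X,XC ORDERBY XC DESC '
                              'WHERE X is Blog, X content XC')
    # LIMIT/OFFSET give simple paging over the ordered result set
    page = session.execute('Any XC ORDERBY XC LIMIT 2 OFFSET 1 '
                           'WHERE X is Blog, X content XC')
    # EXISTS keeps only blogs having at least one talks_about relation
    talking = session.execute('Any X WHERE X is Blog, EXISTS(X talks_about Y)')
    for eid, content in ordered.rows:
        pass  # each row mirrors the selected variables X, XC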
-""" - -""" -from cubicweb.goa.testlib import * - -class Article(db.Model): - content = db.TextProperty() - synopsis = db.StringProperty(default='hello') - -class Blog(db.Model): - diem = db.DateProperty(required=True, auto_now_add=True) - title = db.StringProperty(required=True) - content = db.TextProperty() - talks_about = db.ReferenceProperty(Article) - cites = db.SelfReferenceProperty() - - -class SomeViewsTC(GAEBasedTC): - MODEL_CLASSES = (Article, Blog) - - def test_entities_and_relation(self): - schema = self.schema - self.assertSetEquals(set(str(e) for e in schema.entities()), - set(('Boolean', 'Bytes', 'Date', 'Datetime', 'Float', - 'Decimal', - 'Int', 'Interval', 'Password', 'String', 'Time', - 'CWEType', 'CWGroup', 'CWPermission', 'CWProperty', 'CWRType', - 'CWUser', 'EmailAddress', - 'RQLExpression', 'State', 'Transition', 'TrInfo', - 'Article', 'Blog', 'YamsEntity'))) - self.assertSetEquals(set(str(e) for e in schema.relations()), - set(('add_permission', 'address', 'alias', 'allowed_transition', - 'ambiguous_relation', 'canonical', 'cites', - 'comment', 'comment_format', 'condition', 'content', - 'created_by', 'creation_date', 'delete_permission', - 'description', 'description_format', 'destination_state', - 'diem', 'eid', 'expression', 'exprtype', 'final', 'firstname', - 'for_user', 'from_state', 'fulltext_container', 'has_text', - 'identical_to', 'identity', 'in_group', 'initial_state', - 'inlined', 'inlined_relation', 'is', 'is_instance_of', - 'label', 'last_login_time', 'login', - 'mainvars', 'meta', 'modification_date', 'name', 'owned_by', 'pkey', 'primary_email', - 'read_permission', 'require_group', 'state_of', 'surname', 'symmetric', - 'synopsis', 'talks_about', 'title', 'to_state', 'transition_of', - 'update_permission', 'use_email', 'value'))) - - def test_dbmodel_imported(self): - eschema = self.schema['Blog'] - orels = [str(e) for e in eschema.ordered_relations()] - # only relations defined in the class are actually ordered - orels, others = orels[:5], orels[5:] - self.assertEquals(orels, - ['diem', 'title', 'content', 'talks_about', 'cites']) - self.assertUnorderedIterableEquals(others, - ['eid', 'identity', 'owned_by', 'modification_date', - 'created_by', 'creation_date', 'is', 'is_instance_of']) - self.assertUnorderedIterableEquals((str(e) for e in eschema.object_relations()), - ('ambiguous_relation', 'cites', 'identity', 'inlined_relation')) - eschema = self.schema['Article'] - orels = [str(e) for e in eschema.ordered_relations()] - # only relations defined in the class are actually ordered - orels, others = orels[:2], orels[2:] - self.assertEquals(orels, - ['content', 'synopsis']) - self.assertUnorderedIterableEquals(others, - ['eid', 'identity', 'owned_by', 'modification_date', - 'created_by', 'creation_date', 'is', 'is_instance_of']) - self.assertUnorderedIterableEquals((str(e) for e in eschema.object_relations()), - ('ambiguous_relation', 'talks_about', 'identity')) - - def test_yams_imported(self): - eschema = self.schema['CWProperty'] - # only relations defined in the class are actually ordered - orels = [str(e) for e in eschema.ordered_relations()] - orels, others = orels[:3], orels[3:] - self.assertEquals(orels, - ['pkey', 'value', 'for_user']) - self.assertEquals(others, - ['created_by', 'creation_date', 'eid', 'identity', - 'is', 'is_instance_of', 'modification_date', 'owned_by']) - self.assertUnorderedIterableEquals((str(e) for e in eschema.object_relations()), - ('identity',)) - - def test_yams_ambiguous_relation(self): - rschema = 
self.schema['ambiguous_relation'] - # only relations defined in the class are actually ordered - self.assertUnorderedIterableEquals((str(e) for e in rschema.subjects()), - ('YamsEntity',)) - self.assertUnorderedIterableEquals((str(e) for e in rschema.objects()), - ('Blog', 'Article')) - - def test_euser(self): - eschema = self.schema['CWUser'] - # XXX pretend to have some relations it has not - self.assertEquals([str(e) for e in eschema.ordered_relations()], - ['login', 'firstname', 'surname', 'last_login_time', - 'primary_email', 'use_email', 'in_group', 'created_by', - 'creation_date', 'eid', 'has_text', 'identity', - 'is', 'is_instance_of', 'modification_date', - 'owned_by']) - self.assertUnorderedIterableEquals((str(e) for e in eschema.object_relations()), - ('owned_by', 'created_by', 'identity', 'for_user')) - - def test_eid(self): - rschema = self.schema['eid'] - self.assertEquals(rschema.objects(), ('Bytes',)) - self.assertEquals(rschema.rproperty('Blog', 'Bytes', 'cardinality'), '?1') - - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/test/unittest_views.py --- a/goa/test/unittest_views.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,72 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
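The schema tests removed above rely on yams schema introspection. A minimal sketch of the calls they use, assuming `schema` is an already loaded instance schema such as the one built by the removed GAEBasedTC fixture::

    # sketch only: 'schema' is assumed to be a loaded CubicWeb/yams schema
    etypes = [str(e) for e in schema.entities()]          # all entity types
    rtypes = [str(r) for r in schema.relations()]         # all relation types
    blog = schema['Blog']                                  # one entity schema
    ordered = [str(r) for r in blog.ordered_relations()]   # subject relations, declaration order
    incoming = [str(r) for r in blog.object_relations()]   # relations whose object is Blog
    cites = schema['cites']                                 # one relation schema
    subject_types, object_types = cites.subjects(), cites.objects()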
-""" - -""" -from cubicweb.goa.testlib import * - -from cubicweb.interfaces import ICalendarable - - -class Blog(db.Model): - diem = db.DateProperty(required=True, auto_now_add=True) - title = db.StringProperty(required=True) - content = db.TextProperty() - - __implements__ = (ICalendarable,) - - @property - def start(self): - return self.diem - - @property - def stop(self): - return self.diem - - def matching_dates(self, begin, end): - """calendar views interface""" - mydate = self.diem - if mydate: - return [mydate] - return [] - - -class SomeViewsTC(GAEBasedTC): - MODEL_CLASSES = (Blog, ) - from cubicweb.web.views import basecontrollers, baseviews, navigation, boxes, calendar - from data import views - LOAD_APP_MODULES = (basecontrollers, baseviews, navigation, boxes, calendar, views) - - def setUp(self): - GAEBasedTC.setUp(self) - self.req = self.request() - self.blog = Blog(title=u'a blog', content=u'hop') - self.blog.put(self.req) - - def test_hcal(self): - self.vreg['views'].render('hcal', self.req, rset=self.blog.rset) - - def test_django_index(self): - self.vreg['views'].render('index', self.req, rset=None) - -for vid in ('primary', 'oneline', 'incontext', 'outofcontext', 'text'): - setattr(SomeViewsTC, 'test_%s'%vid, lambda self, vid=vid: self.blog.view(vid)) - -if __name__ == '__main__': - from logilab.common.testlib import unittest_main - unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/testlib.py --- a/goa/testlib.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,198 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" -""" -__docformat__ = "restructuredtext en" - -from logilab.common.testlib import TestCase, TestSkipped -try: - import google.appengine -except ImportError: - raise TestSkipped('Can not import google.appengine. 
Skip this module') - -import os, os.path as osp -import time -from shutil import copy - -# additional monkey patches necessary in regular cubicweb environment -from cubicweb.server import rqlannotation -from cubicweb.goa.overrides import rqlannotation as goarqlannotation -rqlannotation.SQLGenAnnotator = goarqlannotation.SQLGenAnnotator -rqlannotation.set_qdata = goarqlannotation.set_qdata - -from google.appengine.api import apiproxy_stub_map -from google.appengine.api import datastore_file_stub -from google.appengine.ext import db as gdb - -from cubicweb.devtools.fake import FakeRequest - -from cubicweb.goa import db, do_monkey_patch -from cubicweb.goa.goavreg import GAEVRegistry -from cubicweb.goa.goaconfig import GAEConfiguration -from cubicweb.goa.dbinit import (create_user, create_groups, fix_entities, - init_persistent_schema, insert_versions) - -import logging -logger = logging.getLogger() -logger.setLevel(logging.CRITICAL) - -do_monkey_patch() - -class GAEBasedTC(TestCase): - APP_ID = u'test_app' - AUTH_DOMAIN = 'gmail.com' - LOGGED_IN_USER = u't...@example.com' # set to '' for no logged in user - MODEL_CLASSES = None - LOAD_APP_MODULES = None - config = None - _DS_TEMPL_FILE = 'tmpdb-template' - - def load_schema_hook(self, loader): - loader.import_yams_cube_schema('data') - - @property - def DS_FILE(self): - return self.DS_TEMPL_FILE.replace('-template', '') - - @property - def DS_TEMPL_FILE(self): - return self._DS_TEMPL_FILE + '_'.join(sorted(cls.__name__ for cls in self.MODEL_CLASSES)) - - def _set_ds_file(self, dsfile): - # Start with a fresh api proxy. - apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap() - # Use a fresh stub datastore. - stub = datastore_file_stub.DatastoreFileStub(self.APP_ID, dsfile, - dsfile+'.history') - apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', stub) - - def setUp(self): - # Ensure we're in UTC. - os.environ['TZ'] = 'UTC' - time.tzset() - if osp.exists(self.DS_TEMPL_FILE): - copy(self.DS_TEMPL_FILE, self.DS_FILE) - need_ds_init = False - self._set_ds_file(self.DS_FILE) - else: - need_ds_init = True - self._set_ds_file(self.DS_TEMPL_FILE) -# from google.appengine.api import mail_stub -# from google3.apphosting.api import urlfetch_stub -# from google3.apphosting.api import user_service_stub -# # Use a fresh stub UserService. -# apiproxy_stub_map.apiproxy.RegisterStub( -# 'user', user_service_stub.UserServiceStub()) - os.environ['AUTH_DOMAIN'] = self.AUTH_DOMAIN - os.environ['USER_EMAIL'] = self.LOGGED_IN_USER -# # Use a fresh urlfetch stub. -# apiproxy_stub_map.apiproxy.RegisterStub( -# 'urlfetch', urlfetch_stub.URLFetchServiceStub()) -# # Use a fresh mail stub. 
-# apiproxy_stub_map.apiproxy.RegisterStub( -# 'mail', mail_stub.MailServiceStub()) - if self.MODEL_CLASSES is None: - raise Exception('GAEBasedTC should set MODEL_CLASSES class attribute') - gdb._kind_map = {} - self.config = self.config or GAEConfiguration('toto') - self.config.init_log(logging.CRITICAL) - self.schema = self.config.load_schema(self.MODEL_CLASSES, - self.load_schema_hook) - self.vreg = GAEVregistry(self.config) - self.vreg.schema = self.schema - self.vreg.load_module(db) - from cubicweb.goa.appobjects import sessions - self.vreg.load_module(sessions) - from cubicweb.entities import authobjs, schemaobjs - self.vreg.load_module(authobjs) - self.vreg.load_module(schemaobjs) - if self.config['use-google-auth']: - from cubicweb.goa.appobjects import gauthservice - self.vreg.load_module(gauthservice) - if self.LOAD_APP_MODULES is not None: - for module in self.LOAD_APP_MODULES: - self.vreg.load_module(module) - for cls in self.MODEL_CLASSES: - self.vreg.register(cls) - self.session_manager = self.vreg.select('components', 'sessionmanager') - if need_ds_init: - # create default groups and create entities according to the schema - create_groups() - if not self.config['use-google-auth']: - create_user(self.LOGGED_IN_USER, 'toto', ('users', 'managers')) - self.session = self.login(self.LOGGED_IN_USER, 'toto') - else: - req = FakeRequest(vreg=self.vreg) - self.session = self.session_manager.open_session(req) - self.user = self.session.user() - ssession = self.config.repo_session(self.session.sessionid) - ssession.set_pool() - init_persistent_schema(ssession, self.schema) - insert_versions(ssession, self.config) - ssession.commit() - fix_entities(self.schema) - copy(self.DS_TEMPL_FILE, self.DS_FILE) - self._set_ds_file(self.DS_FILE) - else: - if not self.config['use-google-auth']: - self.session = self.login(self.LOGGED_IN_USER, 'toto') - else: - req = FakeRequest(vreg=self.vreg) - self.session = self.session_manager.open_session(req) - self.user = self.session.user() - - def tearDown(self): - self.session.close() - - def request(self): - req = FakeRequest(vreg=self.vreg) - req.set_connection(self.session, self.user) - return req - - def add_entity(self, etype, **kwargs): - cu = self.session.cursor() - rql = 'INSERT %s X' % etype - if kwargs: - rql += ': %s' % ', '.join('X %s %%(%s)s' % (key, key) for key in kwargs) - rset = cu.execute(rql, kwargs) - return rset.get_entity(0, 0) - - def execute(self, *args): - return self.session.cursor().execute(*args) - - def commit(self): - self.session.commit() - - def rollback(self): - self.session.rollback() - - def create_user(self, login, groups=('users',), req=None): - assert not self.config['use-google-auth'] - user = self.add_entity('CWUser', upassword=str(login), login=unicode(login)) - cu = self.session.cursor() - cu.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)' - % ','.join(repr(g) for g in groups), - {'x': user.eid}, 'x') - return user - - def login(self, login, password=None): - assert not self.config['use-google-auth'] - req = FakeRequest(vreg=self.vreg) - req.form['__login'] = login - req.form['__password'] = password or login - return self.session_manager.open_session(req) diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/tools/__init__.py --- a/goa/tools/__init__.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,20 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
-# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""lax tools cube - -""" diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/tools/generate_schema_img.py --- a/goa/tools/generate_schema_img.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,43 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -""" - -""" -import sys -from os.path import dirname, abspath, join -from yams import schema2dot -from cubicweb.web.views.schema import SKIP_TYPES - -APPLROOT = abspath(join(dirname(abspath(__file__)), '..')) - -try: - import custom -except ImportError: - sys.path.insert(0, APPLROOT) - import custom - - -schema = custom.SCHEMA -skip_rels = ('owned_by', 'created_by', 'identity', 'is', 'is_instance_of') -path = join(APPLROOT, 'data', 'schema.png') -schema2dot.schema2dot(schema, path, #size=size, - skiptypes=SKIP_TYPES) -print 'generated', path -path = join(APPLROOT, 'data', 'metaschema.png') -schema2dot.schema2dot(schema, path) -print 'generated', path diff -r f4d1d5d9ccbb -r 90f2f20367bc goa/tools/laxctl.py --- a/goa/tools/laxctl.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,269 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
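The removed generate_schema_img.py above drives yams' schema2dot to render the application schema as PNG files. Reduced to its core, the pattern is the following sketch (the `schema` object and output directory are assumptions for illustration)::

    from os.path import join
    from yams import schema2dot
    from cubicweb.web.views.schema import SKIP_TYPES

    def dump_schema_images(schema, datadir):
        # sketch only: 'schema' is a loaded instance schema and 'datadir'
        # an existing directory; both are placeholders
        schema2dot.schema2dot(schema, join(datadir, 'schema.png'),
                              skiptypes=SKIP_TYPES)
        schema2dot.schema2dot(schema, join(datadir, 'metaschema.png'))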
-"""provides all lax instances management commands into a single utility script - -""" -__docformat__ = "restructuredtext en" - -import sys -import os -import os.path as osp -import time -import re -import urllib2 -from urllib import urlencode -from Cookie import SimpleCookie - -from logilab.common.clcommands import Command, register_commands, main_run - -from cubicweb.uilib import remove_html_tags -from cubicweb.web.views.schema import SKIP_TYPES - -APPLROOT = osp.abspath(osp.join(osp.dirname(osp.abspath(__file__)), '..')) - - -def initialize_vregistry(applroot): - # apply monkey patches first - from cubicweb.goa import do_monkey_patch - do_monkey_patch() - from cubicweb.goa.goavreg import GAEVregistry - from cubicweb.goa.goaconfig import GAEConfiguration - #WebConfiguration.ext_resources['JAVASCRIPTS'].append('DATADIR/goa.js') - config = GAEConfiguration('toto', applroot) - vreg = GAEVregistry(config) - vreg.set_schema(config.load_schema()) - return vreg - -def alistdir(directory): - return [osp.join(directory, f) for f in os.listdir(directory)] - - -class LaxCommand(Command): - """base command class for all lax commands - creates vreg, schema and calls - """ - min_args = max_args = 0 - - def run(self, args): - self.vreg = initialize_vregistry(APPLROOT) - self._run(args) - - -class GenerateSchemaCommand(LaxCommand): - """generates the schema's png file""" - name = 'genschema' - - def _run(self, args): - assert not args, 'no argument expected' - from yams import schema2dot - schema = self.vreg.schema - path = osp.join(APPLROOT, 'data', 'schema.png') - schema2dot.schema2dot(schema, path, #size=size, - skiptypes=SKIP_TYPES) - print 'generated', path - path = osp.join(APPLROOT, 'data', 'metaschema.png') - schema2dot.schema2dot(schema, path) - print 'generated', path - - -class PopulateDataDirCommand(LaxCommand): - """populate instance's data directory according to used cubes""" - name = 'populatedata' - - def _run(self, args): - assert not args, 'no argument expected' - # first clean everything which is a symlink from the data directory - datadir = osp.join(APPLROOT, 'data') - if not osp.exists(datadir): - print 'created data directory' - os.mkdir(datadir) - for filepath in alistdir(datadir): - if osp.islink(filepath): - print 'removing', filepath - os.remove(filepath) - cubes = list(self.vreg.config.cubes()) + ['shared'] - for templ in cubes: - templpath = self.vreg.config.cube_dir(templ) - templdatadir = osp.join(templpath, 'data') - if not osp.exists(templdatadir): - print 'no data provided by', templ - continue - for resource in os.listdir(templdatadir): - if resource == 'external_resources': - continue - if not osp.exists(osp.join(datadir, resource)): - print 'symlinked %s from %s' % (resource, templ) - os.symlink(osp.join(templdatadir, resource), - osp.join(datadir, resource)) - - -class NoRedirectHandler(urllib2.HTTPRedirectHandler): - def http_error_302(self, req, fp, code, msg, headers): - raise urllib2.HTTPError(req.get_full_url(), code, msg, headers, fp) - http_error_301 = http_error_303 = http_error_307 = http_error_302 - - -class GetSessionIdHandler(urllib2.HTTPRedirectHandler): - def __init__(self, config): - self.config = config - - def http_error_303(self, req, fp, code, msg, headers): - cookie = SimpleCookie(headers['Set-Cookie']) - sessionid = cookie['__session'].value - print 'session id', sessionid - setattr(self.config, 'cookie', '__session=' + sessionid) - return 1 # on exception should be raised - - -class URLCommand(LaxCommand): - """abstract class for commands doing 
stuff by accessing the web instance - """ - min_args = max_args = 1 - arguments = '' - - options = ( - ('cookie', - {'short': 'C', 'type' : 'string', 'metavar': 'key=value', - 'default': None, - 'help': 'session/authentication cookie.'}), - ('user', - {'short': 'u', 'type' : 'string', 'metavar': 'login', - 'default': None, - 'help': 'user login instead of giving raw cookie string (require lax ' - 'based authentication).'}), - ('password', - {'short': 'p', 'type' : 'string', 'metavar': 'password', - 'default': None, - 'help': 'user password instead of giving raw cookie string (require ' - 'lax based authentication).'}), - ) - - def _run(self, args): - baseurl = args[0] - if not baseurl.startswith('http'): - baseurl = 'http://' + baseurl - if not baseurl.endswith('/'): - baseurl += '/' - self.base_url = baseurl - if not self.config.cookie and self.config.user: - # no cookie specified but a user is. Try to open a session using - # given authentication info - print 'opening session for', self.config.user - opener = urllib2.build_opener(GetSessionIdHandler(self.config)) - urllib2.install_opener(opener) - data = urlencode(dict(__login=self.config.user, - __password=self.config.password)) - self.open_url(urllib2.Request(baseurl, data)) - opener = urllib2.build_opener(NoRedirectHandler()) - urllib2.install_opener(opener) - self.do_base_url(baseurl) - - def build_req(self, url): - req = urllib2.Request(url) - if self.config.cookie: - req.headers['Cookie'] = self.config.cookie - return req - - def open_url(self, req): - try: - return urllib2.urlopen(req) - except urllib2.HTTPError, ex: - if ex.code == 302: - self.error_302(req, ex) - elif ex.code == 500: - self.error_500(req, ex) - else: - raise - - def error_302(self, req, ex): - print 'authentication required' - print ('visit %s?vid=authinfo with your browser to get ' - 'authentication info' % self.base_url) - sys.exit(1) - - def error_500(self, req, ex): - print 'an unexpected error occured on the server' - print ('you may get more information by visiting ' - '%s' % req.get_full_url()) - sys.exit(1) - - def extract_message(self, data): - match = re.search(r'
        (.*?)
        ', data.read(), re.M|re.S) - if match: - msg = remove_html_tags(match.group(1)) - print msg - return msg - - def do_base_url(self, baseurl): - raise NotImplementedError() - - -class DSInitCommand(URLCommand): - """initialize the datastore""" - name = 'db-init' - - options = URLCommand.options + ( - ('sleep', - {'short': 's', 'type' : 'int', 'metavar': 'nb seconds', - 'default': None, - 'help': 'number of seconds to wait between each request to avoid ' - 'going out of quota.'}), - ) - - def do_base_url(self, baseurl): - req = self.build_req(baseurl + '?vid=contentinit') - while True: - try: - data = self.open_url(req) - except urllib2.HTTPError, ex: - if ex.code == 303: # redirect - print 'process completed' - break - raise - msg = self.extract_message(data) - if msg and msg.startswith('error: '): - print ('you may to cleanup datastore by visiting ' - '%s?vid=contentclear (ALL ENTITIES WILL BE DELETED)' - % baseurl) - break - if self.config.sleep: - time.sleep(self.config.sleep) - - -class CleanSessionsCommand(URLCommand): - """cleanup sessions on the server. This command should usually be called - regularly by a cron job or equivalent. - """ - name = "cleansessions" - def do_base_url(self, baseurl): - req = self.build_req(baseurl + '?vid=cleansessions') - data = self.open_url(req) - self.extract_message(data) - - -register_commands([GenerateSchemaCommand, - PopulateDataDirCommand, - DSInitCommand, - CleanSessionsCommand, - ]) - -def run(): - main_run(sys.argv[1:]) - -if __name__ == '__main__': - run() diff -r f4d1d5d9ccbb -r 90f2f20367bc hooks/bookmark.py --- a/hooks/bookmark.py Tue Jul 27 12:36:03 2010 +0200 +++ b/hooks/bookmark.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
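The removed laxctl utility above builds its command line interface on logilab.common.clcommands: subclass Command, register the classes, then dispatch with main_run. A minimal sketch of that pattern, with an illustrative command that is not part of laxctl::

    import sys
    from logilab.common.clcommands import Command, register_commands, main_run

    class HelloCommand(Command):
        """illustrative command, not part of laxctl"""
        name = 'hello'
        min_args = max_args = 0

        def run(self, args):
            print 'hello from a clcommands-based tool'

    register_commands([HelloCommand])

    if __name__ == '__main__':
        main_run(sys.argv[1:])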
-"""bookmark related hooks +"""bookmark related hooks""" -""" __docformat__ = "restructuredtext en" from cubicweb.server import hook @@ -28,7 +27,7 @@ def precommit_event(self): if not self.session.deleted_in_transaction(self.bookmark.eid): if not self.bookmark.bookmarked_by: - self.bookmark.delete() + self.bookmark.cw_delete() class DelBookmarkedByHook(hook.Hook): diff -r f4d1d5d9ccbb -r 90f2f20367bc hooks/integrity.py --- a/hooks/integrity.py Tue Jul 27 12:36:03 2010 +0200 +++ b/hooks/integrity.py Wed Nov 03 16:38:28 2010 +0100 @@ -26,19 +26,17 @@ from yams.schema import role_name from cubicweb import ValidationError -from cubicweb.schema import RQLConstraint, RQLUniqueConstraint -from cubicweb.selectors import implements +from cubicweb.schema import (META_RTYPES, WORKFLOW_RTYPES, + RQLConstraint, RQLUniqueConstraint) +from cubicweb.selectors import is_instance from cubicweb.uilib import soup2xhtml from cubicweb.server import hook from cubicweb.server.hook import set_operation # special relations that don't have to be checked for integrity, usually # because they are handled internally by hooks (so we trust ourselves) -DONT_CHECK_RTYPES_ON_ADD = set(('owned_by', 'created_by', - 'is', 'is_instance_of', - 'wf_info_for', 'from_state', 'to_state')) -DONT_CHECK_RTYPES_ON_DEL = set(('is', 'is_instance_of', - 'wf_info_for', 'from_state', 'to_state')) +DONT_CHECK_RTYPES_ON_ADD = META_RTYPES | WORKFLOW_RTYPES +DONT_CHECK_RTYPES_ON_DEL = META_RTYPES | WORKFLOW_RTYPES _UNIQUE_CONSTRAINTS_LOCK = Lock() _UNIQUE_CONSTRAINTS_HOLDER = None @@ -253,7 +251,7 @@ """delete the composed of a composite relation when this relation is deleted """ __regid__ = 'checkownersgroup' - __select__ = IntegrityHook.__select__ & implements('CWGroup') + __select__ = IntegrityHook.__select__ & is_instance('CWGroup') events = ('before_delete_entity', 'before_update_entity') def __call__(self): @@ -293,7 +291,7 @@ class StripCWUserLoginHook(IntegrityHook): """ensure user logins are stripped""" __regid__ = 'stripuserlogin' - __select__ = IntegrityHook.__select__ & implements('CWUser') + __select__ = IntegrityHook.__select__ & is_instance('CWUser') events = ('before_add_entity', 'before_update_entity',) def __call__(self): diff -r f4d1d5d9ccbb -r 90f2f20367bc hooks/metadata.py --- a/hooks/metadata.py Tue Jul 27 12:36:03 2010 +0200 +++ b/hooks/metadata.py Wed Nov 03 16:38:28 2010 +0100 @@ -21,7 +21,7 @@ from datetime import datetime -from cubicweb.selectors import implements +from cubicweb.selectors import is_instance from cubicweb.server import hook from cubicweb.server.utils import eschema_eid @@ -140,7 +140,7 @@ class FixUserOwnershipHook(MetaDataHook): """when a user has been created, add owned_by relation on itself""" __regid__ = 'fixuserowner' - __select__ = MetaDataHook.__select__ & implements('CWUser') + __select__ = MetaDataHook.__select__ & is_instance('CWUser') events = ('after_add_entity',) def __call__(self): diff -r f4d1d5d9ccbb -r 90f2f20367bc hooks/notification.py --- a/hooks/notification.py Tue Jul 27 12:36:03 2010 +0200 +++ b/hooks/notification.py Wed Nov 03 16:38:28 2010 +0100 @@ -22,7 +22,7 @@ from logilab.common.textutils import normalize_text -from cubicweb.selectors import implements +from cubicweb.selectors import is_instance from cubicweb.server import hook from cubicweb.sobjects.supervising import SupervisionMailOp @@ -49,7 +49,7 @@ class StatusChangeHook(NotificationHook): """notify when a workflowable entity has its state modified""" __regid__ = 'notifystatuschange' - __select__ = 
NotificationHook.__select__ & implements('TrInfo') + __select__ = NotificationHook.__select__ & is_instance('TrInfo') events = ('after_add_entity',) def __call__(self): diff -r f4d1d5d9ccbb -r 90f2f20367bc hooks/security.py --- a/hooks/security.py Tue Jul 27 12:36:03 2010 +0200 +++ b/hooks/security.py Wed Nov 03 16:38:28 2010 +0100 @@ -29,9 +29,9 @@ def check_entity_attributes(session, entity, editedattrs=None, creation=False): eid = entity.eid eschema = entity.e_schema - # .skip_security_attributes is there to bypass security for attributes + # ._cw_skip_security_attributes is there to bypass security for attributes # set by hooks by modifying the entity's dictionnary - dontcheck = entity.skip_security_attributes + dontcheck = entity._cw_skip_security_attributes if editedattrs is None: try: editedattrs = entity.edited_attributes @@ -59,7 +59,7 @@ for values in session.transaction_data.pop('check_entity_perm_op'): entity = session.entity_from_eid(values[0]) action = values[1] - entity.check_perm(action) + entity.cw_check_perm(action) check_entity_attributes(session, entity, values[2:], creation=self.creation) @@ -110,10 +110,10 @@ def __call__(self): try: # check user has permission right now, if not retry at commit time - self.entity.check_perm('update') + self.entity.cw_check_perm('update') check_entity_attributes(self._cw, self.entity) except Unauthorized: - self.entity.clear_local_perm_cache('update') + self.entity._cw_clear_local_perm_cache('update') # save back editedattrs in case the entity is reedited later in the # same transaction, which will lead to edited_attributes being # overwritten @@ -127,7 +127,7 @@ events = ('before_delete_entity',) def __call__(self): - self.entity.check_perm('delete') + self.entity.cw_check_perm('delete') class BeforeAddRelationSecurityHook(SecurityHook): diff -r f4d1d5d9ccbb -r 90f2f20367bc hooks/syncschema.py --- a/hooks/syncschema.py Tue Jul 27 12:36:03 2010 +0200 +++ b/hooks/syncschema.py Wed Nov 03 16:38:28 2010 +0100 @@ -33,8 +33,9 @@ from logilab.common.testlib import mock_object from cubicweb import ValidationError -from cubicweb.selectors import implements -from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES, CONSTRAINTS, display_name +from cubicweb.selectors import is_instance +from cubicweb.schema import (SCHEMA_TYPES, META_RTYPES, VIRTUAL_RTYPES, + CONSTRAINTS, ETYPE_NAME_MAP, display_name) from cubicweb.server import hook, schemaserial as ss from cubicweb.server.sqlutils import SQL_PREFIX @@ -51,16 +52,9 @@ } # core entity and relation types which can't be removed -CORE_ETYPES = list(BASE_TYPES) + ['CWEType', 'CWRType', 'CWUser', 'CWGroup', - 'CWConstraint', 'CWAttribute', 'CWRelation'] -CORE_RTYPES = ['eid', 'creation_date', 'modification_date', 'cwuri', - 'login', 'upassword', 'name', - 'is', 'instanceof', 'owned_by', 'created_by', 'in_group', - 'relation_type', 'from_entity', 'to_entity', - 'constrainted_by', - 'read_permission', 'add_permission', - 'delete_permission', 'updated_permission', - ] +CORE_TYPES = BASE_TYPES | SCHEMA_TYPES | META_RTYPES | set( + ('CWUser', 'CWGroup','login', 'upassword', 'name', 'in_group')) + def get_constraints(session, entity): constraints = [] @@ -80,6 +74,11 @@ def add_inline_relation_column(session, etype, rtype): """add necessary column and index for an inlined relation""" + attrkey = '%s.%s' % (etype, rtype) + createdattrs = session.transaction_data.setdefault('createdattrs', set()) + if attrkey in createdattrs: + return + createdattrs.add(attrkey) table = SQL_PREFIX + etype column = SQL_PREFIX + 
rtype try: @@ -96,8 +95,27 @@ # is done by the dbhelper) session.pool.source('system').create_index(session, table, column) session.info('added index on %s(%s)', table, column) - session.transaction_data.setdefault('createdattrs', []).append( - '%s.%s' % (etype, rtype)) + + +def insert_rdef_on_subclasses(session, eschema, rschema, rdefdef, props): + # XXX 'infered': True/False, not clear actually + props.update({'constraints': rdefdef.constraints, + 'description': rdefdef.description, + 'cardinality': rdefdef.cardinality, + 'permissions': rdefdef.get_permissions(), + 'order': rdefdef.order, + 'infered': False, 'eid': None + }) + cstrtypemap = ss.cstrtype_mapping(session) + groupmap = group_mapping(session) + object = rschema.schema.eschema(rdefdef.object) + for specialization in eschema.specialized_by(False): + if (specialization, rdefdef.object) in rschema.rdefs: + continue + sperdef = RelationDefinitionSchema(specialization, rschema, + object, props) + ss.execschemarql(session.execute, sperdef, + ss.rdef2rql(sperdef, cstrtypemap, groupmap)) def check_valid_changes(session, entity, ro_attrs=('name', 'final')): @@ -115,6 +133,14 @@ raise ValidationError(entity.eid, errors) +class SyncSchemaHook(hook.Hook): + """abstract class for schema synchronization hooks (in the `syncschema` + category) + """ + __abstract__ = True + category = 'syncschema' + + # operations for low-level database alteration ################################ class DropTable(hook.Operation): @@ -129,6 +155,8 @@ self.session.system_sql('DROP TABLE %s' % self.table) self.info('dropped table %s', self.table) + # XXX revertprecommit_event + class DropRelationTable(DropTable): def __init__(self, session, rtype): @@ -156,6 +184,8 @@ self.error('dropping column not supported by the backend, handle ' 'it yourself (%s.%s)', table, column) + # XXX revertprecommit_event + # base operations for in-memory schema synchronization ######################## @@ -175,7 +205,7 @@ if not eschema.final: clear_cache(eschema, 'ordered_relations') - def commit_event(self): + def postcommit_event(self): rebuildinfered = self.session.data.get('rebuild-infered', True) repo = self.session.repo # commit event should not raise error, while set_schema has chances to @@ -195,60 +225,88 @@ class MemSchemaOperation(hook.Operation): """base class for schema operations""" - def __init__(self, session, kobj=None, **kwargs): - self.kobj = kobj - # once Operation.__init__ has been called, event may be triggered, so - # do this last ! 
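The hook modules patched above are migrated from the deprecated `implements` selector to `is_instance`. A minimal sketch of a hook written against the new selector (the registry id and entity type are illustrative only)::

    from cubicweb.selectors import is_instance
    from cubicweb.server import hook

    class BlogAddedHook(hook.Hook):
        """illustrative hook: react when a Blog entity is created"""
        __regid__ = 'myapp.blog-added'
        __select__ = hook.Hook.__select__ & is_instance('Blog')
        events = ('after_add_entity',)

        def __call__(self):
            # self.entity is the freshly created Blog; logging helper assumed
            # to be the standard appobject info() method
            self.info('blog %s created', self.entity.eid)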
+ def __init__(self, session, **kwargs): hook.Operation.__init__(self, session, **kwargs) # every schema operation is triggering a schema update MemSchemaNotifyChanges(session) - def prepare_constraints(self, rdef): - # if constraints is already a list, reuse it (we're updating multiple - # constraints of the same rdef in the same transactions) - if not isinstance(rdef.constraints, list): - rdef.constraints = list(rdef.constraints) - self.constraints = rdef.constraints - - -class MemSchemaEarlyOperation(MemSchemaOperation): - def insert_index(self): - """schema operation which are inserted at the begining of the queue - (typically to add/remove entity or relation types) - """ - i = -1 - for i, op in enumerate(self.session.pending_operations): - if not isinstance(op, MemSchemaEarlyOperation): - return i - return i + 1 - # operations for high-level source database alteration ######################## -class SourceDbCWETypeRename(hook.Operation): +class CWETypeAddOp(MemSchemaOperation): + """after adding a CWEType entity: + * add it to the instance's schema + * create the necessary table + * set creation_date and modification_date by creating the necessary + CWAttribute entities + * add owned_by relation by creating the necessary CWRelation entity + """ + + def precommit_event(self): + session = self.session + entity = self.entity + schema = session.vreg.schema + etype = ybo.EntityType(eid=entity.eid, name=entity.name, + description=entity.description) + eschema = schema.add_entity_type(etype) + # create the necessary table + tablesql = y2sql.eschema2sql(session.pool.source('system').dbhelper, + eschema, prefix=SQL_PREFIX) + for sql in tablesql.split(';'): + if sql.strip(): + session.system_sql(sql) + # add meta relations + gmap = group_mapping(session) + cmap = ss.cstrtype_mapping(session) + for rtype in (META_RTYPES - VIRTUAL_RTYPES): + rschema = schema[rtype] + sampletype = rschema.subjects()[0] + desttype = rschema.objects()[0] + rdef = copy(rschema.rdef(sampletype, desttype)) + rdef.subject = mock_object(eid=entity.eid) + mock = mock_object(eid=None) + ss.execschemarql(session.execute, mock, ss.rdef2rql(rdef, cmap, gmap)) + + def revertprecommit_event(self): + # revert changes on in memory schema + self.session.vreg.schema.del_entity_type(self.entity.name) + # revert changes on database + self.session.system_sql('DROP TABLE %s%s' % (SQL_PREFIX, self.entity.name)) + + +class CWETypeRenameOp(MemSchemaOperation): """this operation updates physical storage accordingly""" oldname = newname = None # make pylint happy - def precommit_event(self): + def rename(self, oldname, newname): + self.session.vreg.schema.rename_entity_type(oldname, newname) # we need sql to operate physical changes on the system database sqlexec = self.session.system_sql - sqlexec('ALTER TABLE %s%s RENAME TO %s%s' % (SQL_PREFIX, self.oldname, - SQL_PREFIX, self.newname)) - self.info('renamed table %s to %s', self.oldname, self.newname) + sqlexec('ALTER TABLE %s%s RENAME TO %s%s' % (SQL_PREFIX, oldname, + SQL_PREFIX, newname)) + self.info('renamed table %s to %s', oldname, newname) sqlexec('UPDATE entities SET type=%s WHERE type=%s', - (self.newname, self.oldname)) + (newname, oldname)) sqlexec('UPDATE deleted_entities SET type=%s WHERE type=%s', - (self.newname, self.oldname)) + (newname, oldname)) + # XXX transaction records + + def precommit_event(self): + self.rename(self.oldname, self.newname) + + def revertprecommit_event(self): + self.rename(self.newname, self.oldname) -class SourceDbCWRTypeUpdate(hook.Operation): 
+class CWRTypeUpdateOp(MemSchemaOperation): """actually update some properties of a relation definition""" rschema = entity = values = None # make pylint happy + oldvalus = None def precommit_event(self): rschema = self.rschema if rschema.final: - return + return # watched changes to final relation type are unexpected session = self.session if 'fulltext_container' in self.values: for subjtype, objtype in rschema.rdefs: @@ -256,10 +314,14 @@ UpdateFTIndexOp) hook.set_operation(session, 'fti_update_etypes', objtype, UpdateFTIndexOp) + # update the in-memory schema first + self.oldvalues = dict( (attr, getattr(rschema, attr)) for attr in self.values) + self.rschema.__dict__.update(self.values) + # then make necessary changes to the system source database if not 'inlined' in self.values: return # nothing to do inlined = self.values['inlined'] - # check in-lining is necessary / possible + # check in-lining is possible when inlined if inlined: self.entity.check_inlined_allowed() # inlined changed, make necessary physical changes! @@ -295,7 +357,7 @@ except Exception, ex: # the column probably already exists. this occurs when the # entity's type has just been added or if the column has not - # been previously dropped + # been previously dropped (eg sqlite) self.error('error while altering table %s: %s', etype, ex) # copy existant data. # XXX don't use, it's not supported by sqlite (at least at when i tried it) @@ -315,8 +377,13 @@ # drop existant table DropRelationTable(session, rtype) + def revertprecommit_event(self): + # revert changes on in memory schema + self.rschema.__dict__.update(self.oldvalues) + # XXX revert changes on database -class SourceDbCWAttributeAdd(hook.Operation): + +class CWAttributeAddOp(MemSchemaOperation): """an attribute relation (CWAttribute) has been added: * add the necessary column * set default on this column if any and possible @@ -330,24 +397,18 @@ def init_rdef(self, **kwargs): entity = self.entity fromentity = entity.stype + rdefdef = self.rdefdef = ybo.RelationDefinition( + str(fromentity.name), entity.rtype.name, str(entity.otype.name), + description=entity.description, cardinality=entity.cardinality, + constraints=get_constraints(self.session, entity), + order=entity.ordernum, eid=entity.eid, **kwargs) + self.session.vreg.schema.add_relation_def(rdefdef) self.session.execute('SET X ordernum Y+1 ' 'WHERE X from_entity SE, SE eid %(se)s, X ordernum Y, ' 'X ordernum >= %(order)s, NOT X eid %(x)s', {'x': entity.eid, 'se': fromentity.eid, 'order': entity.ordernum or 0}) - subj = str(fromentity.name) - rtype = entity.rtype.name - obj = str(entity.otype.name) - constraints = get_constraints(self.session, entity) - rdef = ybo.RelationDefinition(subj, rtype, obj, - description=entity.description, - cardinality=entity.cardinality, - constraints=constraints, - order=entity.ordernum, - eid=entity.eid, - **kwargs) - MemSchemaRDefAdd(self.session, rdef) - return rdef + return rdefdef def precommit_event(self): session = self.session @@ -361,22 +422,24 @@ 'indexed': entity.indexed, 'fulltextindexed': entity.fulltextindexed, 'internationalizable': entity.internationalizable} - rdef = self.init_rdef(**props) - sysource = session.pool.source('system') + # update the in-memory schema first + rdefdef = self.init_rdef(**props) + # then make necessary changes to the system source database + syssource = session.pool.source('system') attrtype = y2sql.type_from_constraints( - sysource.dbhelper, rdef.object, rdef.constraints) + syssource.dbhelper, rdefdef.object, rdefdef.constraints) 
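The rewritten operations above share one shape: apply the change in precommit_event and undo it in revertprecommit_event when a later operation fails before commit. A minimal sketch of that shape, with an illustrative operation that is not part of this changeset::

    from cubicweb.server import hook

    class ToggleFlagOp(hook.Operation):
        """illustrative operation: flip a flag on precommit, restore on revert"""
        target = newvalue = None  # expected as keyword arguments
        oldvalue = None

        def precommit_event(self):
            # apply the change and remember how to undo it
            self.oldvalue = getattr(self.target, 'flag', None)
            self.target.flag = self.newvalue

        def revertprecommit_event(self):
            # called if the transaction is reverted before commit
            self.target.flag = self.oldvalue

Operations are instantiated with the session plus keyword arguments that become attributes, e.g. ToggleFlagOp(session, target=obj, newvalue=True), exactly as the syncschema operations above do with DropColumn(session, table=..., column=...).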
# XXX should be moved somehow into lgdb: sqlite doesn't support to # add a new column with UNIQUE, it should be added after the ALTER TABLE # using ADD INDEX - if sysource.dbdriver == 'sqlite' and 'UNIQUE' in attrtype: + if syssource.dbdriver == 'sqlite' and 'UNIQUE' in attrtype: extra_unique_index = True attrtype = attrtype.replace(' UNIQUE', '') else: extra_unique_index = False # added some str() wrapping query since some backend (eg psycopg) don't # allow unicode queries - table = SQL_PREFIX + rdef.subject - column = SQL_PREFIX + rdef.name + table = SQL_PREFIX + rdefdef.subject + column = SQL_PREFIX + rdefdef.name try: session.system_sql(str('ALTER TABLE %s ADD %s %s' % (table, column, attrtype)), @@ -389,7 +452,7 @@ self.error('error while altering table %s: %s', table, ex) if extra_unique_index or entity.indexed: try: - sysource.create_index(session, table, column, + syssource.create_index(session, table, column, unique=extra_unique_index) except Exception, ex: self.error('error while creating index for %s.%s: %s', @@ -397,47 +460,36 @@ # final relations are not infered, propagate schema = session.vreg.schema try: - eschema = schema.eschema(rdef.subject) + eschema = schema.eschema(rdefdef.subject) except KeyError: return # entity type currently being added # propagate attribute to children classes - rschema = schema.rschema(rdef.name) + rschema = schema.rschema(rdefdef.name) # if relation type has been inserted in the same transaction, its final # attribute is still set to False, so we've to ensure it's False rschema.final = True - # XXX 'infered': True/False, not clear actually - props.update({'constraints': rdef.constraints, - 'description': rdef.description, - 'cardinality': rdef.cardinality, - 'constraints': rdef.constraints, - 'permissions': rdef.get_permissions(), - 'order': rdef.order, - 'infered': False, 'eid': None - }) - cstrtypemap = ss.cstrtype_mapping(session) - groupmap = group_mapping(session) - object = schema.eschema(rdef.object) - for specialization in eschema.specialized_by(False): - if (specialization, rdef.object) in rschema.rdefs: - continue - sperdef = RelationDefinitionSchema(specialization, rschema, - object, props) - ss.execschemarql(session.execute, sperdef, - ss.rdef2rql(sperdef, cstrtypemap, groupmap)) + insert_rdef_on_subclasses(session, eschema, rschema, rdefdef, props) # set default value, using sql for performance and to avoid # modification_date update if default: session.system_sql('UPDATE %s SET %s=%%(default)s' % (table, column), {'default': default}) + def revertprecommit_event(self): + # revert changes on in memory schema + self.session.vreg.schema.del_relation_def( + self.rdefdef.subject, self.rdefdef.name, self.rdefdef.object) + # XXX revert changes on database -class SourceDbCWRelationAdd(SourceDbCWAttributeAdd): + +class CWRelationAddOp(CWAttributeAddOp): """an actual relation has been added: - * if this is an inlined relation, add the necessary column - else if it's the first instance of this relation type, add the - necessary table and set default permissions - * register an operation to add the relation definition to the - instance's schema on commit + + * add the relation definition to the instance's schema + + * if this is an inlined relation, add the necessary column else if it's the + first instance of this relation type, add the necessary table and set + default permissions constraints are handled by specific hooks """ @@ -446,280 +498,281 @@ def precommit_event(self): session = self.session entity = self.entity - rdef = 
self.init_rdef(composite=entity.composite) + # update the in-memory schema first + rdefdef = self.init_rdef(composite=entity.composite) + # then make necessary changes to the system source database schema = session.vreg.schema - rtype = rdef.name + rtype = rdefdef.name rschema = schema.rschema(rtype) # this have to be done before permissions setting if rschema.inlined: # need to add a column if the relation is inlined and if this is the # first occurence of "Subject relation Something" whatever Something - # and if it has not been added during other event of the same - # transaction - key = '%s.%s' % (rdef.subject, rtype) - try: - alreadythere = bool(rschema.objects(rdef.subject)) - except KeyError: - alreadythere = False - if not (alreadythere or - key in session.transaction_data.get('createdattrs', ())): - add_inline_relation_column(session, rdef.subject, rtype) + if len(rschema.objects(rdefdef.subject)) == 1: + add_inline_relation_column(session, rdefdef.subject, rtype) + eschema = schema[rdefdef.subject] + insert_rdef_on_subclasses(session, eschema, rschema, rdefdef, + {'composite': entity.composite}) else: + if rschema.symmetric: + # for symmetric relations, rdefs will store relation definitions + # in both ways (i.e. (subj -> obj) and (obj -> subj)) + relation_already_defined = len(rschema.rdefs) > 2 + else: + relation_already_defined = len(rschema.rdefs) > 1 # need to create the relation if no relation definition in the # schema and if it has not been added during other event of the same # transaction - if not (rschema.subjects() or + if not (relation_already_defined or rtype in session.transaction_data.get('createdtables', ())): - try: - rschema = schema.rschema(rtype) - tablesql = y2sql.rschema2sql(rschema) - except KeyError: - # fake we add it to the schema now to get a correctly - # initialized schema but remove it before doing anything - # more dangerous... 
- rschema = schema.add_relation_type(rdef) - tablesql = y2sql.rschema2sql(rschema) - schema.del_relation_type(rtype) + rschema = schema.rschema(rtype) # create the necessary table - for sql in tablesql.split(';'): + for sql in y2sql.rschema2sql(rschema).split(';'): if sql.strip(): session.system_sql(sql) session.transaction_data.setdefault('createdtables', []).append( rtype) + # XXX revertprecommit_event -class SourceDbRDefUpdate(hook.Operation): + +class RDefDelOp(MemSchemaOperation): + """an actual relation has been removed""" + rdef = None # make pylint happy + + def precommit_event(self): + session = self.session + rdef = self.rdef + rschema = rdef.rtype + # make necessary changes to the system source database first + rdeftype = rschema.final and 'CWAttribute' or 'CWRelation' + execute = session.execute + rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R,' + 'R eid %%(x)s' % rdeftype, {'x': rschema.eid}) + lastrel = rset[0][0] == 0 + # we have to update physical schema systematically for final and inlined + # relations, but only if it's the last instance for this relation type + # for other relations + if (rschema.final or rschema.inlined): + rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R, ' + 'R eid %%(r)s, X from_entity E, E eid %%(e)s' + % rdeftype, + {'r': rschema.eid, 'e': rdef.subject.eid}) + if rset[0][0] == 0 and not session.deleted_in_transaction(rdef.subject.eid): + ptypes = session.transaction_data.setdefault('pendingrtypes', set()) + ptypes.add(rschema.type) + DropColumn(session, table=SQL_PREFIX + str(rdef.subject), + column=SQL_PREFIX + str(rschema)) + elif lastrel: + DropRelationTable(session, str(rschema)) + # then update the in-memory schema + rschema.del_relation_def(rdef.subject, rdef.object) + # if this is the last relation definition of this type, drop associated + # relation type + if lastrel and not session.deleted_in_transaction(rschema.eid): + execute('DELETE CWRType X WHERE X eid %(x)s', {'x': rschema.eid}) + + def revertprecommit_event(self): + # revert changes on in memory schema + # + # Note: add_relation_def takes a RelationDefinition, not a + # RelationDefinitionSchema, needs to fake it + self.rdef.name = str(self.rdef.rtype) + self.session.vreg.schema.add_relation_def(self.rdef) + + + +class RDefUpdateOp(MemSchemaOperation): """actually update some properties of a relation definition""" - rschema = values = None # make pylint happy + rschema = rdefkey = values = None # make pylint happy + rdef = oldvalues = None + indexed_changed = null_allowed_changed = False def precommit_event(self): session = self.session - etype = self.kobj[0] - table = SQL_PREFIX + etype - column = SQL_PREFIX + self.rschema.type + rdef = self.rdef = self.rschema.rdefs[self.rdefkey] + # update the in-memory schema first + self.oldvalues = dict( (attr, getattr(rdef, attr)) for attr in self.values) + rdef.update(self.values) + # then make necessary changes to the system source database + syssource = session.pool.source('system') if 'indexed' in self.values: - sysource = session.pool.source('system') - if self.values['indexed']: - sysource.create_index(session, table, column) - else: - sysource.drop_index(session, table, column) - if 'cardinality' in self.values and self.rschema.final: - syssource = session.pool.source('system') - if not syssource.dbhelper.alter_column_support: - # not supported (and NOT NULL not set by yams in that case, so - # no worry) XXX (syt) then should we set NOT NULL below ?? 
- return - atype = self.rschema.objects(etype)[0] - constraints = self.rschema.rdef(etype, atype).constraints - coltype = y2sql.type_from_constraints(syssource.dbhelper, atype, constraints, - creating=False) - # XXX check self.values['cardinality'][0] actually changed? - syssource.set_null_allowed(self.session, table, column, coltype, - self.values['cardinality'][0] != '1') + syssource.update_rdef_indexed(session, rdef) + self.indexed_changed = True + if 'cardinality' in self.values and (rdef.rtype.final or + rdef.rtype.inlined) \ + and self.values['cardinality'][0] != self.oldvalues['cardinality'][0]: + syssource.update_rdef_null_allowed(self.session, rdef) + self.null_allowed_changed = True if 'fulltextindexed' in self.values: - hook.set_operation(session, 'fti_update_etypes', etype, + hook.set_operation(session, 'fti_update_etypes', rdef.subject, UpdateFTIndexOp) + def revertprecommit_event(self): + if self.rdef is None: + return + # revert changes on in memory schema + self.rdef.update(self.oldvalues) + # revert changes on database + syssource = self.session.pool.source('system') + if self.indexed_changed: + syssource.update_rdef_indexed(self.session, self.rdef) + if self.null_allowed_changed: + syssource.update_rdef_null_allowed(self.session, self.rdef) -class SourceDbCWConstraintAdd(hook.Operation): + +def _set_modifiable_constraints(rdef): + # for proper in-place modification of in-memory schema: if rdef.constraints + # is already a list, reuse it (we're updating multiple constraints of the + # same rdef in the same transactions) + if not isinstance(rdef.constraints, list): + rdef.constraints = list(rdef.constraints) + + +class CWConstraintDelOp(MemSchemaOperation): + """actually remove a constraint of a relation definition""" + rdef = oldcstr = newcstr = None # make pylint happy + size_cstr_changed = unique_changed = False + + def precommit_event(self): + session = self.session + rdef = self.rdef + # in-place modification of in-memory schema first + _set_modifiable_constraints(rdef) + rdef.constraints.remove(self.oldcstr) + # then update database: alter the physical schema on size/unique + # constraint changes + syssource = session.pool.source('system') + cstrtype = self.oldcstr.type() + if cstrtype == 'SizeConstraint': + syssource.update_rdef_column(session, rdef) + self.size_cstr_changed = True + elif cstrtype == 'UniqueConstraint': + syssource.update_rdef_unique(session, rdef) + self.unique_changed = True + + def revertprecommit_event(self): + # revert changes on in memory schema + if self.newcstr is not None: + self.rdef.constraints.remove(self.newcstr) + if self.oldcstr is not None: + self.rdef.constraints.append(self.oldcstr) + # revert changes on database + syssource = self.session.pool.source('system') + if self.size_cstr_changed: + syssource.update_rdef_column(self.session, self.rdef) + if self.unique_changed: + syssource.update_rdef_unique(self.session, self.rdef) + + +class CWConstraintAddOp(CWConstraintDelOp): """actually update constraint of a relation definition""" entity = None # make pylint happy - cancelled = False def precommit_event(self): - rdef = self.entity.reverse_constrained_by[0] session = self.session + rdefentity = self.entity.reverse_constrained_by[0] # when the relation is added in the same transaction, the constraint # object is created by the operation adding the attribute or relation, # so there is nothing to do here - if session.added_in_transaction(rdef.eid): + if session.added_in_transaction(rdefentity.eid): return - rdefschema = 
session.vreg.schema.schema_by_eid(rdef.eid) - subjtype, rtype, objtype = rdefschema.as_triple() + rdef = self.rdef = session.vreg.schema.schema_by_eid(rdefentity.eid) cstrtype = self.entity.type - oldcstr = rtype.rdef(subjtype, objtype).constraint_by_type(cstrtype) - newcstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value) - table = SQL_PREFIX + str(subjtype) - column = SQL_PREFIX + str(rtype) - # alter the physical schema on size constraint changes - if newcstr.type() == 'SizeConstraint' and ( - oldcstr is None or oldcstr.max != newcstr.max): - syssource = self.session.pool.source('system') - card = rtype.rdef(subjtype, objtype).cardinality - coltype = y2sql.type_from_constraints(syssource.dbhelper, objtype, - [newcstr], creating=False) - try: - syssource.change_col_type(session, table, column, coltype, card[0] != '1') - self.info('altered column %s of table %s: now %s', - column, table, coltype) - except Exception, ex: - # not supported by sqlite for instance - self.error('error while altering table %s: %s', table, ex) + oldcstr = self.oldcstr = rdef.constraint_by_type(cstrtype) + newcstr = self.newcstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value) + # in-place modification of in-memory schema first + _set_modifiable_constraints(rdef) + newcstr.eid = self.entity.eid + if oldcstr is not None: + rdef.constraints.remove(oldcstr) + rdef.constraints.append(newcstr) + # then update database: alter the physical schema on size/unique + # constraint changes + syssource = session.pool.source('system') + if cstrtype == 'SizeConstraint' and (oldcstr is None or + oldcstr.max != newcstr.max): + syssource.update_rdef_column(session, rdef) + self.size_cstr_changed = True elif cstrtype == 'UniqueConstraint' and oldcstr is None: - session.pool.source('system').create_index( - self.session, table, column, unique=True) + syssource.update_rdef_unique(session, rdef) + self.unique_changed = True - -class SourceDbCWConstraintDel(hook.Operation): - """actually remove a constraint of a relation definition""" - rtype = subjtype = None # make pylint happy - +class CWUniqueTogetherConstraintAddOp(MemSchemaOperation): + entity = None # make pylint happy def precommit_event(self): - cstrtype = self.cstr.type() - table = SQL_PREFIX + str(self.rdef.subject) - column = SQL_PREFIX + str(self.rdef.rtype) - # alter the physical schema on size/unique constraint changes - if cstrtype == 'SizeConstraint': - syssource = self.session.pool.source('system') - coltype = y2sql.type_from_constraints(syssource.dbhelper, - self.rdef.object, [], - creating=False) - try: - syssource.change_col_type(session, table, column, coltype, - self.rdef.cardinality[0] != '1') - self.info('altered column %s of table %s: now %s', - column, table, coltype) - except Exception, ex: - # not supported by sqlite for instance - self.error('error while altering table %s: %s', table, ex) - elif cstrtype == 'UniqueConstraint': - self.session.pool.source('system').drop_index( - self.session, table, column, unique=True) + session = self.session + prefix = SQL_PREFIX + table = '%s%s' % (prefix, self.entity.constraint_of[0].name) + cols = ['%s%s' % (prefix, r.rtype.name) + for r in self.entity.relations] + dbhelper= session.pool.source('system').dbhelper + sqls = dbhelper.sqls_create_multicol_unique_index(table, cols) + for sql in sqls: + session.system_sql(sql) + + # XXX revertprecommit_event + + def postcommit_event(self): + eschema = self.session.vreg.schema.schema_by_eid(self.entity.constraint_of[0].eid) + attrs = [r.rtype.name for r in 
self.entity.relations] + eschema._unique_together.append(attrs) +class CWUniqueTogetherConstraintDelOp(MemSchemaOperation): + entity = oldcstr = None # for pylint + cols = [] # for pylint + def precommit_event(self): + session = self.session + prefix = SQL_PREFIX + table = '%s%s' % (prefix, self.entity.type) + dbhelper= session.pool.source('system').dbhelper + cols = ['%s%s' % (prefix, c) for c in self.cols] + sqls = dbhelper.sqls_drop_multicol_unique_index(table, cols) + for sql in sqls: + session.system_sql(sql) + + # XXX revertprecommit_event + + def postcommit_event(self): + eschema = self.session.vreg.schema.schema_by_eid(self.entity.eid) + cols = set(self.cols) + unique_together = [ut for ut in eschema._unique_together + if set(ut) != cols] + eschema._unique_together = unique_together # operations for in-memory schema synchronization ############################# -class MemSchemaCWETypeAdd(MemSchemaEarlyOperation): - """actually add the entity type to the instance's schema""" - eid = None # make pylint happy - def commit_event(self): - self.session.vreg.schema.add_entity_type(self.kobj) - - -class MemSchemaCWETypeRename(MemSchemaOperation): - """this operation updates physical storage accordingly""" - oldname = newname = None # make pylint happy - - def commit_event(self): - self.session.vreg.schema.rename_entity_type(self.oldname, self.newname) - - class MemSchemaCWETypeDel(MemSchemaOperation): """actually remove the entity type from the instance's schema""" - def commit_event(self): - try: - # del_entity_type also removes entity's relations - self.session.vreg.schema.del_entity_type(self.kobj) - except KeyError: - # s/o entity type have already been deleted - pass + def postcommit_event(self): + # del_entity_type also removes entity's relations + self.session.vreg.schema.del_entity_type(self.etype) -class MemSchemaCWRTypeAdd(MemSchemaEarlyOperation): +class MemSchemaCWRTypeAdd(MemSchemaOperation): """actually add the relation type to the instance's schema""" - eid = None # make pylint happy - def commit_event(self): - self.session.vreg.schema.add_relation_type(self.kobj) - + def precommit_event(self): + self.session.vreg.schema.add_relation_type(self.rtypedef) -class MemSchemaCWRTypeUpdate(MemSchemaOperation): - """actually update some properties of a relation definition""" - rschema = values = None # make pylint happy - - def commit_event(self): - # structure should be clean, not need to remove entity's relations - # at this point - self.rschema.__dict__.update(self.values) + def revertprecommit_event(self): + self.session.vreg.schema.del_relation_type(self.rtypedef.name) class MemSchemaCWRTypeDel(MemSchemaOperation): """actually remove the relation type from the instance's schema""" - def commit_event(self): + def postcommit_event(self): try: - self.session.vreg.schema.del_relation_type(self.kobj) + self.session.vreg.schema.del_relation_type(self.rtype) except KeyError: # s/o entity type have already been deleted pass -class MemSchemaRDefAdd(MemSchemaEarlyOperation): - """actually add the attribute relation definition to the instance's - schema - """ - def commit_event(self): - self.session.vreg.schema.add_relation_def(self.kobj) - - -class MemSchemaRDefUpdate(MemSchemaOperation): - """actually update some properties of a relation definition""" - rschema = values = None # make pylint happy - - def commit_event(self): - # structure should be clean, not need to remove entity's relations - # at this point - self.rschema.rdefs[self.kobj].update(self.values) - - -class 
MemSchemaRDefDel(MemSchemaOperation): - """actually remove the relation definition from the instance's schema""" - def commit_event(self): - subjtype, rtype, objtype = self.kobj - try: - self.session.vreg.schema.del_relation_def(subjtype, rtype, objtype) - except KeyError: - # relation type may have been already deleted - pass - - -class MemSchemaCWConstraintAdd(MemSchemaOperation): - """actually update constraint of a relation definition - - has to be called before SourceDbCWConstraintAdd - """ - cancelled = False - - def precommit_event(self): - rdef = self.entity.reverse_constrained_by[0] - # when the relation is added in the same transaction, the constraint - # object is created by the operation adding the attribute or relation, - # so there is nothing to do here - if self.session.added_in_transaction(rdef.eid): - self.cancelled = True - return - rdef = self.session.vreg.schema.schema_by_eid(rdef.eid) - self.prepare_constraints(rdef) - cstrtype = self.entity.type - self.cstr = rdef.constraint_by_type(cstrtype) - self.newcstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value) - self.newcstr.eid = self.entity.eid - - def commit_event(self): - if self.cancelled: - return - # in-place modification - if not self.cstr is None: - self.constraints.remove(self.cstr) - self.constraints.append(self.newcstr) - - -class MemSchemaCWConstraintDel(MemSchemaOperation): - """actually remove a constraint of a relation definition - - has to be called before SourceDbCWConstraintDel - """ - rtype = subjtype = objtype = None # make pylint happy - def precommit_event(self): - self.prepare_constraints(self.rdef) - - def commit_event(self): - self.constraints.remove(self.cstr) - - class MemSchemaPermissionAdd(MemSchemaOperation): """synchronize schema when a *_permission relation has been added on a group """ - def commit_event(self): + def precommit_event(self): """the observed connections pool has been commited""" try: erschema = self.session.vreg.schema.schema_by_eid(self.eid) @@ -740,13 +793,15 @@ perms.append(perm) erschema.set_action_permissions(self.action, perms) + # XXX revertprecommit_event + class MemSchemaPermissionDel(MemSchemaPermissionAdd): """synchronize schema when a *_permission relation has been deleted from a group """ - def commit_event(self): + def precommit_event(self): """the observed connections pool has been commited""" try: erschema = self.session.vreg.schema.schema_by_eid(self.eid) @@ -771,19 +826,23 @@ self.error('can\'t remove permission %s for %s on %s', perm, self.action, erschema) + # XXX revertprecommit_event + class MemSchemaSpecializesAdd(MemSchemaOperation): - def commit_event(self): + def precommit_event(self): eschema = self.session.vreg.schema.schema_by_eid(self.etypeeid) parenteschema = self.session.vreg.schema.schema_by_eid(self.parentetypeeid) eschema._specialized_type = parenteschema.type parenteschema._specialized_by.append(eschema.type) + # XXX revertprecommit_event + class MemSchemaSpecializesDel(MemSchemaOperation): - def commit_event(self): + def precommit_event(self): try: eschema = self.session.vreg.schema.schema_by_eid(self.etypeeid) parenteschema = self.session.vreg.schema.schema_by_eid(self.parentetypeeid) @@ -793,10 +852,7 @@ eschema._specialized_type = None parenteschema._specialized_by.remove(eschema.type) - -class SyncSchemaHook(hook.Hook): - __abstract__ = True - category = 'syncschema' + # XXX revertprecommit_event # CWEType hooks ################################################################ @@ -808,18 +864,19 @@ * instantiate an operation to 
delete the entity type on commit """ __regid__ = 'syncdelcwetype' - __select__ = SyncSchemaHook.__select__ & implements('CWEType') + __select__ = SyncSchemaHook.__select__ & is_instance('CWEType') events = ('before_delete_entity',) def __call__(self): # final entities can't be deleted, don't care about that name = self.entity.name - if name in CORE_ETYPES: + if name in CORE_TYPES: raise ValidationError(self.entity.eid, {None: self._cw._('can\'t be deleted')}) # delete every entities of this type - self._cw.execute('DELETE %s X' % name) + if not name in ETYPE_NAME_MAP: + self._cw.execute('DELETE %s X' % name) + MemSchemaCWETypeDel(self._cw, etype=name) DropTable(self._cw, table=SQL_PREFIX + name) - MemSchemaCWETypeDel(self._cw, name) class AfterDelCWETypeHook(DelCWETypeHook): @@ -847,42 +904,7 @@ entity = self.entity if entity.get('final'): return - schema = self._cw.vreg.schema - name = entity['name'] - etype = ybo.EntityType(name=name, description=entity.get('description'), - meta=entity.get('meta')) # don't care about final - # fake we add it to the schema now to get a correctly initialized schema - # but remove it before doing anything more dangerous... - schema = self._cw.vreg.schema - eschema = schema.add_entity_type(etype) - # generate table sql and rql to add metadata - tablesql = y2sql.eschema2sql(self._cw.pool.source('system').dbhelper, - eschema, prefix=SQL_PREFIX) - rdefrqls = [] - gmap = group_mapping(self._cw) - cmap = ss.cstrtype_mapping(self._cw) - for rtype in (META_RTYPES - VIRTUAL_RTYPES): - rschema = schema[rtype] - sampletype = rschema.subjects()[0] - desttype = rschema.objects()[0] - rdef = copy(rschema.rdef(sampletype, desttype)) - rdef.subject = mock_object(eid=entity.eid) - mock = mock_object(eid=None) - rdefrqls.append( (mock, tuple(ss.rdef2rql(rdef, cmap, gmap))) ) - # now remove it ! 
- schema.del_entity_type(name) - # create the necessary table - for sql in tablesql.split(';'): - if sql.strip(): - self._cw.system_sql(sql) - # register operation to modify the schema on commit - # this have to be done before adding other relations definitions - # or permission settings - etype.eid = entity.eid - MemSchemaCWETypeAdd(self._cw, etype) - # add meta relations - for rdef, relrqls in rdefrqls: - ss.execschemarql(self._cw.execute, rdef, relrqls) + CWETypeAddOp(self._cw, entity=entity) class BeforeUpdateCWETypeHook(DelCWETypeHook): @@ -895,12 +917,9 @@ check_valid_changes(self._cw, entity, ro_attrs=('final',)) # don't use getattr(entity, attr), we would get the modified value if any if 'name' in entity.edited_attributes: - newname = entity.pop('name') - oldname = entity.name + oldname, newname = hook.entity_oldnewvalue(entity, 'name') if newname.lower() != oldname.lower(): - SourceDbCWETypeRename(self._cw, oldname=oldname, newname=newname) - MemSchemaCWETypeRename(self._cw, oldname=oldname, newname=newname) - entity['name'] = newname + CWETypeRenameOp(self._cw, oldname=oldname, newname=newname) # CWRType hooks ################################################################ @@ -912,19 +931,19 @@ * instantiate an operation to delete the relation type on commit """ __regid__ = 'syncdelcwrtype' - __select__ = SyncSchemaHook.__select__ & implements('CWRType') + __select__ = SyncSchemaHook.__select__ & is_instance('CWRType') events = ('before_delete_entity',) def __call__(self): name = self.entity.name - if name in CORE_RTYPES: + if name in CORE_TYPES: raise ValidationError(self.entity.eid, {None: self._cw._('can\'t be deleted')}) # delete relation definitions using this relation type self._cw.execute('DELETE CWAttribute X WHERE X relation_type Y, Y eid %(x)s', {'x': self.entity.eid}) self._cw.execute('DELETE CWRelation X WHERE X relation_type Y, Y eid %(x)s', {'x': self.entity.eid}) - MemSchemaCWRTypeDel(self._cw, name) + MemSchemaCWRTypeDel(self._cw, rtype=name) class AfterAddCWRTypeHook(DelCWRTypeHook): @@ -939,13 +958,12 @@ def __call__(self): entity = self.entity - rtype = ybo.RelationType(name=entity.name, - description=entity.get('description'), - meta=entity.get('meta', False), - inlined=entity.get('inlined', False), - symmetric=entity.get('symmetric', False), - eid=entity.eid) - MemSchemaCWRTypeAdd(self._cw, rtype) + rtypedef = ybo.RelationType(name=entity.name, + description=entity.description, + inlined=entity.get('inlined', False), + symmetric=entity.get('symmetric', False), + eid=entity.eid) + MemSchemaCWRTypeAdd(self._cw, rtypedef=rtypedef) class BeforeUpdateCWRTypeHook(DelCWRTypeHook): @@ -964,9 +982,8 @@ newvalues[prop] = entity[prop] if newvalues: rschema = self._cw.vreg.schema.rschema(entity.name) - SourceDbCWRTypeUpdate(self._cw, rschema=rschema, entity=entity, - values=newvalues) - MemSchemaCWRTypeUpdate(self._cw, rschema=rschema, values=newvalues) + CWRTypeUpdateOp(self._cw, rschema=rschema, entity=entity, + values=newvalues) class AfterDelRelationTypeHook(SyncSchemaHook): @@ -984,9 +1001,12 @@ def __call__(self): session = self._cw - rdef = session.vreg.schema.schema_by_eid(self.eidfrom) + try: + rdef = session.vreg.schema.schema_by_eid(self.eidfrom) + except KeyError: + self.critical('cant get schema rdef associated to %s', self.eidfrom) + return subjschema, rschema, objschema = rdef.as_triple() - pendings = session.transaction_data.get('pendingeids', ()) pendingrdefs = session.transaction_data.setdefault('pendingrdefs', set()) # first delete existing relation 
if necessary if rschema.final: @@ -995,107 +1015,89 @@ else: rdeftype = 'CWRelation' pendingrdefs.add((subjschema, rschema, objschema)) - if not (subjschema.eid in pendings or objschema.eid in pendings): + if not (session.deleted_in_transaction(subjschema.eid) or + session.deleted_in_transaction(objschema.eid)): session.execute('DELETE X %s Y WHERE X is %s, Y is %s' % (rschema, subjschema, objschema)) - execute = session.execute - rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R,' - 'R eid %%(x)s' % rdeftype, {'x': self.eidto}) - lastrel = rset[0][0] == 0 - # we have to update physical schema systematically for final and inlined - # relations, but only if it's the last instance for this relation type - # for other relations - - if (rschema.final or rschema.inlined): - rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R, ' - 'R eid %%(x)s, X from_entity E, E name %%(name)s' - % rdeftype, {'x': self.eidto, 'name': str(subjschema)}) - if rset[0][0] == 0 and not subjschema.eid in pendings: - ptypes = session.transaction_data.setdefault('pendingrtypes', set()) - ptypes.add(rschema.type) - DropColumn(session, table=SQL_PREFIX + subjschema.type, - column=SQL_PREFIX + rschema.type) - elif lastrel: - DropRelationTable(session, rschema.type) - # if this is the last instance, drop associated relation type - if lastrel and not self.eidto in pendings: - execute('DELETE CWRType X WHERE X eid %(x)s', {'x': self.eidto}) - MemSchemaRDefDel(session, (subjschema, rschema, objschema)) + RDefDelOp(session, rdef=rdef) # CWAttribute / CWRelation hooks ############################################### class AfterAddCWAttributeHook(SyncSchemaHook): __regid__ = 'syncaddcwattribute' - __select__ = SyncSchemaHook.__select__ & implements('CWAttribute') + __select__ = SyncSchemaHook.__select__ & is_instance('CWAttribute') events = ('after_add_entity',) def __call__(self): - SourceDbCWAttributeAdd(self._cw, entity=self.entity) + CWAttributeAddOp(self._cw, entity=self.entity) class AfterAddCWRelationHook(AfterAddCWAttributeHook): __regid__ = 'syncaddcwrelation' - __select__ = SyncSchemaHook.__select__ & implements('CWRelation') + __select__ = SyncSchemaHook.__select__ & is_instance('CWRelation') def __call__(self): - SourceDbCWRelationAdd(self._cw, entity=self.entity) + CWRelationAddOp(self._cw, entity=self.entity) class AfterUpdateCWRDefHook(SyncSchemaHook): __regid__ = 'syncaddcwattribute' - __select__ = SyncSchemaHook.__select__ & implements('CWAttribute', - 'CWRelation') + __select__ = SyncSchemaHook.__select__ & is_instance('CWAttribute', + 'CWRelation') events = ('before_update_entity',) def __call__(self): entity = self.entity if self._cw.deleted_in_transaction(entity.eid): return - desttype = entity.otype.name + subjtype = entity.stype.name + objtype = entity.otype.name rschema = self._cw.vreg.schema[entity.rtype.name] + # note: do not access schema rdef here, it may be added later by an + # operation newvalues = {} - for prop in RelationDefinitionSchema.rproperty_defs(desttype): + for prop in RelationDefinitionSchema.rproperty_defs(objtype): if prop == 'constraints': continue if prop == 'order': - prop = 'ordernum' - if prop in entity.edited_attributes: - old, new = hook.entity_oldnewvalue(entity, prop) + attr = 'ordernum' + else: + attr = prop + if attr in entity.edited_attributes: + old, new = hook.entity_oldnewvalue(entity, attr) if old != new: - newvalues[prop] = entity[prop] + newvalues[prop] = new if newvalues: - subjtype = entity.stype.name - MemSchemaRDefUpdate(self._cw, 
kobj=(subjtype, desttype), - rschema=rschema, values=newvalues) - SourceDbRDefUpdate(self._cw, kobj=(subjtype, desttype), - rschema=rschema, values=newvalues) + RDefUpdateOp(self._cw, rschema=rschema, rdefkey=(subjtype, objtype), + values=newvalues) # constraints synchronization hooks ############################################ class AfterAddCWConstraintHook(SyncSchemaHook): __regid__ = 'syncaddcwconstraint' - __select__ = SyncSchemaHook.__select__ & implements('CWConstraint') + __select__ = SyncSchemaHook.__select__ & is_instance('CWConstraint') events = ('after_add_entity', 'after_update_entity') def __call__(self): - MemSchemaCWConstraintAdd(self._cw, entity=self.entity) - SourceDbCWConstraintAdd(self._cw, entity=self.entity) + CWConstraintAddOp(self._cw, entity=self.entity) class AfterAddConstrainedByHook(SyncSchemaHook): - __regid__ = 'syncdelconstrainedby' + __regid__ = 'syncaddconstrainedby' __select__ = SyncSchemaHook.__select__ & hook.match_rtype('constrained_by') events = ('after_add_relation',) def __call__(self): if self._cw.added_in_transaction(self.eidfrom): + # used by get_constraints() which is called in CWAttributeAddOp self._cw.transaction_data.setdefault(self.eidfrom, []).append(self.eidto) -class BeforeDeleteConstrainedByHook(AfterAddConstrainedByHook): +class BeforeDeleteConstrainedByHook(SyncSchemaHook): __regid__ = 'syncdelconstrainedby' + __select__ = SyncSchemaHook.__select__ & hook.match_rtype('constrained_by') events = ('before_delete_relation',) def __call__(self): @@ -1109,8 +1111,33 @@ except IndexError: self._cw.critical('constraint type no more accessible') else: - SourceDbCWConstraintDel(self._cw, rdef=rdef, cstr=cstr) - MemSchemaCWConstraintDel(self._cw, rdef=rdef, cstr=cstr) + CWConstraintDelOp(self._cw, rdef=rdef, oldcstr=cstr) + +# unique_together constraints +# XXX: use setoperations and before_add_relation here (on constraint_of and relations) +class AfterAddCWUniqueTogetherConstraintHook(SyncSchemaHook): + __regid__ = 'syncadd_cwuniquetogether_constraint' + __select__ = SyncSchemaHook.__select__ & is_instance('CWUniqueTogetherConstraint') + events = ('after_add_entity', 'after_update_entity') + + def __call__(self): + CWUniqueTogetherConstraintAddOp(self._cw, entity=self.entity) + + +class BeforeDeleteConstraintOfHook(SyncSchemaHook): + __regid__ = 'syncdelconstraintof' + __select__ = SyncSchemaHook.__select__ & hook.match_rtype('constraint_of') + events = ('before_delete_relation',) + + def __call__(self): + if self._cw.deleted_in_transaction(self.eidto): + return + schema = self._cw.vreg.schema + cstr = self._cw.entity_from_eid(self.eidfrom) + entity = schema.schema_by_eid(self.eidto) + cols = [r.rtype.name + for r in cstr.relations] + CWUniqueTogetherConstraintDelOp(self._cw, entity=entity, oldcstr=cstr, cols=cols) # permissions synchronization hooks ############################################ @@ -1176,7 +1203,7 @@ still_fti = list(schema[etype].indexable_attributes()) for entity in rset.entities(): source.fti_unindex_entity(session, entity.eid) - for container in entity.fti_containers(): + for container in entity.cw_adapt_to('IFTIndexable').fti_containers(): if still_fti or container is not entity: source.fti_unindex_entity(session, container.eid) source.fti_index_entity(session, container) diff -r f4d1d5d9ccbb -r 90f2f20367bc hooks/syncsession.py --- a/hooks/syncsession.py Tue Jul 27 12:36:03 2010 +0200 +++ b/hooks/syncsession.py Wed Nov 03 16:38:28 2010 +0100 @@ -22,7 +22,7 @@ from yams.schema import role_name from cubicweb import 
UnknownProperty, ValidationError, BadConnectionId -from cubicweb.selectors import implements +from cubicweb.selectors import is_instance from cubicweb.server import hook @@ -108,7 +108,7 @@ class CloseDeletedUserSessionsHook(SyncSessionHook): __regid__ = 'closession' - __select__ = SyncSessionHook.__select__ & implements('CWUser') + __select__ = SyncSessionHook.__select__ & is_instance('CWUser') events = ('after_delete_entity',) def __call__(self): @@ -152,7 +152,7 @@ class AddCWPropertyHook(SyncSessionHook): __regid__ = 'addcwprop' - __select__ = SyncSessionHook.__select__ & implements('CWProperty') + __select__ = SyncSessionHook.__select__ & is_instance('CWProperty') events = ('after_add_entity',) def __call__(self): diff -r f4d1d5d9ccbb -r 90f2f20367bc hooks/test/unittest_hooks.py --- a/hooks/test/unittest_hooks.py Tue Jul 27 12:36:03 2010 +0200 +++ b/hooks/test/unittest_hooks.py Wed Nov 03 16:38:28 2010 +0100 @@ -49,7 +49,7 @@ self.commit() def test_delete_required_relations_object(self): - self.skip('no sample in the schema ! YAGNI ? Kermaat ?') + self.skipTest('no sample in the schema ! YAGNI ? Kermaat ?') def test_static_vocabulary_check(self): self.assertRaises(ValidationError, @@ -63,14 +63,14 @@ self.commit) def test_inlined(self): - self.assertEquals(self.repo.schema['sender'].inlined, True) + self.assertEqual(self.repo.schema['sender'].inlined, True) self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') self.execute('INSERT EmailPart X: X content_format "text/plain", X ordernum 1, X content "this is a test"') eeid = self.execute('INSERT Email X: X messageid "<1234>", X subject "test", X sender Y, X recipients Y, X parts P ' 'WHERE Y is EmailAddress, P is EmailPart')[0][0] self.execute('SET X sender Y WHERE X is Email, Y is EmailAddress') rset = self.execute('Any S WHERE X sender S, X eid %s' % eeid) - self.assertEquals(len(rset), 1) + self.assertEqual(len(rset), 1) def test_composite_1(self): self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') @@ -81,10 +81,10 @@ self.commit() self.execute('DELETE Email X') rset = self.execute('Any X WHERE X is EmailPart') - self.assertEquals(len(rset), 1) + self.assertEqual(len(rset), 1) self.commit() rset = self.execute('Any X WHERE X is EmailPart') - self.assertEquals(len(rset), 0) + self.assertEqual(len(rset), 0) def test_composite_2(self): self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') @@ -96,7 +96,7 @@ self.execute('DELETE EmailPart X') self.commit() rset = self.execute('Any X WHERE X is EmailPart') - self.assertEquals(len(rset), 0) + self.assertEqual(len(rset), 0) def test_composite_redirection(self): self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", X alias "hop"') @@ -110,77 +110,74 @@ self.execute('SET X parts Y WHERE X messageid "<2345>"') self.commit() rset = self.execute('Any X WHERE X is EmailPart') - self.assertEquals(len(rset), 1) - self.assertEquals(rset.get_entity(0, 0).reverse_parts[0].messageid, '<2345>') + self.assertEqual(len(rset), 1) + self.assertEqual(rset.get_entity(0, 0).reverse_parts[0].messageid, '<2345>') def test_unsatisfied_constraints(self): - releid = self.execute('INSERT CWRelation X: X from_entity FE, X relation_type RT, X to_entity TE ' - 'WHERE FE name "CWUser", RT name "in_group", TE name "String"')[0][0] - self.execute('SET X read_permission Y WHERE X eid %(x)s, Y name "managers"', - {'x': releid}, 'x') + releid = self.execute('SET U in_group G WHERE G name "owners", U login "admin"')[0][0] ex = 
self.assertRaises(ValidationError, self.commit) - self.assertEquals(ex.errors, - {'to_entity-object': 'RQLConstraint O final FALSE failed'}) + self.assertEqual(ex.errors, + {'in_group-object': u'RQLConstraint NOT O name "owners" failed'}) def test_html_tidy_hook(self): req = self.request() entity = req.create_entity('Workflow', name=u'wf1', description_format=u'text/html', description=u'yo') - self.assertEquals(entity.description, u'yo') + self.assertEqual(entity.description, u'yo') entity = req.create_entity('Workflow', name=u'wf2', description_format=u'text/html', description=u'yo') - self.assertEquals(entity.description, u'yo') + self.assertEqual(entity.description, u'yo') entity = req.create_entity('Workflow', name=u'wf3', description_format=u'text/html', description=u'yo') - self.assertEquals(entity.description, u'yo') + self.assertEqual(entity.description, u'yo') entity = req.create_entity('Workflow', name=u'wf4', description_format=u'text/html', description=u'R&D') - self.assertEquals(entity.description, u'R&D') + self.assertEqual(entity.description, u'R&D') entity = req.create_entity('Workflow', name=u'wf5', description_format=u'text/html', description=u"
        c'est l'été") - self.assertEquals(entity.description, u"
        c'est l'été
        ") + self.assertEqual(entity.description, u"
        c'est l'été
        ") def test_nonregr_html_tidy_hook_no_update(self): entity = self.request().create_entity('Workflow', name=u'wf1', description_format=u'text/html', description=u'yo') entity.set_attributes(name=u'wf2') - self.assertEquals(entity.description, u'yo') + self.assertEqual(entity.description, u'yo') entity.set_attributes(description=u'R&D

        yo') entity.pop('description') - self.assertEquals(entity.description, u'R&D

        yo

        ') + self.assertEqual(entity.description, u'R&D

        yo

        ') def test_metadata_cwuri(self): entity = self.request().create_entity('Workflow', name=u'wf1') - self.assertEquals(entity.cwuri, self.repo.config['base-url'] + 'eid/%s' % entity.eid) + self.assertEqual(entity.cwuri, self.repo.config['base-url'] + 'eid/%s' % entity.eid) def test_metadata_creation_modification_date(self): _now = datetime.now() entity = self.request().create_entity('Workflow', name=u'wf1') - self.assertEquals((entity.creation_date - _now).seconds, 0) - self.assertEquals((entity.modification_date - _now).seconds, 0) + self.assertEqual((entity.creation_date - _now).seconds, 0) + self.assertEqual((entity.modification_date - _now).seconds, 0) def test_metadata_created_by(self): entity = self.request().create_entity('Bookmark', title=u'wf1', path=u'/view') self.commit() # fire operations - self.assertEquals(len(entity.created_by), 1) # make sure we have only one creator - self.assertEquals(entity.created_by[0].eid, self.session.user.eid) + self.assertEqual(len(entity.created_by), 1) # make sure we have only one creator + self.assertEqual(entity.created_by[0].eid, self.session.user.eid) def test_metadata_owned_by(self): entity = self.request().create_entity('Bookmark', title=u'wf1', path=u'/view') self.commit() # fire operations - self.assertEquals(len(entity.owned_by), 1) # make sure we have only one owner - self.assertEquals(entity.owned_by[0].eid, self.session.user.eid) + self.assertEqual(len(entity.owned_by), 1) # make sure we have only one owner + self.assertEqual(entity.owned_by[0].eid, self.session.user.eid) def test_user_login_stripped(self): u = self.create_user(' joe ') tname = self.execute('Any L WHERE E login L, E eid %(e)s', {'e': u.eid})[0][0] - self.assertEquals(tname, 'joe') + self.assertEqual(tname, 'joe') self.execute('SET X login " jijoe " WHERE X eid %(x)s', {'x': u.eid}) tname = self.execute('Any L WHERE E login L, E eid %(e)s', {'e': u.eid})[0][0] - self.assertEquals(tname, 'jijoe') + self.assertEqual(tname, 'jijoe') @@ -201,15 +198,15 @@ def test_user_group_synchronization(self): user = self.session.user - self.assertEquals(user.groups, set(('managers',))) + self.assertEqual(user.groups, set(('managers',))) self.execute('SET X in_group G WHERE X eid %s, G name "guests"' % user.eid) - self.assertEquals(user.groups, set(('managers',))) + self.assertEqual(user.groups, set(('managers',))) self.commit() - self.assertEquals(user.groups, set(('managers', 'guests'))) + self.assertEqual(user.groups, set(('managers', 'guests'))) self.execute('DELETE X in_group G WHERE X eid %s, G name "guests"' % user.eid) - self.assertEquals(user.groups, set(('managers', 'guests'))) + self.assertEqual(user.groups, set(('managers', 'guests'))) self.commit() - self.assertEquals(user.groups, set(('managers',))) + self.assertEqual(user.groups, set(('managers',))) def test_user_composite_owner(self): ueid = self.create_user('toto').eid @@ -217,7 +214,7 @@ self.execute('INSERT EmailAddress X: X address "toto@logilab.fr", U use_email X ' 'WHERE U login "toto"') self.commit() - self.assertEquals(self.execute('Any A WHERE X owned_by U, U use_email X,' + self.assertEqual(self.execute('Any A WHERE X owned_by U, U use_email X,' 'U login "toto", X address A')[0][0], 'toto@logilab.fr') @@ -233,23 +230,23 @@ def test_unexistant_eproperty(self): ex = self.assertRaises(ValidationError, self.execute, 'INSERT CWProperty X: X pkey "bla.bla", X value "hop", X for_user U') - self.assertEquals(ex.errors, {'pkey-subject': 'unknown property key'}) + self.assertEqual(ex.errors, {'pkey-subject': 
'unknown property key'}) ex = self.assertRaises(ValidationError, self.execute, 'INSERT CWProperty X: X pkey "bla.bla", X value "hop"') - self.assertEquals(ex.errors, {'pkey-subject': 'unknown property key'}) + self.assertEqual(ex.errors, {'pkey-subject': 'unknown property key'}) def test_site_wide_eproperty(self): ex = self.assertRaises(ValidationError, self.execute, 'INSERT CWProperty X: X pkey "ui.site-title", X value "hop", X for_user U') - self.assertEquals(ex.errors, {'for_user-subject': "site-wide property can't be set for user"}) + self.assertEqual(ex.errors, {'for_user-subject': "site-wide property can't be set for user"}) def test_bad_type_eproperty(self): ex = self.assertRaises(ValidationError, self.execute, 'INSERT CWProperty X: X pkey "ui.language", X value "hop", X for_user U') - self.assertEquals(ex.errors, {'value-subject': u'unauthorized value'}) + self.assertEqual(ex.errors, {'value-subject': u'unauthorized value'}) ex = self.assertRaises(ValidationError, self.execute, 'INSERT CWProperty X: X pkey "ui.language", X value "hop"') - self.assertEquals(ex.errors, {'value-subject': u'unauthorized value'}) + self.assertEqual(ex.errors, {'value-subject': u'unauthorized value'}) class SchemaHooksTC(CubicWebTC): @@ -269,7 +266,7 @@ self.execute('INSERT CWUser X: X login "admin"') except ValidationError, ex: self.assertIsInstance(ex.entity, int) - self.assertEquals(ex.errors, {'login-subject': 'the value "admin" is already used, use another one'}) + self.assertEqual(ex.errors, {'login-subject': 'the value "admin" is already used, use another one'}) if __name__ == '__main__': diff -r f4d1d5d9ccbb -r 90f2f20367bc hooks/test/unittest_syncschema.py --- a/hooks/test/unittest_syncschema.py Tue Jul 27 12:36:03 2010 +0200 +++ b/hooks/test/unittest_syncschema.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,6 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
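# Hedged sketch, not part of the changeset: it only illustrates how the
# CWUniqueTogetherConstraintAddOp added earlier in this patch derives the SQL
# it runs.  The operation prefixes the constrained entity type and its
# attribute names with SQL_PREFIX (assumed to be 'cw_' here) and hands the
# resulting table/column names to the dbhelper's
# sqls_create_multicol_unique_index(); the real statements come from
# logilab-database, so the CREATE UNIQUE INDEX form below is only an
# approximation of the naming scheme.
def sketch_unique_together_sql(etype, rtypes, prefix='cw_'):
    table = '%s%s' % (prefix, etype)                      # e.g. cw_CWUser
    cols = ['%s%s' % (prefix, rtype) for rtype in rtypes]
    idxname = ('unique_%s_%s' % (etype, '_'.join(rtypes))).lower()
    return ['CREATE UNIQUE INDEX %s ON %s(%s);'
            % (idxname, table, ', '.join(cols))]

# hypothetical unique_together = [('firstname', 'surname')] declaration on CWUser
for statement in sketch_unique_together_sql('CWUser', ('firstname', 'surname')):
    print statement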
+"""cubicweb.server.hooks.syncschema unit and functional tests""" + from logilab.common.testlib import TestCase, unittest_main from cubicweb import ValidationError @@ -98,7 +100,7 @@ self.execute('Societe2 X WHERE X name "logilab"') self.execute('SET X concerne2 X WHERE X name "logilab"') rset = self.execute('Any X WHERE X concerne2 Y') - self.assertEquals(rset.rows, [[s2eid]]) + self.assertEqual(rset.rows, [[s2eid]]) # check that when a relation definition is deleted, existing relations are deleted rdefeid = self.execute('INSERT CWRelation X: X cardinality "**", X relation_type RT, ' ' X from_entity E, X to_entity E ' @@ -125,9 +127,9 @@ def test_is_instance_of_insertions(self): seid = self.execute('INSERT Transition T: T name "subdiv"')[0][0] is_etypes = [etype for etype, in self.execute('Any ETN WHERE X eid %s, X is ET, ET name ETN' % seid)] - self.assertEquals(is_etypes, ['Transition']) + self.assertEqual(is_etypes, ['Transition']) instanceof_etypes = [etype for etype, in self.execute('Any ETN WHERE X eid %s, X is_instance_of ET, ET name ETN' % seid)] - self.assertEquals(sorted(instanceof_etypes), ['BaseTransition', 'Transition']) + self.assertEqual(sorted(instanceof_etypes), ['BaseTransition', 'Transition']) snames = [name for name, in self.execute('Any N WHERE S is BaseTransition, S name N')] self.failIf('subdiv' in snames) snames = [name for name, in self.execute('Any N WHERE S is_instance_of BaseTransition, S name N')] @@ -136,27 +138,27 @@ def test_perms_synchronization_1(self): schema = self.repo.schema - self.assertEquals(schema['CWUser'].get_groups('read'), set(('managers', 'users'))) + self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers', 'users'))) self.failUnless(self.execute('Any X, Y WHERE X is CWEType, X name "CWUser", Y is CWGroup, Y name "users"')[0]) self.execute('DELETE X read_permission Y WHERE X is CWEType, X name "CWUser", Y name "users"') - self.assertEquals(schema['CWUser'].get_groups('read'), set(('managers', 'users', ))) + self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers', 'users', ))) self.commit() - self.assertEquals(schema['CWUser'].get_groups('read'), set(('managers',))) + self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers',))) self.execute('SET X read_permission Y WHERE X is CWEType, X name "CWUser", Y name "users"') self.commit() - self.assertEquals(schema['CWUser'].get_groups('read'), set(('managers', 'users',))) + self.assertEqual(schema['CWUser'].get_groups('read'), set(('managers', 'users',))) def test_perms_synchronization_2(self): schema = self.repo.schema['in_group'].rdefs[('CWUser', 'CWGroup')] - self.assertEquals(schema.get_groups('read'), set(('managers', 'users', 'guests'))) + self.assertEqual(schema.get_groups('read'), set(('managers', 'users', 'guests'))) self.execute('DELETE X read_permission Y WHERE X relation_type RT, RT name "in_group", Y name "guests"') - self.assertEquals(schema.get_groups('read'), set(('managers', 'users', 'guests'))) + self.assertEqual(schema.get_groups('read'), set(('managers', 'users', 'guests'))) self.commit() - self.assertEquals(schema.get_groups('read'), set(('managers', 'users'))) + self.assertEqual(schema.get_groups('read'), set(('managers', 'users'))) self.execute('SET X read_permission Y WHERE X relation_type RT, RT name "in_group", Y name "guests"') - self.assertEquals(schema.get_groups('read'), set(('managers', 'users'))) + self.assertEqual(schema.get_groups('read'), set(('managers', 'users'))) self.commit() - 
self.assertEquals(schema.get_groups('read'), set(('managers', 'users', 'guests'))) + self.assertEqual(schema.get_groups('read'), set(('managers', 'users', 'guests'))) def test_nonregr_user_edit_itself(self): ueid = self.session.user.eid @@ -187,7 +189,10 @@ self.failIf(self.schema['state_of'].inlined) self.failIf(self.index_exists('State', 'state_of')) rset = self.execute('Any X, Y WHERE X state_of Y') - self.assertEquals(len(rset), 2) # user states + self.assertEqual(len(rset), 2) # user states + except: + import traceback + traceback.print_exc() finally: self.execute('SET X inlined TRUE WHERE X name "state_of"') self.failIf(self.schema['state_of'].inlined) @@ -195,7 +200,7 @@ self.failUnless(self.schema['state_of'].inlined) self.failUnless(self.index_exists('State', 'state_of')) rset = self.execute('Any X, Y WHERE X state_of Y') - self.assertEquals(len(rset), 2) + self.assertEqual(len(rset), 2) def test_indexed_change(self): self.session.set_pool() @@ -255,6 +260,7 @@ self.commit() # should not be able anymore to add cwuser without surname self.assertRaises(ValidationError, self.create_user, "toto") + self.rollback() self.execute('SET DEF cardinality "?1" ' 'WHERE DEF relation_type RT, DEF from_entity E,' 'RT name "surname", E name "CWUser"') @@ -314,7 +320,7 @@ rdef = self.schema['Transition'].rdef('type') cstr = rdef.constraint_by_type('StaticVocabularyConstraint') if not getattr(cstr, 'eid', None): - self.skip('start me alone') # bug in schema reloading, constraint's eid not restored + self.skipTest('start me alone') # bug in schema reloading, constraint's eid not restored self.execute('SET X value %(v)s WHERE X eid %(x)s', {'x': cstr.eid, 'v': u"u'normal', u'auto', u'new'"}) self.execute('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X ' @@ -322,7 +328,7 @@ {'ct': 'SizeConstraint', 'value': u'max=10', 'x': rdef.eid}) self.commit() cstr = rdef.constraint_by_type('StaticVocabularyConstraint') - self.assertEquals(cstr.values, (u'normal', u'auto', u'new')) + self.assertEqual(cstr.values, (u'normal', u'auto', u'new')) self.execute('INSERT Transition T: T name "hop", T type "new"') if __name__ == '__main__': diff -r f4d1d5d9ccbb -r 90f2f20367bc hooks/workflow.py --- a/hooks/workflow.py Tue Jul 27 12:36:03 2010 +0200 +++ b/hooks/workflow.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
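# Hedged sketch, not part of the changeset: the hooks/workflow.py hunks below
# all apply the same mechanical change, going through the IWorkflowable
# adapter returned by cw_adapt_to() instead of calling workflow methods
# directly on the entity.  This simplified variant of the
# _FireAutotransitionOp logic only makes the old and new call patterns
# explicit; firing every 'auto' transition in a loop (rather than asserting
# there is exactly one) is an assumption of the example.
def fire_auto_transitions(entity):
    # pre-3.9: autotrs = list(entity.possible_transitions('auto'))
    #          entity.fire_transition(autotrs[0])
    # 3.9: the same API is reached through the adapter
    iworkflowable = entity.cw_adapt_to('IWorkflowable')
    for tr in iworkflowable.possible_transitions('auto'):
        iworkflowable.fire_transition(tr)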
-"""Core hooks: workflow related hooks +"""Core hooks: workflow related hooks""" -""" __docformat__ = "restructuredtext en" from datetime import datetime @@ -25,8 +24,7 @@ from yams.schema import role_name from cubicweb import RepositoryError, ValidationError -from cubicweb.interfaces import IWorkflowable -from cubicweb.selectors import implements +from cubicweb.selectors import is_instance, adaptable from cubicweb.server import hook @@ -51,11 +49,12 @@ def precommit_event(self): session = self.session entity = self.entity + iworkflowable = entity.cw_adapt_to('IWorkflowable') # if there is an initial state and the entity's state is not set, # use the initial state as a default state if not (session.deleted_in_transaction(entity.eid) or entity.in_state) \ - and entity.current_workflow: - state = entity.current_workflow.initial + and iworkflowable.current_workflow: + state = iworkflowable.current_workflow.initial if state: session.add_relation(entity.eid, 'in_state', state.eid) _FireAutotransitionOp(session, entity=entity) @@ -65,10 +64,11 @@ def precommit_event(self): entity = self.entity - autotrs = list(entity.possible_transitions('auto')) + iworkflowable = entity.cw_adapt_to('IWorkflowable') + autotrs = list(iworkflowable.possible_transitions('auto')) if autotrs: assert len(autotrs) == 1 - entity.fire_transition(autotrs[0]) + iworkflowable.fire_transition(autotrs[0]) class _WorkflowChangedOp(hook.Operation): @@ -82,29 +82,30 @@ if self.eid in pendingeids: return entity = session.entity_from_eid(self.eid) + iworkflowable = entity.cw_adapt_to('IWorkflowable') # check custom workflow has not been rechanged to another one in the same # transaction - mainwf = entity.main_workflow + mainwf = iworkflowable.main_workflow if mainwf.eid == self.wfeid: deststate = mainwf.initial if not deststate: qname = role_name('custom_workflow', 'subject') msg = session._('workflow has no initial state') raise ValidationError(entity.eid, {qname: msg}) - if mainwf.state_by_eid(entity.current_state.eid): + if mainwf.state_by_eid(iworkflowable.current_state.eid): # nothing to do return # if there are no history, simply go to new workflow's initial state - if not entity.workflow_history: - if entity.current_state.eid != deststate.eid: + if not iworkflowable.workflow_history: + if iworkflowable.current_state.eid != deststate.eid: _change_state(session, entity.eid, - entity.current_state.eid, deststate.eid) + iworkflowable.current_state.eid, deststate.eid) _FireAutotransitionOp(session, entity=entity) return msg = session._('workflow changed to "%s"') msg %= session._(mainwf.name) session.transaction_data[(entity.eid, 'customwf')] = self.wfeid - entity.change_state(deststate, msg, u'text/plain') + iworkflowable.change_state(deststate, msg, u'text/plain') class _CheckTrExitPoint(hook.Operation): @@ -125,9 +126,10 @@ def precommit_event(self): session = self.session forentity = self.forentity + iworkflowable = forentity.cw_adapt_to('IWorkflowable') trinfo = self.trinfo # we're in a subworkflow, check if we've reached an exit point - wftr = forentity.subworkflow_input_transition() + wftr = iworkflowable.subworkflow_input_transition() if wftr is None: # inconsistency detected qname = role_name('to_state', 'subject') @@ -137,9 +139,9 @@ if tostate is not None: # reached an exit point msg = session._('exiting from subworkflow %s') - msg %= session._(forentity.current_workflow.name) + msg %= session._(iworkflowable.current_workflow.name) session.transaction_data[(forentity.eid, 'subwfentrytr')] = True - 
forentity.change_state(tostate, msg, u'text/plain', tr=wftr) + iworkflowable.change_state(tostate, msg, u'text/plain', tr=wftr) # hooks ######################################################################## @@ -151,7 +153,7 @@ class SetInitialStateHook(WorkflowHook): __regid__ = 'wfsetinitial' - __select__ = WorkflowHook.__select__ & implements(IWorkflowable) + __select__ = WorkflowHook.__select__ & adaptable('IWorkflowable') events = ('after_add_entity',) def __call__(self): @@ -175,7 +177,7 @@ * by_transition or to_state (managers only) inlined relation is set """ __regid__ = 'wffiretransition' - __select__ = WorkflowHook.__select__ & implements('TrInfo') + __select__ = WorkflowHook.__select__ & is_instance('TrInfo') events = ('before_add_entity',) def __call__(self): @@ -189,18 +191,19 @@ msg = session._('mandatory relation') raise ValidationError(entity.eid, {qname: msg}) forentity = session.entity_from_eid(foreid) + iworkflowable = forentity.cw_adapt_to('IWorkflowable') # then check it has a workflow set, unless we're in the process of changing # entity's workflow if session.transaction_data.get((forentity.eid, 'customwf')): wfeid = session.transaction_data[(forentity.eid, 'customwf')] wf = session.entity_from_eid(wfeid) else: - wf = forentity.current_workflow + wf = iworkflowable.current_workflow if wf is None: msg = session._('related entity has no workflow set') raise ValidationError(entity.eid, {None: msg}) # then check it has a state set - fromstate = forentity.current_state + fromstate = iworkflowable.current_state if fromstate is None: msg = session._('related entity has no state') raise ValidationError(entity.eid, {None: msg}) @@ -270,7 +273,7 @@ class FiredTransitionHook(WorkflowHook): """change related entity state""" __regid__ = 'wffiretransition' - __select__ = WorkflowHook.__select__ & implements('TrInfo') + __select__ = WorkflowHook.__select__ & is_instance('TrInfo') events = ('after_add_entity',) def __call__(self): @@ -278,8 +281,9 @@ _change_state(self._cw, trinfo['wf_info_for'], trinfo['from_state'], trinfo['to_state']) forentity = self._cw.entity_from_eid(trinfo['wf_info_for']) - assert forentity.current_state.eid == trinfo['to_state'] - if forentity.main_workflow.eid != forentity.current_workflow.eid: + iworkflowable = forentity.cw_adapt_to('IWorkflowable') + assert iworkflowable.current_state.eid == trinfo['to_state'] + if iworkflowable.main_workflow.eid != iworkflowable.current_workflow.eid: _SubWorkflowExitOp(self._cw, forentity=forentity, trinfo=trinfo) @@ -297,7 +301,8 @@ # state changed through TrInfo insertion, so we already know it's ok return entity = session.entity_from_eid(self.eidfrom) - mainwf = entity.main_workflow + iworkflowable = entity.cw_adapt_to('IWorkflowable') + mainwf = iworkflowable.main_workflow if mainwf is None: msg = session._('entity has no workflow set') raise ValidationError(entity.eid, {None: msg}) @@ -309,7 +314,7 @@ msg = session._("state doesn't belong to entity's workflow. 
You may " "want to set a custom workflow for this entity first.") raise ValidationError(self.eidfrom, {qname: msg}) - if entity.current_workflow and wf.eid != entity.current_workflow.eid: + if iworkflowable.current_workflow and wf.eid != iworkflowable.current_workflow.eid: qname = role_name('in_state', 'subject') msg = session._("state doesn't belong to entity's current workflow") raise ValidationError(self.eidfrom, {qname: msg}) @@ -359,7 +364,7 @@ def __call__(self): entity = self._cw.entity_from_eid(self.eidfrom) - typewf = entity.cwetype_workflow() + typewf = entity.cw_adapt_to('IWorkflowable').cwetype_workflow() if typewf is not None: _WorkflowChangedOp(self._cw, eid=self.eidfrom, wfeid=typewf.eid) diff -r f4d1d5d9ccbb -r 90f2f20367bc i18n.py --- a/i18n.py Tue Jul 27 12:36:03 2010 +0200 +++ b/i18n.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""Some i18n/gettext utilities. +"""Some i18n/gettext utilities.""" -""" __docformat__ = "restructuredtext en" import re diff -r f4d1d5d9ccbb -r 90f2f20367bc i18n/en.po --- a/i18n/en.po Tue Jul 27 12:36:03 2010 +0200 +++ b/i18n/en.po Wed Nov 03 16:38:28 2010 +0100 @@ -5,9 +5,10 @@ msgstr "" "Project-Id-Version: 2.0\n" "POT-Creation-Date: 2006-01-12 17:35+CET\n" -"PO-Revision-Date: 2010-05-16 18:58+0200\n" +"PO-Revision-Date: 2010-09-15 14:55+0200\n" "Last-Translator: Sylvain Thenault \n" "Language-Team: English \n" +"Language: en\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" @@ -181,6 +182,9 @@ "can also display a complete schema with meta-data.
        " msgstr "" +msgid "" +msgstr "" + msgid "?*" msgstr "0..1 0..n" @@ -235,6 +239,9 @@ msgid "Browse by category" msgstr "" +msgid "Browse by entity type" +msgstr "" + msgid "Bytes" msgstr "Bytes" @@ -313,6 +320,12 @@ msgid "CWRelation_plural" msgstr "Relations" +msgid "CWUniqueTogetherConstraint" +msgstr "" + +msgid "CWUniqueTogetherConstraint_plural" +msgstr "" + msgid "CWUser" msgstr "User" @@ -355,6 +368,10 @@ "supported" msgstr "" +#, python-format +msgid "Data connection graph for %s" +msgstr "" + msgid "Date" msgstr "Date" @@ -379,10 +396,10 @@ msgid "Download schema as OWL" msgstr "" -msgctxt "inlined:CWUser.use_email.subject" msgid "EmailAddress" msgstr "Email address" +msgctxt "inlined:CWUser.use_email.subject" msgid "EmailAddress" msgstr "Email address" @@ -419,6 +436,9 @@ msgid "Garbage collection information" msgstr "" +msgid "Got rhythm?" +msgstr "" + msgid "Help" msgstr "" @@ -485,6 +505,9 @@ msgid "New CWRelation" msgstr "New relation" +msgid "New CWUniqueTogetherConstraint" +msgstr "" + msgid "New CWUser" msgstr "New user" @@ -515,6 +538,10 @@ msgid "New WorkflowTransition" msgstr "New workflow-transition" +#, python-format +msgid "No account? Try public access at %s" +msgstr "" + msgid "No result matching query" msgstr "" @@ -524,7 +551,7 @@ msgid "OR" msgstr "" -msgid "Parent classes:" +msgid "Parent class:" msgstr "" msgid "Password" @@ -624,9 +651,6 @@ msgid "Submit bug report by mail" msgstr "" -msgid "The repository holds the following entities" -msgstr "" - #, python-format msgid "The view %s can not be applied to this query" msgstr "" @@ -674,6 +698,9 @@ msgid "This CWRelation" msgstr "This relation" +msgid "This CWUniqueTogetherConstraint" +msgstr "" + msgid "This CWUser" msgstr "This user" @@ -868,6 +895,9 @@ msgid "add CWRelation relation_type CWRType object" msgstr "relation definition" +msgid "add CWUniqueTogetherConstraint constraint_of CWEType object" +msgstr "" + msgid "add CWUser in_group CWGroup object" msgstr "user" @@ -926,9 +956,6 @@ msgid "add a new permission" msgstr "" -msgid "add_perm" -msgstr "add permission" - # subject and object forms for each relation type # (no object form for final relation types) msgid "add_permission" @@ -944,6 +971,9 @@ msgid "add_permission" msgstr "add permission" +msgid "add_permission_object" +msgstr "has permission to add" + msgctxt "CWGroup" msgid "add_permission_object" msgstr "can add" @@ -952,8 +982,8 @@ msgid "add_permission_object" msgstr "used to define add permission on" -msgid "add_permission_object" -msgstr "has permission to add" +msgid "add_relation" +msgstr "add" #, python-format msgid "added %(etype)s #%(eid)s (%(title)s)" @@ -961,8 +991,8 @@ #, python-format msgid "" -"added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #%" -"(eidto)s" +"added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" +"%(eidto)s" msgstr "" msgid "addrelated" @@ -995,6 +1025,9 @@ msgid "allowed_transition" msgstr "allowed transition" +msgid "allowed_transition_object" +msgstr "incoming states" + msgctxt "BaseTransition" msgid "allowed_transition_object" msgstr "incoming states" @@ -1007,9 +1040,6 @@ msgid "allowed_transition_object" msgstr "incoming states" -msgid "allowed_transition_object" -msgstr "incoming states" - msgid "am/pm calendar (month)" msgstr "" @@ -1025,13 +1055,13 @@ msgid "an electronic mail address associated to a short alias" msgstr "" -msgid "an error occured" -msgstr "" - -msgid "an error occured while processing your request" -msgstr "" - -msgid "an error 
occured, the request cannot be fulfilled" +msgid "an error occurred" +msgstr "" + +msgid "an error occurred while processing your request" +msgstr "" + +msgid "an error occurred, the request cannot be fulfilled" msgstr "" msgid "an integer is expected" @@ -1093,13 +1123,13 @@ msgid "bookmarked_by" msgstr "bookmarked by" +msgid "bookmarked_by_object" +msgstr "has bookmarks" + msgctxt "CWUser" msgid "bookmarked_by_object" msgstr "uses bookmarks" -msgid "bookmarked_by_object" -msgstr "has bookmarks" - msgid "bookmarks" msgstr "" @@ -1185,6 +1215,9 @@ msgid "by_transition" msgstr "by transition" +msgid "by_transition_object" +msgstr "transition information" + msgctxt "BaseTransition" msgid "by_transition_object" msgstr "transition information" @@ -1197,9 +1230,6 @@ msgid "by_transition_object" msgstr "transition information" -msgid "by_transition_object" -msgstr "transition information" - msgid "calendar" msgstr "" @@ -1282,6 +1312,12 @@ msgid "click on the box to cancel the deletion" msgstr "" +msgid "click to add a value" +msgstr "" + +msgid "click to delete this value" +msgstr "" + msgid "click to edit this field" msgstr "" @@ -1375,10 +1411,10 @@ msgid "condition" msgstr "condition" -msgctxt "RQLExpression" msgid "condition_object" msgstr "condition of" +msgctxt "RQLExpression" msgid "condition_object" msgstr "condition of" @@ -1405,16 +1441,30 @@ msgid "constrained_by" msgstr "constrained by" -msgctxt "CWConstraint" msgid "constrained_by_object" msgstr "constraints" +msgctxt "CWConstraint" msgid "constrained_by_object" msgstr "constraints" msgid "constraint factory" msgstr "" +msgid "constraint_of" +msgstr "" + +msgctxt "CWUniqueTogetherConstraint" +msgid "constraint_of" +msgstr "" + +msgid "constraint_of_object" +msgstr "" + +msgctxt "CWEType" +msgid "constraint_of_object" +msgstr "" + msgid "constraints" msgstr "" @@ -1470,6 +1520,9 @@ msgid "context where this component should be displayed" msgstr "" +msgid "context where this facet should be displayed, leave empty for both" +msgstr "" + msgid "control subject entity's relations order" msgstr "" @@ -1535,6 +1588,11 @@ msgid "creating CWRelation (CWRelation relation_type CWRType %(linkto)s)" msgstr "creating relation %(linkto)s" +msgid "" +"creating CWUniqueTogetherConstraint (CWUniqueTogetherConstraint " +"constraint_of CWEType %(linkto)s)" +msgstr "" + msgid "creating CWUser (CWUser in_group CWGroup %(linkto)s)" msgstr "creating a new user in group %(linkto)s" @@ -1616,8 +1674,8 @@ msgstr "creating workflow-transition leading to state %(linkto)s" msgid "" -"creating WorkflowTransition (WorkflowTransition transition_of Workflow %" -"(linkto)s)" +"creating WorkflowTransition (WorkflowTransition transition_of Workflow " +"%(linkto)s)" msgstr "creating workflow-transition of workflow %(linkto)s" msgid "creation" @@ -1639,13 +1697,13 @@ msgid "cstrtype" msgstr "constraint type" +msgid "cstrtype_object" +msgstr "used by" + msgctxt "CWConstraintType" msgid "cstrtype_object" msgstr "constraint type of" -msgid "cstrtype_object" -msgstr "used by" - msgid "csv entities export" msgstr "" @@ -1728,10 +1786,10 @@ msgid "default_workflow" msgstr "default workflow" -msgctxt "Workflow" msgid "default_workflow_object" msgstr "default workflow of" +msgctxt "Workflow" msgid "default_workflow_object" msgstr "default workflow of" @@ -1776,6 +1834,9 @@ msgid "define how we get out from a sub-workflow" msgstr "" +msgid "defines a sql-level multicolumn unique index" +msgstr "" + msgid "" "defines what's the property is applied for. 
You must select this first to be " "able to set value" @@ -1793,9 +1854,6 @@ msgid "delete this relation" msgstr "" -msgid "delete_perm" -msgstr "delete permission" - msgid "delete_permission" msgstr "can be deleted by" @@ -1807,6 +1865,9 @@ msgid "delete_permission" msgstr "delete_permission" +msgid "delete_permission_object" +msgstr "has permission to delete" + msgctxt "CWGroup" msgid "delete_permission_object" msgstr "has permission to delete" @@ -1815,17 +1876,14 @@ msgid "delete_permission_object" msgstr "has permission to delete" -msgid "delete_permission_object" -msgstr "has permission to delete" - #, python-format msgid "deleted %(etype)s #%(eid)s (%(title)s)" msgstr "" #, python-format msgid "" -"deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #%" -"(eidto)s" +"deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" +"%(eidto)s" msgstr "" msgid "depends on the constraint type" @@ -1834,15 +1892,7 @@ msgid "description" msgstr "description" -msgctxt "CWEType" -msgid "description" -msgstr "description" - -msgctxt "CWRelation" -msgid "description" -msgstr "description" - -msgctxt "Workflow" +msgctxt "BaseTransition" msgid "description" msgstr "description" @@ -1850,15 +1900,7 @@ msgid "description" msgstr "description" -msgctxt "Transition" -msgid "description" -msgstr "description" - -msgctxt "WorkflowTransition" -msgid "description" -msgstr "description" - -msgctxt "State" +msgctxt "CWEType" msgid "description" msgstr "description" @@ -1866,10 +1908,34 @@ msgid "description" msgstr "description" -msgctxt "BaseTransition" +msgctxt "CWRelation" +msgid "description" +msgstr "description" + +msgctxt "State" +msgid "description" +msgstr "description" + +msgctxt "Transition" msgid "description" msgstr "description" +msgctxt "Workflow" +msgid "description" +msgstr "description" + +msgctxt "WorkflowTransition" +msgid "description" +msgstr "description" + +msgid "description_format" +msgstr "format" + +msgctxt "BaseTransition" +msgid "description_format" +msgstr "format" + +msgctxt "CWAttribute" msgid "description_format" msgstr "format" @@ -1877,38 +1943,30 @@ msgid "description_format" msgstr "format" +msgctxt "CWRType" +msgid "description_format" +msgstr "format" + msgctxt "CWRelation" msgid "description_format" msgstr "format" +msgctxt "State" +msgid "description_format" +msgstr "format" + +msgctxt "Transition" +msgid "description_format" +msgstr "format" + msgctxt "Workflow" msgid "description_format" msgstr "format" -msgctxt "CWAttribute" -msgid "description_format" -msgstr "format" - -msgctxt "Transition" -msgid "description_format" -msgstr "format" - msgctxt "WorkflowTransition" msgid "description_format" msgstr "format" -msgctxt "State" -msgid "description_format" -msgstr "format" - -msgctxt "CWRType" -msgid "description_format" -msgstr "format" - -msgctxt "BaseTransition" -msgid "description_format" -msgstr "format" - msgid "destination state for this transition" msgstr "" @@ -1926,21 +1984,21 @@ msgid "destination_state" msgstr "destination state" +msgctxt "SubWorkflowExitPoint" +msgid "destination_state" +msgstr "destination state" + msgctxt "Transition" msgid "destination_state" msgstr "destination state" -msgctxt "SubWorkflowExitPoint" -msgid "destination_state" -msgstr "destination state" +msgid "destination_state_object" +msgstr "destination of" msgctxt "State" msgid "destination_state_object" msgstr "destination of" -msgid "destination_state_object" -msgstr "destination of" - msgid "detach attached file" msgstr "" @@ -1950,12 
+2008,18 @@ msgid "display order of the component" msgstr "" +msgid "display order of the facet" +msgstr "" + msgid "display the box or not" msgstr "" msgid "display the component or not" msgstr "" +msgid "display the facet or not" +msgstr "" + msgid "" "distinct label to distinguate between other permission entity of the same " "name" @@ -1986,9 +2050,6 @@ msgid "editable-table" msgstr "" -msgid "edition" -msgstr "" - msgid "eid" msgstr "" @@ -2001,6 +2062,9 @@ msgid "embed" msgstr "" +msgid "embedded html" +msgstr "" + msgid "embedding this url is forbidden" msgstr "" @@ -2062,7 +2126,7 @@ msgid "eta_date" msgstr "ETA date" -msgid "exit state must a subworkflow state" +msgid "exit state must be a subworkflow state" msgstr "" msgid "exit_point" @@ -2185,13 +2249,13 @@ msgid "for_user" msgstr "for user" +msgid "for_user_object" +msgstr "use properties" + msgctxt "CWUser" msgid "for_user_object" msgstr "property of" -msgid "for_user_object" -msgstr "use properties" - msgid "friday" msgstr "" @@ -2213,13 +2277,13 @@ msgid "from_entity" msgstr "from entity" +msgid "from_entity_object" +msgstr "subjet relation" + msgctxt "CWEType" msgid "from_entity_object" msgstr "subjec relation" -msgid "from_entity_object" -msgstr "subjet relation" - msgid "from_interval_start" msgstr "from" @@ -2230,10 +2294,10 @@ msgid "from_state" msgstr "from state" -msgctxt "State" msgid "from_state_object" msgstr "transitions from this state" +msgctxt "State" msgid "from_state_object" msgstr "transitions from this state" @@ -2271,15 +2335,20 @@ msgid "granted to groups" msgstr "" -msgid "graphical representation of the instance'schema" +#, python-format +msgid "graphical representation of %(appid)s data model" msgstr "" #, python-format -msgid "graphical schema for %s" +msgid "" +"graphical representation of the %(etype)s entity type from %(appid)s data " +"model" msgstr "" #, python-format -msgid "graphical workflow for %s" +msgid "" +"graphical representation of the %(rtype)s relation type from %(appid)s data " +"model" msgstr "" msgid "group in which a user should be to be allowed to pass this transition" @@ -2386,10 +2455,10 @@ msgid "in_group" msgstr "in group" -msgctxt "CWGroup" msgid "in_group_object" msgstr "contains" +msgctxt "CWGroup" msgid "in_group_object" msgstr "contains" @@ -2444,10 +2513,10 @@ msgid "initial_state" msgstr "initial state" -msgctxt "State" msgid "initial_state_object" msgstr "initial state of" +msgctxt "State" msgid "initial_state_object" msgstr "initial state of" @@ -2679,6 +2748,9 @@ msgid "missing parameters for entity %s" msgstr "" +msgid "modification" +msgstr "" + msgid "modification_date" msgstr "modification date" @@ -2703,15 +2775,19 @@ msgid "name" msgstr "" -msgctxt "CWEType" +msgctxt "BaseTransition" +msgid "name" +msgstr "name" + +msgctxt "CWCache" msgid "name" -msgstr "" - -msgctxt "Transition" +msgstr "name" + +msgctxt "CWConstraintType" msgid "name" msgstr "" -msgctxt "Workflow" +msgctxt "CWEType" msgid "name" msgstr "" @@ -2719,18 +2795,6 @@ msgid "name" msgstr "" -msgctxt "CWConstraintType" -msgid "name" -msgstr "" - -msgctxt "WorkflowTransition" -msgid "name" -msgstr "" - -msgctxt "State" -msgid "name" -msgstr "name" - msgctxt "CWPermission" msgid "name" msgstr "name" @@ -2739,13 +2803,21 @@ msgid "name" msgstr "name" -msgctxt "BaseTransition" +msgctxt "State" msgid "name" msgstr "name" -msgctxt "CWCache" +msgctxt "Transition" +msgid "name" +msgstr "" + +msgctxt "Workflow" msgid "name" -msgstr "name" +msgstr "" + +msgctxt "WorkflowTransition" +msgid "name" +msgstr "" 
msgid "name of the cache" msgstr "" @@ -2801,6 +2873,9 @@ msgid "no edited fields specified for entity %s" msgstr "" +msgid "no related entity" +msgstr "" + msgid "no related project" msgstr "" @@ -2926,9 +3001,6 @@ msgid "permissions for this entity" msgstr "" -msgid "personnal informations" -msgstr "" - msgid "pick existing bookmarks" msgstr "" @@ -2958,10 +3030,10 @@ msgid "prefered_form" msgstr "prefered form" -msgctxt "EmailAddress" msgid "prefered_form_object" msgstr "prefered over" +msgctxt "EmailAddress" msgid "prefered_form_object" msgstr "prefered over" @@ -2981,12 +3053,15 @@ msgid "primary_email" msgstr "primary email" +msgid "primary_email_object" +msgstr "primary email of" + msgctxt "EmailAddress" msgid "primary_email_object" msgstr "primary email of" -msgid "primary_email_object" -msgstr "primary email of" +msgid "profile" +msgstr "" msgid "progress" msgstr "" @@ -3006,17 +3081,14 @@ msgid "read" msgstr "" -msgid "read_perm" -msgstr "read permission" - msgid "read_permission" msgstr "can be read by" -msgctxt "CWEType" +msgctxt "CWAttribute" msgid "read_permission" msgstr "read permission" -msgctxt "CWAttribute" +msgctxt "CWEType" msgid "read_permission" msgstr "read permission" @@ -3024,6 +3096,9 @@ msgid "read_permission" msgstr "read permission" +msgid "read_permission_object" +msgstr "has permission to read" + msgctxt "CWGroup" msgid "read_permission_object" msgstr "can be read by" @@ -3032,9 +3107,6 @@ msgid "read_permission_object" msgstr "can be read by" -msgid "read_permission_object" -msgstr "has permission to delete" - msgid "registry" msgstr "" @@ -3068,16 +3140,34 @@ msgid "relation_type" msgstr "relation type" +msgid "relation_type_object" +msgstr "relation definitions" + msgctxt "CWRType" msgid "relation_type_object" msgstr "relation definitions" -msgid "relation_type_object" -msgstr "relation definitions" +msgid "relations" +msgstr "" + +msgctxt "CWUniqueTogetherConstraint" +msgid "relations" +msgstr "" msgid "relations deleted" msgstr "" +msgid "relations_object" +msgstr "" + +msgctxt "CWAttribute" +msgid "relations_object" +msgstr "" + +msgctxt "CWRelation" +msgid "relations_object" +msgstr "" + msgid "relative url of the bookmarked page" msgstr "" @@ -3091,11 +3181,11 @@ msgid "require_group" msgstr "require group" -msgctxt "Transition" +msgctxt "CWPermission" msgid "require_group" msgstr "require group" -msgctxt "CWPermission" +msgctxt "Transition" msgid "require_group" msgstr "require group" @@ -3103,10 +3193,10 @@ msgid "require_group" msgstr "require group" -msgctxt "CWGroup" msgid "require_group_object" msgstr "required by" +msgctxt "CWGroup" msgid "require_group_object" msgstr "required by" @@ -3148,7 +3238,7 @@ msgstr "" msgid "same_as" -msgstr "" +msgstr "same as" msgid "sample format" msgstr "" @@ -3277,7 +3367,7 @@ msgid "site-wide property can't be set for user" msgstr "" -msgid "some errors occured:" +msgid "some errors occurred:" msgstr "" msgid "some later transaction(s) touch entity, undo them first" @@ -3299,10 +3389,10 @@ msgid "specializes" msgstr "specializes" -msgctxt "CWEType" msgid "specializes_object" msgstr "specialized by" +msgctxt "CWEType" msgid "specializes_object" msgstr "specialized by" @@ -3339,10 +3429,10 @@ msgid "state_of" msgstr "state of" -msgctxt "Workflow" msgid "state_of_object" msgstr "use states" +msgctxt "Workflow" msgid "state_of_object" msgstr "use states" @@ -3386,20 +3476,20 @@ msgid "subworkflow_exit" msgstr "subworkflow exit" +msgid "subworkflow_exit_object" +msgstr "subworkflow exit of" + msgctxt 
"SubWorkflowExitPoint" msgid "subworkflow_exit_object" msgstr "subworkflow exit of" -msgid "subworkflow_exit_object" -msgstr "subworkflow exit of" +msgid "subworkflow_object" +msgstr "subworkflow of" msgctxt "Workflow" msgid "subworkflow_object" msgstr "subworkflow of" -msgid "subworkflow_object" -msgstr "subworkflow of" - msgid "subworkflow_state" msgstr "subworkflow state" @@ -3407,10 +3497,10 @@ msgid "subworkflow_state" msgstr "subworkflow state" -msgctxt "State" msgid "subworkflow_state_object" msgstr "exit point" +msgctxt "State" msgid "subworkflow_state_object" msgstr "exit point" @@ -3524,10 +3614,10 @@ msgid "to_entity" msgstr "to entity" -msgctxt "CWEType" msgid "to_entity_object" msgstr "object relations" +msgctxt "CWEType" msgid "to_entity_object" msgstr "object relations" @@ -3541,10 +3631,10 @@ msgid "to_state" msgstr "to state" -msgctxt "State" msgid "to_state_object" msgstr "transitions to this state" +msgctxt "State" msgid "to_state_object" msgstr "transitions to this state" @@ -3585,10 +3675,10 @@ msgid "transition_of" msgstr "transition of" -msgctxt "Workflow" msgid "transition_of_object" msgstr "use transitions" +msgctxt "Workflow" msgid "transition_of_object" msgstr "use transitions" @@ -3686,9 +3776,10 @@ msgid "update" msgstr "" -msgid "update_perm" -msgstr "update permission" - +msgid "update_permission" +msgstr "can be updated by" + +msgctxt "CWAttribute" msgid "update_permission" msgstr "can be updated by" @@ -3696,9 +3787,8 @@ msgid "update_permission" msgstr "can be updated by" -msgctxt "CWAttribute" -msgid "update_permission" -msgstr "can be updated by" +msgid "update_permission_object" +msgstr "has permission to update" msgctxt "CWGroup" msgid "update_permission_object" @@ -3708,8 +3798,8 @@ msgid "update_permission_object" msgstr "has permission to update" -msgid "update_permission_object" -msgstr "has permission to update" +msgid "update_relation" +msgstr "update" msgid "updated" msgstr "" @@ -3741,10 +3831,10 @@ msgid "use_email" msgstr "use email" -msgctxt "EmailAddress" msgid "use_email_object" msgstr "used by" +msgctxt "EmailAddress" msgid "use_email_object" msgstr "used by" @@ -3854,6 +3944,10 @@ msgid "view_index" msgstr "index" +#, python-format +msgid "violates unique_together constraints (%s)" +msgstr "violates unique_together constraints (%s)" + msgid "visible" msgstr "" @@ -3923,10 +4017,10 @@ msgid "workflow_of" msgstr "workflow of" -msgctxt "CWEType" msgid "workflow_of_object" msgstr "may use workflow" +msgctxt "CWEType" msgid "workflow_of_object" msgstr "may use workflow" @@ -3951,3 +4045,15 @@ msgid "you should probably delete that property" msgstr "" + +#~ msgid "add_perm" +#~ msgstr "add permission" + +#~ msgid "delete_perm" +#~ msgstr "delete permission" + +#~ msgid "read_perm" +#~ msgstr "read permission" + +#~ msgid "update_perm" +#~ msgstr "update permission" diff -r f4d1d5d9ccbb -r 90f2f20367bc i18n/es.po --- a/i18n/es.po Tue Jul 27 12:36:03 2010 +0200 +++ b/i18n/es.po Wed Nov 03 16:38:28 2010 +0100 @@ -5,8 +5,10 @@ msgstr "" "Project-Id-Version: cubicweb 2.46.0\n" "PO-Revision-Date: 2010-11-27 07:59+0100\n" -"Last-Translator: Celso Flores\n" +"Last-Translator: Celso Flores, Carlos Balderas " +"\n" "Language-Team: es \n" +"Language: \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" @@ -23,8 +25,8 @@ "url: %(url)s\n" msgstr "" "\n" -"%(user)s ha cambiado su estado de <%(previous_state)s> hacia <%" -"(current_state)s> por la entidad\n" +"%(user)s ha cambiado su estado de 
<%(previous_state)s> hacia <" +"%(current_state)s> por la entidad\n" "'%(title)s'\n" "\n" "%(comment)s\n" @@ -36,15 +38,15 @@ msgstr " del estado %(fromstate)s hacia el estado %(tostate)s\n" msgid " :" -msgstr "" +msgstr ":" #, python-format msgid "%(attr)s set to %(newvalue)s" -msgstr "" +msgstr "%(attr)s modificado a %(newvalue)s" #, python-format msgid "%(attr)s updated from %(oldvalue)s to %(newvalue)s" -msgstr "" +msgstr "%(attr)s modificado de %(oldvalue)s a %(newvalue)s" #, python-format msgid "%(cstr)s constraint failed for value %(value)r" @@ -60,7 +62,7 @@ #, python-format msgid "%(value)r doesn't match the %(regexp)r regular expression" -msgstr "" +msgstr "%(value)r no corresponde a la expresión regular %(regexp)r" #, python-format msgid "%d days" @@ -132,10 +134,10 @@ #, python-format msgid "%s updated" -msgstr "" +msgstr "%s actualizado" msgid "(UNEXISTANT EID)" -msgstr "" +msgstr "(EID INEXISTENTE" msgid "(loading ...)" msgstr "(Cargando ...)" @@ -178,7 +180,7 @@ #, python-format msgid "<%s not specified>" -msgstr "" +msgstr "<%s no especificado>" #, python-format msgid "" @@ -189,6 +191,9 @@ "pero se puede ver a un modelo completo con meta-datos." +msgid "" +msgstr "" + msgid "?*" msgstr "0..1 0..n" @@ -208,19 +213,19 @@ msgstr "Cualquiera" msgid "Attributes permissions:" -msgstr "" +msgstr "Permisos de atributos:" msgid "Attributes with non default permissions:" -msgstr "" +msgstr "Atributos con permisos no estándares" # schema pot file, generated on 2009-09-16 16:46:55 # # singular and plural forms for each entity type msgid "BaseTransition" -msgstr "" +msgstr "Transición (abstracta)" msgid "BaseTransition_plural" -msgstr "" +msgstr "Transiciones (abstractas)" msgid "Bookmark" msgstr "Favorito" @@ -235,14 +240,17 @@ msgstr "Booleanos" msgid "BoundConstraint" -msgstr "" +msgstr "Restricción de límite" msgid "BoundaryConstraint" -msgstr "" +msgstr "Restricción de límite" msgid "Browse by category" msgstr "Busca por categoría" +msgid "Browse by entity type" +msgstr "Busca por tipo de entidad" + msgid "Bytes" msgstr "Bytes" @@ -278,11 +286,11 @@ msgctxt "inlined:CWRelation.from_entity.subject" msgid "CWEType" -msgstr "" +msgstr "Tipo de entidad" msgctxt "inlined:CWRelation.to_entity.subject" msgid "CWEType" -msgstr "" +msgstr "Tipo de entidad" msgid "CWEType_plural" msgstr "Tipos de entidades" @@ -291,7 +299,7 @@ msgstr "Groupo" msgid "CWGroup_plural" -msgstr "Groupos" +msgstr "Grupos" msgid "CWPermission" msgstr "Autorización" @@ -310,7 +318,7 @@ msgctxt "inlined:CWRelation.relation_type.subject" msgid "CWRType" -msgstr "" +msgstr "Tipo de relación" msgid "CWRType_plural" msgstr "Tipos de relación" @@ -321,6 +329,12 @@ msgid "CWRelation_plural" msgstr "Relaciones" +msgid "CWUniqueTogetherConstraint" +msgstr "" + +msgid "CWUniqueTogetherConstraint_plural" +msgstr "" + msgid "CWUser" msgstr "Usuario" @@ -332,36 +346,52 @@ "Can't restore %(role)s relation %(rtype)s to entity %(eid)s which is already " "linked using this relation." msgstr "" +"No puede restaurar la relación %(role)s %(rtype)s en la entidad %(eid)s pues " +"ya esta ligada a otra entidad usando esa relación." #, python-format msgid "" "Can't restore relation %(rtype)s between %(subj)s and %(obj)s, that relation " "does not exists anymore in the schema." msgstr "" +"No puede restaurar la relación %(rtype)s entre %(subj)s y %(obj)s, esta " +"relación ya no existe en el esquema." #, python-format msgid "" "Can't restore relation %(rtype)s of entity %(eid)s, this relation does not " "exists anymore in the schema." 
msgstr "" +"No puede restaurar la relación %(rtype)s de la entidad %(eid)s, esta " +"relación ya no existe en el esquema." #, python-format msgid "" "Can't restore relation %(rtype)s, %(role)s entity %(eid)s doesn't exist " "anymore." msgstr "" +"No puede restaurar la relación %(rtype)s, la entidad %(role)s %(eid)s ya no " +"existe." #, python-format msgid "" "Can't undo addition of relation %(rtype)s from %(subj)s to %(obj)s, doesn't " "exist anymore" msgstr "" +"No puede anular el agregar la relación %(rtype)s de %(subj)s a %(obj)s, esta " +"relación ya no existe" #, python-format msgid "" "Can't undo creation of entity %(eid)s of type %(etype)s, type no more " "supported" msgstr "" +"No puede anular la creación de la entidad %(eid)s de tipo %(etype)s, este " +"tipo ya no existe" + +#, python-format +msgid "Data connection graph for %s" +msgstr "" msgid "Date" msgstr "Fecha" @@ -382,16 +412,16 @@ msgstr "Decimales" msgid "Do you want to delete the following element(s) ?" -msgstr "Desea suprimir el(los) elemento(s) siguiente(s)" +msgstr "Desea eliminar el(los) elemento(s) siguiente(s)" msgid "Download schema as OWL" -msgstr "" +msgstr "Descargar el esquema en formato OWL" + +msgid "EmailAddress" +msgstr "Correo Electrónico" msgctxt "inlined:CWUser.use_email.subject" msgid "EmailAddress" -msgstr "" - -msgid "EmailAddress" msgstr "Correo Electrónico" msgid "EmailAddress_plural" @@ -401,13 +431,13 @@ msgstr "Entidades" msgid "Entity types" -msgstr "" +msgstr "Tipos de entidades" msgid "ExternalUri" -msgstr "" +msgstr "Uri externo" msgid "ExternalUri_plural" -msgstr "" +msgstr "Uris externos" msgid "Float" msgstr "Número flotante" @@ -419,22 +449,25 @@ # # singular and plural forms for each entity type msgid "FormatConstraint" -msgstr "" +msgstr "Restricción de Formato" msgid "From:" msgstr "De: " msgid "Garbage collection information" +msgstr "Recolector de basura en memoria" + +msgid "Got rhythm?" msgstr "" msgid "Help" -msgstr "" +msgstr "Ayuda" msgid "Index" -msgstr "" +msgstr "Ãndice" msgid "Instance" -msgstr "" +msgstr "Instancia" msgid "Int" msgstr "Número entero" @@ -446,19 +479,19 @@ msgstr "Duración" msgid "IntervalBoundConstraint" -msgstr "" +msgstr "Restricción de intervalo" msgid "Interval_plural" msgstr "Duraciones" msgid "Looked up classes" -msgstr "" +msgstr "Clases buscadas" msgid "Most referenced classes" -msgstr "" +msgstr "Clases más referenciadas" msgid "New BaseTransition" -msgstr "" +msgstr "XXX" msgid "New Bookmark" msgstr "Agregar a Favoritos" @@ -467,7 +500,7 @@ msgstr "Nueva definición de relación final" msgid "New CWCache" -msgstr "Agregar Cache" +msgstr "Agregar Caché" msgid "New CWConstraint" msgstr "Agregar Restricción" @@ -493,6 +526,9 @@ msgid "New CWRelation" msgstr "Nueva definición de relación final" +msgid "New CWUniqueTogetherConstraint" +msgstr "" + msgid "New CWUser" msgstr "Agregar usuario" @@ -500,7 +536,7 @@ msgstr "Agregar Email" msgid "New ExternalUri" -msgstr "" +msgstr "Agregar Uri externa" msgid "New RQLExpression" msgstr "Agregar expresión rql" @@ -509,7 +545,7 @@ msgstr "Agregar Estado" msgid "New SubWorkflowExitPoint" -msgstr "" +msgstr "Agregar salida de sub-Workflow" msgid "New TrInfo" msgstr "Agregar Información de Transición" @@ -518,22 +554,26 @@ msgstr "Agregar transición" msgid "New Workflow" -msgstr "" +msgstr "Agregar Workflow" msgid "New WorkflowTransition" -msgstr "" +msgstr "Agregar transición de Workflow" + +#, python-format +msgid "No account? Try public access at %s" +msgstr "No esta registrado? 
Use el acceso público en %s" msgid "No result matching query" msgstr "Ningún resultado corresponde a su búsqueda" msgid "Non exhaustive list of views that may apply to entities of this type" -msgstr "" +msgstr "Lista no exhaustiva de vistas aplicables a este tipo de entidad" msgid "OR" msgstr "O" -msgid "Parent classes:" -msgstr "" +msgid "Parent class:" +msgstr "Clase padre:" msgid "Password" msgstr "Contraseña" @@ -542,16 +582,16 @@ msgstr "Contraseñas" msgid "Permissions for entity types" -msgstr "" +msgstr "Permisos por tipos de entidad" msgid "Permissions for relations" -msgstr "" +msgstr "Permisos por las relaciones" msgid "Please note that this is only a shallow copy" -msgstr "Recuerde que no es más que una copia superficial" +msgstr "Recuerde que sólo es una copia superficial" msgid "RQLConstraint" -msgstr "" +msgstr "Restricción RQL" msgid "RQLExpression" msgstr "Expresión RQL" @@ -560,28 +600,28 @@ msgstr "Expresiones RQL" msgid "RQLUniqueConstraint" -msgstr "" +msgstr "Restricción RQL de Unicidad" msgid "RQLVocabularyConstraint" -msgstr "" +msgstr "Restricción RQL de Vocabulario" msgid "Recipients:" -msgstr "Destinatarios" +msgstr "Destinatarios :" msgid "RegexpConstraint" -msgstr "" +msgstr "restricción expresión regular" msgid "Registry's content" -msgstr "" +msgstr "Contenido del registro" msgid "Relation types" -msgstr "" +msgstr "Tipos de relación" msgid "Relations" msgstr "Relaciones" msgid "Repository" -msgstr "" +msgstr "Repositorio" #, python-format msgid "Schema %s" @@ -594,10 +634,10 @@ msgstr "Buscar" msgid "SizeConstraint" -msgstr "" +msgstr "Restricción de tamaño" msgid "Startup views" -msgstr "Vistas de Inicio" +msgstr "Vistas de inicio" msgid "State" msgstr "Estado" @@ -606,7 +646,7 @@ msgstr "Estados" msgid "StaticVocabularyConstraint" -msgstr "" +msgstr "Restricción de vocabulario" msgid "String" msgstr "Cadena de caracteres" @@ -615,13 +655,13 @@ msgstr "Cadenas de caracteres" msgid "Sub-classes:" -msgstr "" +msgstr "Clases hijas:" msgid "SubWorkflowExitPoint" -msgstr "" +msgstr "Salida de sub-workflow" msgid "SubWorkflowExitPoint_plural" -msgstr "" +msgstr "Salidas de sub-workflow" msgid "Subject:" msgstr "Sujeto:" @@ -632,9 +672,6 @@ msgid "Submit bug report by mail" msgstr "Enviar este reporte por email" -msgid "The repository holds the following entities" -msgstr "El repositorio contiene las entidades siguientes" - #, python-format msgid "The view %s can not be applied to this query" msgstr "La vista %s no puede ser aplicada a esta búsqueda" @@ -644,10 +681,10 @@ msgstr "La vista %s no ha podido ser encontrada" msgid "There is no default workflow" -msgstr "" +msgstr "Esta entidad no posee workflow por defecto" msgid "This BaseTransition" -msgstr "" +msgstr "Esta transición abstracta" msgid "This Bookmark" msgstr "Este favorito" @@ -656,7 +693,7 @@ msgstr "Esta definición de relación final" msgid "This CWCache" -msgstr "Este Cache" +msgstr "Este Caché" msgid "This CWConstraint" msgstr "Esta Restricción" @@ -671,7 +708,7 @@ msgstr "Este grupo" msgid "This CWPermission" -msgstr "Esta autorización" +msgstr "Este permiso" msgid "This CWProperty" msgstr "Esta propiedad" @@ -682,6 +719,9 @@ msgid "This CWRelation" msgstr "Esta definición de relación no final" +msgid "This CWUniqueTogetherConstraint" +msgstr "" + msgid "This CWUser" msgstr "Este usuario" @@ -689,7 +729,7 @@ msgstr "Esta dirección electrónica" msgid "This ExternalUri" -msgstr "" +msgstr "Este Uri externo" msgid "This RQLExpression" msgstr "Esta expresión RQL" @@ -698,7 +738,7 @@ msgstr "Este 
estado" msgid "This SubWorkflowExitPoint" -msgstr "" +msgstr "Esta Salida de sub-workflow" msgid "This TrInfo" msgstr "Esta información de transición" @@ -707,13 +747,13 @@ msgstr "Esta transición" msgid "This Workflow" -msgstr "" +msgstr "Este Workflow" msgid "This WorkflowTransition" -msgstr "" +msgstr "Esta transición de Workflow" msgid "This entity type permissions:" -msgstr "" +msgstr "Permisos para este tipo de entidad:" msgid "Time" msgstr "Hora" @@ -734,34 +774,34 @@ msgstr "Transiciones" msgid "UniqueConstraint" -msgstr "" +msgstr "Restricción de Unicidad" msgid "Unreachable objects" -msgstr "" +msgstr "Objetos inaccesibles" msgid "Used by:" msgstr "Utilizado por :" msgid "Web server" -msgstr "" +msgstr "Servidor web" msgid "What's new?" -msgstr "Lo último en el sitio" +msgstr "Lo más reciente" msgid "Workflow" -msgstr "" +msgstr "Workflow" msgid "Workflow history" msgstr "Histórico del Workflow" msgid "WorkflowTransition" -msgstr "" +msgstr "Transición de Workflow" msgid "WorkflowTransition_plural" -msgstr "" +msgstr "Transiciones de Workflow" msgid "Workflow_plural" -msgstr "" +msgstr "work flows" msgid "" "You can either submit a new file using the browse button above, or choose to " @@ -790,14 +830,15 @@ "You have no access to this view or it can not be used to display the current " "data." msgstr "" -"No tiene acceso a esta vista o No se puede utilizare para los datos actuales." +"No tiene permisos para accesar esta vista o No puede utilizarse para " +"desplegar los datos seleccionados." msgid "" "You're not authorized to access this page. If you think you should, please " "contact the site administrator." msgstr "" "Usted no esta autorizado a acceder a esta página. Si Usted cree que \n" -"hay un error, favor de contactar al administrador del sitio." +"hay un error, favor de contactar al administrador del Sistema." #, python-format msgid "[%s supervision] changes summary" @@ -813,10 +854,10 @@ "representan respectivamente la entidad en transición y el usuario actual. " msgid "a URI representing an object in external data store" -msgstr "" +msgstr "una URI designando un objeto en un repositorio de datos externo" msgid "a float is expected" -msgstr "" +msgstr "un número flotante es requerido" msgid "" "a simple cache entity characterized by a name and a validity date. The " @@ -824,18 +865,23 @@ "invalidate the cache (typically in hooks). Also, checkout the AppObject." "get_cache() method." msgstr "" +"un caché simple caracterizado por un nombre y una fecha de validez. Es\n" +"el código de la instancia quién es responsable de actualizar la fecha de\n" +"validez mientras el caché debe ser invalidado (en general en un hook).\n" +"Para recuperar un caché, hace falta utilizar el método\n" +"get_cache(cachename)." 
msgid "about this site" -msgstr "Sobre este Espacio" +msgstr "Información del Sistema" msgid "abstract base class for transitions" -msgstr "" +msgstr "Clase de base abstracta para la transiciones" msgid "action(s) on this selection" -msgstr "acción(es) en esta selección" +msgstr "Acción(es) en esta selección" msgid "actions" -msgstr "acciones" +msgstr "Acciones" msgid "activate" msgstr "Activar" @@ -853,13 +899,13 @@ msgstr "Restricción" msgid "add CWAttribute read_permission RQLExpression subject" -msgstr "" +msgstr "Expresión RQL de lectura" msgid "add CWAttribute relation_type CWRType object" msgstr "Definición de atributo" msgid "add CWAttribute update_permission RQLExpression subject" -msgstr "" +msgstr "Permiso de actualización" msgid "add CWEType add_permission RQLExpression subject" msgstr "Expresión RQL de agregación" @@ -877,20 +923,23 @@ msgstr "Propiedad" msgid "add CWRelation add_permission RQLExpression subject" -msgstr "" +msgstr "Expresión RQL de agregar" msgid "add CWRelation constrained_by CWConstraint subject" msgstr "Restricción" msgid "add CWRelation delete_permission RQLExpression subject" -msgstr "" +msgstr "Expresión RQL de supresión" msgid "add CWRelation read_permission RQLExpression subject" -msgstr "" +msgstr "Expresión RQL de lectura" msgid "add CWRelation relation_type CWRType object" msgstr "Definición de relación" +msgid "add CWUniqueTogetherConstraint constraint_of CWEType object" +msgstr "" + msgid "add CWUser in_group CWGroup object" msgstr "Usuario" @@ -904,10 +953,10 @@ msgstr "Transición en salida" msgid "add State allowed_transition WorkflowTransition subject" -msgstr "" +msgstr "Transición workflow en salida" msgid "add State state_of Workflow object" -msgstr "" +msgstr "Estado" msgid "add Transition condition RQLExpression subject" msgstr "Restricción" @@ -919,39 +968,36 @@ msgstr "Estado de salida" msgid "add Transition transition_of Workflow object" -msgstr "" +msgstr "Transición" msgid "add WorkflowTransition condition RQLExpression subject" -msgstr "" +msgstr "Condición" msgid "add WorkflowTransition subworkflow_exit SubWorkflowExitPoint subject" -msgstr "" +msgstr "Salida de sub-workflow" msgid "add WorkflowTransition transition_of Workflow object" -msgstr "" +msgstr "Transición Workflow" msgctxt "inlined:CWRelation.from_entity.subject" msgid "add a CWEType" -msgstr "" +msgstr "Agregar un tipo de entidad" msgctxt "inlined:CWRelation.to_entity.subject" msgid "add a CWEType" -msgstr "" +msgstr "Agregar un tipo de entidad" msgctxt "inlined:CWRelation.relation_type.subject" msgid "add a CWRType" -msgstr "" +msgstr "Agregar un tipo de relación" msgctxt "inlined:CWUser.use_email.subject" msgid "add a EmailAddress" -msgstr "" +msgstr "Agregar correo electrónico" msgid "add a new permission" msgstr "Agregar una autorización" -msgid "add_perm" -msgstr "Agregado" - # subject and object forms for each relation type # (no object form for final relation types) msgid "add_permission" @@ -961,77 +1007,82 @@ # (no object form for final relation types) msgctxt "CWEType" msgid "add_permission" -msgstr "" +msgstr "Permiso de agregar" msgctxt "CWRelation" msgid "add_permission" -msgstr "" +msgstr "Permiso de agregar" + +msgid "add_permission_object" +msgstr "tiene permiso de agregar" msgctxt "CWGroup" msgid "add_permission_object" -msgstr "" +msgstr "tiene permiso de agregar" msgctxt "RQLExpression" msgid "add_permission_object" -msgstr "" - -msgid "add_permission_object" -msgstr "tiene la autorización para agregar" +msgstr "tiene permiso de agregar" + +msgid 
"add_relation" +msgstr "agregar" #, python-format msgid "added %(etype)s #%(eid)s (%(title)s)" -msgstr "Agregado %(etype)s #%(eid)s (%(title)s)" +msgstr "se agregó %(etype)s #%(eid)s (%(title)s)" #, python-format msgid "" -"added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #%" -"(eidto)s" +"added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" +"%(eidto)s" msgstr "" +"la relación %(rtype)s de %(frometype)s #%(eidfrom)s a %(toetype)s #%(eidto)s " +"ha sido agregada" msgid "addrelated" -msgstr "" +msgstr "Agregar" msgid "address" -msgstr "dirección" +msgstr "correo electrónico" msgctxt "EmailAddress" msgid "address" -msgstr "" +msgstr "correo electrónico" msgid "alias" msgstr "alias" msgctxt "EmailAddress" msgid "alias" -msgstr "" +msgstr "alias" msgid "allow to set a specific workflow for an entity" -msgstr "" +msgstr "permite definir un Workflow específico para una entidad" msgid "allowed transitions from this state" msgstr "transiciones autorizadas desde este estado" msgid "allowed_transition" -msgstr "transición autorizada" +msgstr "transiciones autorizadas" msgctxt "State" msgid "allowed_transition" -msgstr "" +msgstr "transiciones autorizadas" + +msgid "allowed_transition_object" +msgstr "Estados de entrada" msgctxt "BaseTransition" msgid "allowed_transition_object" -msgstr "" +msgstr "transición autorizada de" msgctxt "Transition" msgid "allowed_transition_object" -msgstr "" +msgstr "transición autorizada de" msgctxt "WorkflowTransition" msgid "allowed_transition_object" -msgstr "" - -msgid "allowed_transition_object" -msgstr "Estados de entrada" +msgstr "transición autorizada de" msgid "am/pm calendar (month)" msgstr "calendario am/pm (mes)" @@ -1048,26 +1099,26 @@ msgid "an electronic mail address associated to a short alias" msgstr "una dirección electrónica asociada a este alias" -msgid "an error occured" -msgstr "ha ocurrido un error" - -msgid "an error occured while processing your request" +msgid "an error occurred" +msgstr "Ha ocurrido un error" + +msgid "an error occurred while processing your request" msgstr "un error ocurrió al procesar su demanda" -msgid "an error occured, the request cannot be fulfilled" +msgid "an error occurred, the request cannot be fulfilled" msgstr "un error ha ocurrido, la búsqueda no ha podido ser realizada" msgid "an integer is expected" -msgstr "" +msgstr "un número entero es esperado" msgid "and linked" -msgstr "y ligada" +msgstr "y relacionada" msgid "and/or between different values" msgstr "y/o entre los diferentes valores" msgid "anonymous" -msgstr "Anónimo" +msgstr "anónimo" msgid "application entities" msgstr "Entidades de la aplicación" @@ -1078,8 +1129,8 @@ #, python-format msgid "at least one relation %(rtype)s is required on %(etype)s (%(eid)s)" msgstr "" -"La entidad #%(eid)s de tipo %(etype)s debe necesariamente estar ligada a \n" -"otra via la relación %(rtype)s" +"La entidad #%(eid)s de tipo %(etype)s debe necesariamente tener almenos una " +"relación de tipo %(rtype)s" msgid "attribute" msgstr "Atributo" @@ -1091,7 +1142,7 @@ msgstr "Usuario o contraseña incorrecta" msgid "auto" -msgstr "" +msgstr "Automático" msgid "automatic" msgstr "Automático" @@ -1103,33 +1154,33 @@ msgstr "Url de base" msgid "bookmark has been removed" -msgstr "ha sido eliminado de sus favoritos" +msgstr "Ha sido eliminado de sus favoritos" msgid "bookmark this page" -msgstr "Agregar esta página a sus favoritos" +msgstr "Agregar esta página a los favoritos" msgid "bookmark this search" msgstr "Guardar esta búsqueda" 
msgid "bookmarked_by" -msgstr "está en los favoritos de" +msgstr "está en los Favoritos de" msgctxt "Bookmark" msgid "bookmarked_by" -msgstr "" +msgstr "está en los Favoritos de" + +msgid "bookmarked_by_object" +msgstr "tiene como Favoritos" msgctxt "CWUser" msgid "bookmarked_by_object" -msgstr "" - -msgid "bookmarked_by_object" -msgstr "selecciona en sus favoritos a" +msgstr "tiene como Favoritos" msgid "bookmarks" msgstr "Favoritos" msgid "bookmarks are used to have user's specific internal links" -msgstr "favoritos son usados para que un usuario recorde ligas" +msgstr "los Favoritos son ligas directas a espacios guardados por el usuario" msgid "boxes" msgstr "Cajas" @@ -1138,50 +1189,51 @@ msgstr "Caja de Favoritos" msgid "boxes_bookmarks_box_description" -msgstr "Caja que contiene los espacios favoritos del usuario" +msgstr "Muestra y permite administrar los favoritos del usuario" msgid "boxes_download_box" -msgstr "Caja de download" +msgstr "Configuración de caja de descargas" msgid "boxes_download_box_description" -msgstr "Caja que contiene los elementos bajados" +msgstr "Caja que contiene los elementos descargados" msgid "boxes_edit_box" -msgstr "Caja de acciones" +msgstr "Caja de Acciones" msgid "boxes_edit_box_description" -msgstr "" -"Caja que muestra las diferentes acciones posibles sobre los datos presentes" +msgstr "Muestra las acciones posibles a ejecutar para los datos seleccionados" msgid "boxes_filter_box" msgstr "Filtros" msgid "boxes_filter_box_description" -msgstr "Caja que permite realizar filtros sobre los resultados de una búsqueda" +msgstr "Muestra los filtros aplicables a una búsqueda realizada" msgid "boxes_possible_views_box" msgstr "Caja de Vistas Posibles" msgid "boxes_possible_views_box_description" -msgstr "Caja mostrando las vistas posibles para los datos actuales" +msgstr "Muestra las vistas posibles a aplicar a los datos seleccionados" msgid "boxes_rss" -msgstr "ícono RSS" +msgstr "Ãcono RSS" msgid "boxes_rss_description" -msgstr "El ícono RSS permite recuperar las vistas RSS de los datos presentes" +msgstr "Muestra el ícono RSS para vistas RSS" msgid "boxes_search_box" msgstr "Caja de búsqueda" msgid "boxes_search_box_description" -msgstr "Caja con un espacio de búsqueda simple" +msgstr "" +"Permite realizar una búsqueda simple para cualquier tipo de dato en la " +"aplicación" msgid "boxes_startup_views_box" msgstr "Caja Vistas de inicio" msgid "boxes_startup_views_box_description" -msgstr "Caja mostrando las vistas de inicio de la aplicación" +msgstr "Muestra las vistas de inicio de la aplicación" msgid "bug report sent" msgstr "Reporte de error enviado" @@ -1202,29 +1254,29 @@ msgstr "por" msgid "by relation" -msgstr "por relación" +msgstr "por la relación" msgid "by_transition" -msgstr "" +msgstr "transición" msgctxt "TrInfo" msgid "by_transition" -msgstr "" +msgstr "transición" + +msgid "by_transition_object" +msgstr "cambio de estados" msgctxt "BaseTransition" msgid "by_transition_object" -msgstr "" +msgstr "tiene como información" msgctxt "Transition" msgid "by_transition_object" -msgstr "" +msgstr "tiene como información" msgctxt "WorkflowTransition" msgid "by_transition_object" -msgstr "" - -msgid "by_transition_object" -msgstr "" +msgstr "tiene como información" msgid "calendar" msgstr "mostrar un calendario" @@ -1242,13 +1294,13 @@ msgstr "calendario (anual)" msgid "can not resolve entity types:" -msgstr "" +msgstr "Imposible de interpretar los tipos de entidades:" msgid "can't be changed" -msgstr "" +msgstr "No puede ser modificado" msgid 
"can't be deleted" -msgstr "" +msgstr "No puede ser eliminado" #, python-format msgid "can't change the %s attribute" @@ -1263,11 +1315,11 @@ msgstr "imposible de mostrar los datos, a causa del siguiente error: %s" msgid "can't have multiple exits on the same state" -msgstr "" +msgstr "no puede tener varias salidas en el mismo estado" #, python-format msgid "can't parse %(value)r (expected %(format)s)" -msgstr "" +msgstr "no puede analizar %(value)r (formato requerido : %(format)s)" #, python-format msgid "" @@ -1288,14 +1340,14 @@ msgctxt "CWAttribute" msgid "cardinality" -msgstr "" +msgstr "cardinalidad" msgctxt "CWRelation" msgid "cardinality" -msgstr "" +msgstr "cardinalidad" msgid "category" -msgstr "categoria" +msgstr "categoría" #, python-format msgid "changed state of %(etype)s #%(eid)s (%(title)s)" @@ -1305,56 +1357,60 @@ msgstr "Cambios realizados" msgid "click here to see created entity" -msgstr "ver la entidad creada" +msgstr "Ver la entidad creada" msgid "click on the box to cancel the deletion" msgstr "Seleccione la zona de edición para cancelar la eliminación" +msgid "click to add a value" +msgstr "seleccione para agregar un valor" + +msgid "click to delete this value" +msgstr "seleccione para eliminar este valor" + msgid "click to edit this field" -msgstr "" +msgstr "seleccione para editar este campo" msgid "comment" msgstr "Comentario" msgctxt "TrInfo" msgid "comment" -msgstr "" +msgstr "Comentario" msgid "comment_format" msgstr "Formato" msgctxt "TrInfo" msgid "comment_format" -msgstr "" +msgstr "Formato" msgid "components" msgstr "Componentes" msgid "components_appliname" -msgstr "Título de la aplicación" +msgstr "Nombre de la aplicación" msgid "components_appliname_description" -msgstr "Muestra el título de la aplicación en el encabezado de la página" +msgstr "Muestra el nombre de la aplicación en el encabezado de la página" msgid "components_breadcrumbs" msgstr "Ruta de Navegación" msgid "components_breadcrumbs_description" -msgstr "" -"Muestra un camino que permite identificar el lugar donde se encuentra la " -"página en el sitio" +msgstr "Muestra el lugar donde se encuentra la página actual en el Sistema" msgid "components_etypenavigation" -msgstr "Filtro por tipo" +msgstr "Filtar por tipo" msgid "components_etypenavigation_description" -msgstr "Permite filtrar por tipo de entidad los resultados de búsqueda" +msgstr "Permite filtrar por tipo de entidad los resultados de una búsqueda" msgid "components_help" msgstr "Botón de ayuda" msgid "components_help_description" -msgstr "El botón de ayuda, en el encabezado de página" +msgstr "El botón de ayuda, en el encabezado de la página" msgid "components_loggeduserlink" msgstr "Liga usuario" @@ -1362,10 +1418,10 @@ msgid "components_loggeduserlink_description" msgstr "" "Muestra un enlace hacia el formulario de conexión para los usuarios " -"anónimos, o una caja que contiene las ligas propias a el usuarioconectado. " +"anónimos, o una caja que contiene los enlaces del usuario conectado. 
" msgid "components_logo" -msgstr "Logo" +msgstr "logo" msgid "components_logo_description" msgstr "El logo de la aplicación, en el encabezado de página" @@ -1375,76 +1431,90 @@ msgid "components_navigation_description" msgstr "" -"Componente que permite distribuir sobre varias páginas las búsquedas que " -"arrojan mayores resultados que un número previamente elegido" +"Componente que permite presentar en varias páginas los resultados de " +"búsqueda cuando son mayores a un número predeterminado " msgid "components_rqlinput" -msgstr "Barra rql" +msgstr "Barra RQL" msgid "components_rqlinput_description" -msgstr "La barra de demanda rql, en el encabezado de página" +msgstr "La barra para realizar consultas en RQL, en el encabezado de página" msgid "composite" msgstr "composite" msgctxt "CWRelation" msgid "composite" -msgstr "" +msgstr "composite" msgid "condition" msgstr "condición" msgctxt "BaseTransition" msgid "condition" -msgstr "" +msgstr "condición" msgctxt "Transition" msgid "condition" -msgstr "" +msgstr "condición" msgctxt "WorkflowTransition" msgid "condition" -msgstr "" +msgstr "condición" + +msgid "condition_object" +msgstr "condición de" msgctxt "RQLExpression" msgid "condition_object" -msgstr "" - -msgid "condition_object" msgstr "condición de" msgid "conditions" -msgstr "" +msgstr "condiciones" msgid "config mode" -msgstr "" +msgstr "Modo de configuración" msgid "config type" -msgstr "" +msgstr "Tipo de configuración" msgid "confirm password" msgstr "Confirmar contraseña" msgid "constrained_by" -msgstr "Restricción hecha por" +msgstr "Restricción impuesta por" msgctxt "CWAttribute" msgid "constrained_by" -msgstr "" +msgstr "Restricción impuesta por" msgctxt "CWRelation" msgid "constrained_by" -msgstr "" +msgstr "Restricción impuesta por" + +msgid "constrained_by_object" +msgstr "Restricción de" msgctxt "CWConstraint" msgid "constrained_by_object" -msgstr "" - -msgid "constrained_by_object" -msgstr "ha restringido" +msgstr "Restricción de" msgid "constraint factory" -msgstr "FAbrica de restricciones" +msgstr "Fábrica de restricciones" + +msgid "constraint_of" +msgstr "" + +msgctxt "CWUniqueTogetherConstraint" +msgid "constraint_of" +msgstr "" + +msgid "constraint_of_object" +msgstr "" + +msgctxt "CWEType" +msgid "constraint_of_object" +msgstr "" msgid "constraints" msgstr "Restricciones" @@ -1453,7 +1523,7 @@ msgstr "Restricciones que se aplican a esta relación" msgid "content type" -msgstr "" +msgstr "tipo MIME" msgid "contentnavigation" msgstr "Componentes contextuales" @@ -1462,10 +1532,10 @@ msgstr "Ruta de Navegación" msgid "contentnavigation_breadcrumbs_description" -msgstr "Muestra un camino que permite localizar la página actual en el sitio" +msgstr "Muestra la ruta que permite localizar la página actual en el Sistema" msgid "contentnavigation_metadata" -msgstr "" +msgstr "Metadatos de la Entidad" msgid "contentnavigation_metadata_description" msgstr "" @@ -1475,7 +1545,7 @@ msgid "contentnavigation_prevnext_description" msgstr "" -"Muestra las ligas que permiten pasar de una entidad a otra en lasentidades " +"Muestra las ligas que permiten pasar de una entidad a otra en las entidades " "que implementan la interface \"anterior/siguiente\"." msgid "contentnavigation_seealso" @@ -1483,16 +1553,16 @@ msgid "contentnavigation_seealso_description" msgstr "" -"sección que muestra las entidades ligadas por la relación \"vea también\" , " -"si la entidad soporta esta relación." 
+"sección que muestra las entidades relacionadas por la relación \"vea también" +"\" , si la entidad soporta esta relación." msgid "contentnavigation_wfhistory" msgstr "Histórico del workflow." msgid "contentnavigation_wfhistory_description" msgstr "" -"Sección que ofrece el reporte histórico del workflow para las entidades que " -"posean un workflow." +"Sección que muestra el reporte histórico de las transiciones del workflow. " +"Aplica solo en entidades con workflow." msgid "context" msgstr "Contexto" @@ -1503,6 +1573,10 @@ msgid "context where this component should be displayed" msgstr "Contexto en el cual el componente debe aparecer en el sistema" +msgid "context where this facet should be displayed, leave empty for both" +msgstr "" +"Contexto en el cual esta faceta debe ser mostrada, dejar vacia para ambos" + msgid "control subject entity's relations order" msgstr "Controla el orden de relaciones de la entidad sujeto" @@ -1519,13 +1593,13 @@ msgstr "" "Relación sistema que indica el(los) propietario(s) de una entidad. Esta " "relación pone de manera implícita al propietario en el grupo de propietarios " -"de una entidad" +"de una entidad." msgid "core relation indicating the original creator of an entity" msgstr "Relación sistema que indica el creador de una entidad." msgid "core relation indicating the type of an entity" -msgstr "Relación sistema que indica el tipo de entidad" +msgstr "Relación sistema que indica el tipo de entidad." msgid "" "core relation indicating the types (including specialized types) of an entity" @@ -1546,7 +1620,7 @@ msgstr "Crear una página de inicio" msgid "created on" -msgstr "Creado el" +msgstr "creado el" msgid "created_by" msgstr "creado por" @@ -1574,6 +1648,11 @@ msgid "creating CWRelation (CWRelation relation_type CWRType %(linkto)s)" msgstr "Creación de la relación %(linkto)s" +msgid "" +"creating CWUniqueTogetherConstraint (CWUniqueTogetherConstraint " +"constraint_of CWEType %(linkto)s)" +msgstr "" + msgid "creating CWUser (CWUser in_group CWGroup %(linkto)s)" msgstr "Creación de un usuario para agregar al grupo %(linkto)s" @@ -1582,12 +1661,13 @@ msgid "" "creating RQLExpression (CWAttribute %(linkto)s read_permission RQLExpression)" -msgstr "" +msgstr "creación de una expresión RQL por el derecho de lectura de %(linkto)s" msgid "" "creating RQLExpression (CWAttribute %(linkto)s update_permission " "RQLExpression)" msgstr "" +"creación de una expresión RQL por el derecho de actualización de %(linkto)s" msgid "" "creating RQLExpression (CWEType %(linkto)s add_permission RQLExpression)" @@ -1601,24 +1681,24 @@ msgid "" "creating RQLExpression (CWEType %(linkto)s read_permission RQLExpression)" -msgstr "Creación de una expresión RQL para la autorización de leer %(linkto)s" +msgstr "Creación de una expresión RQL para permitir leer %(linkto)s" msgid "" "creating RQLExpression (CWEType %(linkto)s update_permission RQLExpression)" -msgstr "Creación de una expresión RQL para autorizar actualizar %(linkto)s" +msgstr "Creación de una expresión RQL para permitir actualizar %(linkto)s" msgid "" "creating RQLExpression (CWRelation %(linkto)s add_permission RQLExpression)" -msgstr "" +msgstr "Creación de una expresión RQL para permitir agregar %(linkto)s" msgid "" "creating RQLExpression (CWRelation %(linkto)s delete_permission " "RQLExpression)" -msgstr "" +msgstr "Creación de una expresión RQL para permitir eliminar %(linkto)s" msgid "" "creating RQLExpression (CWRelation %(linkto)s read_permission RQLExpression)" -msgstr "" +msgstr "Creación de una 
expresión RQL para permitir leer %(linkto)s" msgid "creating RQLExpression (Transition %(linkto)s condition RQLExpression)" msgstr "Creación de una expresión RQL para la transición %(linkto)s" @@ -1626,111 +1706,112 @@ msgid "" "creating RQLExpression (WorkflowTransition %(linkto)s condition " "RQLExpression)" -msgstr "" +msgstr "Creación de una expresión RQL para la transición Workflow %(linkto)s" msgid "creating State (State allowed_transition Transition %(linkto)s)" -msgstr "Creación de un estado que pueda ir hacia la transición %(linkto)s" +msgstr "Creación de un estado que puede ir hacia la transición %(linkto)s" msgid "creating State (State state_of Workflow %(linkto)s)" -msgstr "" +msgstr "Creando un Estado del Workflow" msgid "creating State (Transition %(linkto)s destination_state State)" -msgstr "Creación de un estado destinación de la transición %(linkto)s" +msgstr "Creación de un Estado Destinación de la Transición %(linkto)s" msgid "" "creating SubWorkflowExitPoint (WorkflowTransition %(linkto)s " "subworkflow_exit SubWorkflowExitPoint)" -msgstr "" +msgstr "creación de un punto de Salida de la Transición Workflow %(linkto)s" msgid "creating Transition (State %(linkto)s allowed_transition Transition)" -msgstr "Creación de una transición autorizada desde el estado %(linkto)s" +msgstr "Creación de una transición autorizada desde el Estado %(linkto)s" msgid "creating Transition (Transition destination_state State %(linkto)s)" -msgstr "Creación de un transición hacia el estado %(linkto)s" +msgstr "Creación de un transición hacia el Estado %(linkto)s" msgid "creating Transition (Transition transition_of Workflow %(linkto)s)" -msgstr "" +msgstr "Creación de una Transición Workflow %(linkto)s" msgid "" "creating WorkflowTransition (State %(linkto)s allowed_transition " "WorkflowTransition)" msgstr "" +"Creación de una Transición Workflow permitida desde el estado %(linkto)s" msgid "" -"creating WorkflowTransition (WorkflowTransition transition_of Workflow %" -"(linkto)s)" -msgstr "" +"creating WorkflowTransition (WorkflowTransition transition_of Workflow " +"%(linkto)s)" +msgstr "Creación de una Transición Workflow del Workflow %(linkto)s" msgid "creation" msgstr "Creación" msgid "creation date" -msgstr "" +msgstr "Fecha de Creación" msgid "creation time of an entity" msgstr "Fecha de creación de una entidad" msgid "creation_date" -msgstr "fecha de creación" +msgstr "Fecha de Creación" msgid "cstrtype" -msgstr "Tipo de condición" +msgstr "Tipo de restricción" msgctxt "CWConstraint" msgid "cstrtype" -msgstr "" +msgstr "Tipo" + +msgid "cstrtype_object" +msgstr "utilizado por" msgctxt "CWConstraintType" msgid "cstrtype_object" -msgstr "" - -msgid "cstrtype_object" -msgstr "utilizado por" +msgstr "Tipo de restricciones" msgid "csv entities export" msgstr "Exportar entidades en csv" msgid "csv export" -msgstr "Exportar CSV" +msgstr "Exportar en CSV" msgid "ctxtoolbar" -msgstr "" +msgstr "Barra de herramientas" msgid "custom_workflow" -msgstr "" +msgstr "Workflow específico" msgid "custom_workflow_object" -msgstr "" +msgstr "Workflow de" msgid "cwetype-box" -msgstr "" +msgstr "Vista \"caja\"" msgid "cwetype-description" -msgstr "" +msgstr "Descripción" msgid "cwetype-permissions" -msgstr "" +msgstr "Permisos" msgid "cwetype-views" -msgstr "" +msgstr "Vistas" msgid "cwetype-workflow" -msgstr "" +msgstr "Workflow" msgid "cwgroup-main" -msgstr "" +msgstr "Descripción" msgid "cwgroup-permissions" -msgstr "" +msgstr "Permisos" msgid "cwrtype-description" -msgstr "" +msgstr "Descripción" msgid 
"cwrtype-permissions" -msgstr "" +msgstr "Permisos" msgid "cwuri" -msgstr "" +msgstr "Uri Interna" msgid "data directory url" msgstr "Url del repertorio de datos" @@ -1751,37 +1832,38 @@ msgstr "Valor por defecto" msgid "default text format for rich text fields." -msgstr "Formato de texto como opción por defecto para los campos texto" +msgstr "" +"Formato de texto que se utilizará por defecto para los campos de tipo texto" msgid "default user workflow" -msgstr "" +msgstr "Workflow por defecto de los usuarios" msgid "default value" -msgstr "" +msgstr "Valor por defecto" msgid "default workflow for an entity type" -msgstr "" +msgstr "Workflow por defecto para un tipo de entidad" msgid "default_workflow" -msgstr "" +msgstr "Workflow por defecto" msgctxt "CWEType" msgid "default_workflow" -msgstr "" +msgstr "Workflow por defecto" + +msgid "default_workflow_object" +msgstr "Workflow por defecto de" msgctxt "Workflow" msgid "default_workflow_object" -msgstr "" - -msgid "default_workflow_object" -msgstr "" +msgstr "Workflow por defecto de" msgid "defaultval" msgstr "Valor por defecto" msgctxt "CWAttribute" msgid "defaultval" -msgstr "" +msgstr "Valor por defecto" msgid "define a CubicWeb user" msgstr "Define un usuario CubicWeb" @@ -1793,14 +1875,21 @@ "define a final relation: link a final relation type from a non final entity " "to a final entity type. used to build the instance schema" msgstr "" +"Define una relación final: liga un tipo de relación final desde una entidad " +"NO final hacia un tipo de entidad final. Se usa para crear el esquema de la " +"instancia." msgid "" "define a non final relation: link a non final relation type from a non final " "entity to a non final entity type. used to build the instance schema" msgstr "" +"Define una relación NO final: liga un tipo de relación NO final desde una " +"entidad NO final hacia un tipo de entidad NO final. Se usa para crear el " +"esquema de la instancia." msgid "define a relation type, used to build the instance schema" msgstr "" +"Define un tipo de relación, usado para construir el esquema de la instancia." msgid "define a rql expression used to define permissions" msgstr "Expresión RQL utilizada para definir los derechos de acceso" @@ -1813,16 +1902,20 @@ msgid "define an entity type, used to build the instance schema" msgstr "" +"Define un tipo de entidad, usado para construir el esquema de la instancia." msgid "define how we get out from a sub-workflow" +msgstr "Define como salir de un sub-Workflow" + +msgid "defines a sql-level multicolumn unique index" msgstr "" msgid "" "defines what's the property is applied for. You must select this first to be " "able to set value" msgstr "" -"Define a que se aplica la propiedad . Usted debe seleccionar esto antes de " -"poder fijar un valor" +"Define a que se aplica la propiedad . 
Debe de seleccionar esto antes de " +"establecer un valor" msgid "delete" msgstr "Eliminar" @@ -1836,30 +1929,27 @@ msgid "delete this relation" msgstr "Eliminar esta relación" -msgid "delete_perm" -msgstr "Eliminar" - msgid "delete_permission" -msgstr "Autorización de eliminar" +msgstr "Permiso de eliminar" msgctxt "CWEType" msgid "delete_permission" -msgstr "" +msgstr "Permiso de eliminar" msgctxt "CWRelation" msgid "delete_permission" -msgstr "" +msgstr "Permiso de eliminar" + +msgid "delete_permission_object" +msgstr "posee permiso para eliminar" msgctxt "CWGroup" msgid "delete_permission_object" -msgstr "" +msgstr "puede eliminar" msgctxt "RQLExpression" msgid "delete_permission_object" -msgstr "" - -msgid "delete_permission_object" -msgstr "posee la autorización de eliminar" +msgstr "puede eliminar" #, python-format msgid "deleted %(etype)s #%(eid)s (%(title)s)" @@ -1867,96 +1957,100 @@ #, python-format msgid "" -"deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #%" -"(eidto)s" +"deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" +"%(eidto)s" msgstr "" +"La relación %(rtype)s de %(frometype)s #%(eidfrom)s a %(toetype)s #%(eidto)s " +"ha sido suprimida." msgid "depends on the constraint type" -msgstr "Depende del tipo de condición" - +msgstr "Depende del tipo de restricción" + +msgid "description" +msgstr "Descripción" + +msgctxt "BaseTransition" +msgid "description" +msgstr "Descripción" + +msgctxt "CWAttribute" msgid "description" msgstr "Descripción" msgctxt "CWEType" msgid "description" -msgstr "" +msgstr "Descripción" + +msgctxt "CWRType" +msgid "description" +msgstr "Descripción" msgctxt "CWRelation" msgid "description" -msgstr "" - -msgctxt "Workflow" +msgstr "Descripción" + +msgctxt "State" msgid "description" -msgstr "" - -msgctxt "CWAttribute" -msgid "description" -msgstr "" +msgstr "Descripción" msgctxt "Transition" msgid "description" -msgstr "" +msgstr "Descripción" + +msgctxt "Workflow" +msgid "description" +msgstr "Descripción" msgctxt "WorkflowTransition" msgid "description" -msgstr "" - -msgctxt "State" -msgid "description" -msgstr "" - -msgctxt "CWRType" -msgid "description" -msgstr "" +msgstr "Descripción" + +msgid "description_format" +msgstr "Formato" msgctxt "BaseTransition" -msgid "description" -msgstr "" - +msgid "description_format" +msgstr "Formato" + +msgctxt "CWAttribute" msgid "description_format" msgstr "Formato" msgctxt "CWEType" msgid "description_format" -msgstr "" +msgstr "Formato" + +msgctxt "CWRType" +msgid "description_format" +msgstr "Formato" msgctxt "CWRelation" msgid "description_format" -msgstr "" +msgstr "Formato" + +msgctxt "State" +msgid "description_format" +msgstr "Formato" + +msgctxt "Transition" +msgid "description_format" +msgstr "Formato" msgctxt "Workflow" msgid "description_format" -msgstr "" - -msgctxt "CWAttribute" -msgid "description_format" -msgstr "" - -msgctxt "Transition" -msgid "description_format" -msgstr "" +msgstr "Formato" msgctxt "WorkflowTransition" msgid "description_format" -msgstr "" - -msgctxt "State" -msgid "description_format" -msgstr "" - -msgctxt "CWRType" -msgid "description_format" -msgstr "" - -msgctxt "BaseTransition" -msgid "description_format" -msgstr "" +msgstr "Formato" msgid "destination state for this transition" -msgstr "Estado destino para esta transición" +msgstr "Estados accesibles para esta transición" msgid "destination state must be in the same workflow as our parent transition" msgstr "" +"El estado de destino debe pertenecer al mismo Workflow 
que la transición " +"padre." msgid "destination state of a transition" msgstr "Estado destino de una transición" @@ -1965,24 +2059,27 @@ "destination state. No destination state means that transition should go back " "to the state from which we've entered the subworkflow." msgstr "" - +"Estado destino de la transición. Si el Estado destino no ha sido " +"especificado, la transición regresará hacia el estado que tenía la entidad " +"al entrar en el Sub-Workflow." + +msgid "destination_state" +msgstr "Estado destino" + +msgctxt "SubWorkflowExitPoint" msgid "destination_state" msgstr "Estado destino" msgctxt "Transition" msgid "destination_state" -msgstr "" - -msgctxt "SubWorkflowExitPoint" -msgid "destination_state" -msgstr "" +msgstr "Estado destino" + +msgid "destination_state_object" +msgstr "Destino de" msgctxt "State" msgid "destination_state_object" -msgstr "" - -msgid "destination_state_object" -msgstr "Destino de" +msgstr "Estado final de" msgid "detach attached file" msgstr "soltar el archivo existente" @@ -1993,11 +2090,17 @@ msgid "display order of the component" msgstr "Orden de aparición del componente" +msgid "display order of the facet" +msgstr "Orden de aparición de la faceta" + msgid "display the box or not" -msgstr "Mostrar la caja o no" +msgstr "Mostrar o no la caja" msgid "display the component or not" -msgstr "Mostrar el componente o no" +msgstr "Mostrar o no el componente" + +msgid "display the facet or not" +msgstr "Mostrar o no la faceta" msgid "" "distinct label to distinguate between other permission entity of the same " @@ -2011,19 +2114,19 @@ #, python-format msgid "download %s" -msgstr "" +msgstr "Descargar %s" msgid "download icon" msgstr "ícono de descarga" msgid "download schema as owl" -msgstr "Descargar esquema en OWL" +msgstr "Descargar esquema en formato OWL" msgid "edit bookmarks" msgstr "Editar favoritos" msgid "edit canceled" -msgstr "" +msgstr "Edición cancelada" msgid "edit the index page" msgstr "Modificar la página de inicio" @@ -2031,9 +2134,6 @@ msgid "editable-table" msgstr "Tabla modificable" -msgid "edition" -msgstr "Edición" - msgid "eid" msgstr "eid" @@ -2044,7 +2144,10 @@ msgstr "Mensajes enviados con éxito" msgid "embed" -msgstr "Incrustrado" +msgstr "Incrustado" + +msgid "embedded html" +msgstr "Html incrustado" msgid "embedding this url is forbidden" msgstr "La inclusión de este url esta prohibida" @@ -2053,28 +2156,28 @@ msgstr "Entidades eliminadas" msgid "entity copied" -msgstr "entidad copiada" +msgstr "Entidad copiada" msgid "entity created" -msgstr "entidad creada" +msgstr "Entidad creada" msgid "entity creation" -msgstr "" +msgstr "Creación de entidad" msgid "entity deleted" msgstr "Entidad eliminada" msgid "entity deletion" -msgstr "" +msgstr "Eliminación de entidad" msgid "entity edited" -msgstr "entidad modificada" +msgstr "Entidad modificada" msgid "entity has no workflow set" -msgstr "" +msgstr "La entidad no tiene Workflow" msgid "entity linked" -msgstr "entidad asociada" +msgstr "Entidad asociada" msgid "entity type" msgstr "Tipo de entidad" @@ -2087,10 +2190,10 @@ "avanzada" msgid "entity types which may use this workflow" -msgstr "" +msgstr "Tipos de entidades que pueden utilizar este Workflow" msgid "entity update" -msgstr "" +msgstr "Actualización de la Entidad" msgid "error while embedding page" msgstr "Error durante la inclusión de la página" @@ -2101,7 +2204,7 @@ msgid "error while publishing ReST text" msgstr "" -"Se ha producido un error durante la interpretación del texto en formatoReST" +"Se ha producido un error 
durante la interpretación del texto en formato ReST" #, python-format msgid "error while querying source %s, some data may be missing" @@ -2110,20 +2213,20 @@ "datos visibles se encuentren incompletos" msgid "eta_date" -msgstr "fecha de fin" - -msgid "exit state must a subworkflow state" -msgstr "" +msgstr "Fecha de fin" + +msgid "exit state must be a subworkflow state" +msgstr "El estado de salida debe de ser un estado del Sub-Workflow" msgid "exit_point" -msgstr "" +msgstr "Estado de Salida" msgid "exit_point_object" -msgstr "" +msgstr "Estado de Salida de" #, python-format msgid "exiting from subworkflow %s" -msgstr "" +msgstr "Salida del subworkflow %s" msgid "expected:" msgstr "Previsto :" @@ -2133,14 +2236,14 @@ msgctxt "RQLExpression" msgid "expression" -msgstr "" +msgstr "RQL de la expresión" msgid "exprtype" msgstr "Tipo de la expresión" msgctxt "RQLExpression" msgid "exprtype" -msgstr "" +msgstr "Tipo" msgid "external page" msgstr "Página externa" @@ -2149,71 +2252,71 @@ msgstr "Caja de facetas" msgid "facets_created_by-facet" -msgstr "faceta \"creada por\"" +msgstr "Faceta \"creada por\"" msgid "facets_created_by-facet_description" -msgstr "faceta creado por" +msgstr "Faceta creada por" msgid "facets_cwfinal-facet" -msgstr "faceta \"final\"" +msgstr "Faceta \"final\"" msgid "facets_cwfinal-facet_description" -msgstr "faceta para las entidades \"finales\"" +msgstr "Faceta para las entidades \"finales\"" msgid "facets_etype-facet" -msgstr "faceta \"es de tipo\"" +msgstr "Faceta \"es de tipo\"" msgid "facets_etype-facet_description" -msgstr "faceta es de tipo" +msgstr "Faceta es de tipo" msgid "facets_has_text-facet" -msgstr "faceta \"contiene el texto\"" +msgstr "Faceta \"contiene el texto\"" msgid "facets_has_text-facet_description" -msgstr "faceta contiene el texto" +msgstr "Faceta contiene el texto" msgid "facets_in_group-facet" -msgstr "faceta \"forma parte del grupo\"" +msgstr "Faceta \"forma parte del grupo\"" msgid "facets_in_group-facet_description" -msgstr "faceta en grupo" +msgstr "Faceta en grupo" msgid "facets_in_state-facet" -msgstr "faceta \"en el estado\"" +msgstr "Faceta \"en el estado\"" msgid "facets_in_state-facet_description" -msgstr "faceta en el estado" +msgstr "Faceta en el estado" #, python-format msgid "failed to uniquify path (%s, %s)" -msgstr "" +msgstr "No se pudo obtener un dato único (%s, %s)" msgid "february" msgstr "Febrero" msgid "file tree view" -msgstr "File Vista Arborescencia" +msgstr "Arborescencia (archivos)" msgid "final" msgstr "Final" msgctxt "CWEType" msgid "final" -msgstr "" +msgstr "Final" msgctxt "CWRType" msgid "final" -msgstr "" +msgstr "Final" msgid "first name" -msgstr "" +msgstr "Nombre" msgid "firstname" msgstr "Nombre" msgctxt "CWUser" msgid "firstname" -msgstr "" +msgstr "Nombre" msgid "foaf" msgstr "Amigo de un Amigo, FOAF" @@ -2223,24 +2326,24 @@ #, python-format msgid "follow this link for more information on this %s" -msgstr "" +msgstr "Seleccione esta liga para obtener mayor información sobre %s" msgid "follow this link if javascript is deactivated" -msgstr "" +msgstr "Seleccione esta liga si javascript esta desactivado" msgid "for_user" msgstr "Para el usuario" msgctxt "CWProperty" msgid "for_user" -msgstr "" +msgstr "Propiedad del Usuario" + +msgid "for_user_object" +msgstr "Utiliza las propiedades" msgctxt "CWUser" msgid "for_user_object" -msgstr "" - -msgid "for_user_object" -msgstr "Utiliza las propiedades" +msgstr "Tiene como preferencia" msgid "friday" msgstr "Viernes" @@ -2257,35 +2360,35 @@ msgctxt "CWAttribute" 
msgid "from_entity" -msgstr "" +msgstr "Atributo de la entidad" msgctxt "CWRelation" msgid "from_entity" -msgstr "" - -msgctxt "CWEType" -msgid "from_entity_object" -msgstr "" +msgstr "Relación de la entidad" msgid "from_entity_object" msgstr "Relación sujeto" +msgctxt "CWEType" +msgid "from_entity_object" +msgstr "Entidad de" + msgid "from_interval_start" -msgstr "" +msgstr "De" msgid "from_state" -msgstr "De el estado" +msgstr "Del Estado" msgctxt "TrInfo" msgid "from_state" -msgstr "" +msgstr "Estado de Inicio" + +msgid "from_state_object" +msgstr "Transiciones desde este estado" msgctxt "State" msgid "from_state_object" -msgstr "" - -msgid "from_state_object" -msgstr "Transiciones desde este estado" +msgstr "Estado de Inicio de" msgid "full text or RQL query" msgstr "Texto de búsqueda o demanda RQL" @@ -2295,25 +2398,27 @@ msgctxt "CWRType" msgid "fulltext_container" -msgstr "" +msgstr "Objeto a indexar" msgid "fulltextindexed" msgstr "Indexación de texto" msgctxt "CWAttribute" msgid "fulltextindexed" -msgstr "" +msgstr "Texto indexado" msgid "generic plot" -msgstr "Trazado de curbas estándares" +msgstr "Gráfica Genérica" msgid "generic relation to link one entity to another" -msgstr "relación generica para ligar entidades" +msgstr "Relación genérica para ligar entidades" msgid "" "generic relation to specify that an external entity represent the same " "object as a local one: http://www.w3.org/TR/owl-ref/#sameAs-def" msgstr "" +"Relación genérica que indicar que una entidad es idéntica a otro recurso web " +"(ver http://www.w3.org/TR/owl-ref/#sameAs-def)." msgid "go back to the index page" msgstr "Regresar a la página de inicio" @@ -2321,25 +2426,34 @@ msgid "granted to groups" msgstr "Otorgado a los grupos" -msgid "graphical representation of the instance'schema" -msgstr "" +#, python-format +msgid "graphical representation of %(appid)s data model" +msgstr "Representación gráfica del modelo de datos de %(appid)s" #, python-format -msgid "graphical schema for %s" -msgstr "Gráfica del esquema por %s" +msgid "" +"graphical representation of the %(etype)s entity type from %(appid)s data " +"model" +msgstr "" +"Representación gráfica del modelo de datos para el tipo de entidad %(etype)s " +"de %(appid)s" #, python-format -msgid "graphical workflow for %s" -msgstr "Gráfica del workflow por %s" +msgid "" +"graphical representation of the %(rtype)s relation type from %(appid)s data " +"model" +msgstr "" +"Representación gráfica del modelo de datos para el tipo de relación " +"%(rtype)s de %(appid)s" msgid "group in which a user should be to be allowed to pass this transition" -msgstr "Grupo en el cual el usuario debe estar para poder pasar la transición" +msgstr "Grupo en el cual el usuario debe estar lograr la transición" msgid "groups" msgstr "Grupos" msgid "groups grant permissions to the user" -msgstr "Los grupos otorgan las autorizaciones al usuario" +msgstr "Los grupos otorgan los permisos al usuario" msgid "groups to which the permission is granted" msgstr "Grupos quienes tienen otorgada esta autorización" @@ -2363,42 +2477,43 @@ "how to format date and time in the ui (\"man strftime\" for format " "description)" msgstr "" -"Como formatear la fecha en la interface (\"man strftime\" por la descripción " -"del formato)" +"Formato de fecha y hora que se utilizará por defecto en la interfaz (\"man " +"strftime\" para mayor información del formato)" msgid "how to format date in the ui (\"man strftime\" for format description)" msgstr "" -"Como formatear la fecha en la interface (\"man 
strftime\" por la descripción " -"del formato)" +"Formato de fecha que se utilizará por defecto en la interfaz (\"man strftime" +"\" para mayor información del formato)" msgid "how to format float numbers in the ui" -msgstr "Como formatear los números flotantes en la interface" +msgstr "" +"Formato de números flotantes que se utilizará por defecto en la interfaz" msgid "how to format time in the ui (\"man strftime\" for format description)" msgstr "" -"Como formatear la hora en la interface (\"man strftime\" por la descripción " -"del formato)" +"Formato de hora que se utilizará por defecto en la interfaz (\"man strftime" +"\" para mayor información del formato)" msgid "i18n_bookmark_url_fqs" -msgstr "" +msgstr "Parámetros" msgid "i18n_bookmark_url_path" -msgstr "" +msgstr "Ruta" msgid "i18n_login_popup" msgstr "Identificarse" msgid "i18ncard_*" -msgstr "" +msgstr "0..n" msgid "i18ncard_+" -msgstr "" +msgstr "1..n" msgid "i18ncard_1" -msgstr "" +msgstr "1" msgid "i18ncard_?" -msgstr "" +msgstr "0..1" msgid "i18nprevnext_next" msgstr "Siguiente" @@ -2416,7 +2531,7 @@ msgstr "ID del template principal" msgid "identical to" -msgstr "" +msgstr "Idéntico a" msgid "identical_to" msgstr "idéntico a" @@ -2432,7 +2547,7 @@ "entity (the container)." msgstr "" "Si el texto indexado de la entidad sujeto/objeto debe ser agregado a la " -"entidad a el otro extremo de la relación (el contenedor)." +"entidad al otro extremo de la relación (el contenedor)." msgid "image" msgstr "Imagen" @@ -2442,26 +2557,26 @@ msgctxt "CWUser" msgid "in_group" -msgstr "" +msgstr "Forma parte del grupo" + +msgid "in_group_object" +msgstr "Miembros" msgctxt "CWGroup" msgid "in_group_object" -msgstr "" - -msgid "in_group_object" -msgstr "Miembros" +msgstr "Contiene los usuarios" msgid "in_state" -msgstr "estado" +msgstr "Estado" msgid "in_state_object" -msgstr "estado de" +msgstr "Estado de" msgid "incontext" msgstr "En el contexto" msgid "incorrect captcha value" -msgstr "" +msgstr "Valor del Captcha incorrecto" #, python-format msgid "incorrect value (%(value)s) for type \"%(type)s\"" @@ -2475,7 +2590,7 @@ msgctxt "CWAttribute" msgid "indexed" -msgstr "" +msgstr "Indexado" msgid "indicate the current state of an entity" msgstr "Indica el estado actual de una entidad" @@ -2487,51 +2602,51 @@ "Indica cual estado deberá ser utilizado por defecto al crear una entidad" msgid "info" -msgstr "" +msgstr "Información del Sistema" #, python-format msgid "initial estimation %s" msgstr "Estimación inicial %s" msgid "initial state for this workflow" -msgstr "" +msgstr "Estado inicial para este Workflow" msgid "initial_state" -msgstr "estado inicial" +msgstr "Estado inicial" msgctxt "Workflow" msgid "initial_state" -msgstr "" +msgstr "Estado inicial" + +msgid "initial_state_object" +msgstr "Estado inicial de" msgctxt "State" msgid "initial_state_object" -msgstr "" - -msgid "initial_state_object" -msgstr "es el estado inicial de" +msgstr "Estado inicial de" msgid "inlined" -msgstr "Puesto en línea" +msgstr "Inlined" msgctxt "CWRType" msgid "inlined" -msgstr "" +msgstr "Inlined" msgid "instance home" -msgstr "" +msgstr "Repertorio de la Instancia" msgid "instance schema" -msgstr "" +msgstr "Esquema de la Instancia" msgid "internal entity uri" -msgstr "" +msgstr "Uri Interna" msgid "internationalizable" msgstr "Internacionalizable" msgctxt "CWAttribute" msgid "internationalizable" -msgstr "" +msgstr "Internacionalizable" #, python-format msgid "invalid action %r" @@ -2539,7 +2654,7 @@ #, python-format msgid "invalid value %(value)s, it must 
be one of %(choices)s" -msgstr "" +msgstr "Valor %(value)s incorrecto, debe estar entre %(choices)s" msgid "is" msgstr "es" @@ -2567,7 +2682,7 @@ "is this relation physically inlined? you should know what you're doing if " "you are changing this!" msgstr "" -"Es esta relación puesta en línea en la base de datos ? Usted debe saber lo " +"Es esta relación estilo INLINED en la base de datos ? Usted debe saber lo " "que hace si cambia esto !" msgid "is_instance_of" @@ -2593,35 +2708,35 @@ msgctxt "CWPermission" msgid "label" -msgstr "" +msgstr "Etiqueta" msgid "language of the user interface" -msgstr "Idioma para la interface del usuario" +msgstr "Idioma que se utilizará por defecto en la interfaz usuario" msgid "last connection date" -msgstr "Ultima fecha de conexión" +msgstr "Ultima conexión" msgid "last login time" -msgstr "" +msgstr "Ultima conexión" msgid "last name" -msgstr "" +msgstr "Apellido" msgid "last usage" -msgstr "" +msgstr "Ultimo uso" msgid "last_login_time" msgstr "Ultima fecha de conexión" msgctxt "CWUser" msgid "last_login_time" -msgstr "" +msgstr "Ultima conexión" msgid "latest modification time of an entity" msgstr "Fecha de la última modificación de una entidad " msgid "latest update on" -msgstr "actualizado el" +msgstr "Actualizado el" msgid "left" msgstr "izquierda" @@ -2630,56 +2745,56 @@ "link a permission to the entity. This permission should be used in the " "security definition of the entity's type to be useful." msgstr "" -"relaciónar una autorización con la entidad. Este autorización debe ser usada " -"en la definición de la entidad para ser utíl." +"Relacionar un permiso con la entidad. Este permiso debe ser integrado en la " +"definición de seguridad de la entidad para poder ser utilizado." msgid "" "link a property to the user which want this property customization. Unless " "you're a site manager, this relation will be handled automatically." msgstr "" -"Liga una propiedad a el usuario que desea esta personalización. Salvo que " -"usted sea un administrador del sistema, esta relación es gestionada " -"automáticamente." +"Liga una propiedad al usuario que desea esta personalización. Salvo que " +"usted sea un administrador del sistema, esta relación será administrada de " +"forma automática." 
msgid "link a relation definition to its object entity type" -msgstr "liga una definición de relación a su tipo de entidad objeto" +msgstr "Liga una definición de relación a su tipo de entidad objeto" msgid "link a relation definition to its relation type" -msgstr "liga una definición de relación a su tipo de relación" +msgstr "Liga una definición de relación a su tipo de relación" msgid "link a relation definition to its subject entity type" -msgstr "liga una definición de relación a su tipo de entidad" +msgstr "Liga una definición de relación a su tipo de entidad" msgid "link a state to one or more workflow" -msgstr "" +msgstr "Liga un estado a uno o más Workflow" msgid "link a transition information to its object" -msgstr "liga una transcion de informacion a los objetos asociados" +msgstr "Liga una transición de informacion hacia los objetos asociados" msgid "link a transition to one or more workflow" -msgstr "" +msgstr "Liga una transición a uno o más Workflow" msgid "link a workflow to one or more entity type" -msgstr "" +msgstr "Liga un Workflow a uno a más tipos de entidad" msgid "list" msgstr "Lista" msgid "log in" -msgstr "Identificarse" +msgstr "Acceder" msgid "log out first" -msgstr "" +msgstr "Desconéctese primero" msgid "login" -msgstr "Clave de acesso" +msgstr "Usuario" msgctxt "CWUser" msgid "login" -msgstr "" +msgstr "Usuario" msgid "login or email" -msgstr "Clave de acesso o dirección de correo" +msgstr "Usuario o dirección de correo" msgid "login_action" msgstr "Ingresa tus datos" @@ -2695,144 +2810,147 @@ msgstr "Informaciones Generales" msgid "mainvars" -msgstr "Principales variables" +msgstr "Variables principales" msgctxt "RQLExpression" msgid "mainvars" -msgstr "" +msgstr "Variables principales" msgid "manage" -msgstr "Administracion del Sitio" +msgstr "Administración Sistema" msgid "manage bookmarks" -msgstr "Administra tus favoritos" +msgstr "Gestión de favoritos" msgid "manage permissions" -msgstr "Administración de Autorizaciones" +msgstr "Gestión de permisos" msgid "manage security" -msgstr "Administración de la Seguridad" +msgstr "Gestión de seguridad" msgid "managers" -msgstr "editores" +msgstr "Administradores" msgid "mandatory relation" -msgstr "" +msgstr "Relación obligatoria" msgid "march" msgstr "Marzo" msgid "maximum number of characters in short description" -msgstr "Numero maximo de caracteres en las descripciones cortas" +msgstr "Máximo de caracteres en las descripciones cortas" msgid "maximum number of entities to display in related combo box" -msgstr "Numero maximo de entidades a mostrar en las listas dinamicas" +msgstr "Máximo de entidades a mostrar en las listas dinámicas" msgid "maximum number of objects displayed by page of results" -msgstr "Numero maximo de objetos mostrados por pagina de resultados" +msgstr "Máximo de elementos mostrados por página de resultados" msgid "maximum number of related entities to display in the primary view" -msgstr "Numero maximo de entidades ligadas a mostrar en la vista primaria" +msgstr "Máximo de entidades relacionadas a mostrar en la vista primaria" msgid "may" msgstr "Mayo" msgid "memory leak debugging" -msgstr "" +msgstr "depuración (debugging) de fuga de memoria" msgid "milestone" msgstr "Milestone" #, python-format msgid "missing parameters for entity %s" -msgstr "Parametros faltantes a la entidad %s" +msgstr "Parámetros faltantes a la entidad %s" + +msgid "modification" +msgstr "" msgid "modification_date" -msgstr "Fecha de modificacion" +msgstr "Fecha de modificación" msgid "modify" msgstr "Modificar" 
msgid "monday" -msgstr "Lundi" +msgstr "Lunes" msgid "more actions" -msgstr "mas acciones" +msgstr "Más acciones" msgid "more info about this workflow" -msgstr "" +msgstr "Más información acerca de este workflow" msgid "multiple edit" -msgstr "Edicion multiple" +msgstr "Edición multiple" msgid "my custom search" -msgstr "Mi busqueda personalizada" - +msgstr "Mi búsqueda personalizada" + +msgid "name" +msgstr "Nombre" + +msgctxt "BaseTransition" +msgid "name" +msgstr "Nombre" + +msgctxt "CWCache" +msgid "name" +msgstr "Nombre" + +msgctxt "CWConstraintType" msgid "name" msgstr "Nombre" msgctxt "CWEType" msgid "name" -msgstr "" +msgstr "Nombre" + +msgctxt "CWGroup" +msgid "name" +msgstr "Nombre" + +msgctxt "CWPermission" +msgid "name" +msgstr "Nombre" + +msgctxt "CWRType" +msgid "name" +msgstr "Nombre" + +msgctxt "State" +msgid "name" +msgstr "Nombre" msgctxt "Transition" msgid "name" -msgstr "" +msgstr "Nombre" msgctxt "Workflow" msgid "name" -msgstr "" - -msgctxt "CWGroup" -msgid "name" -msgstr "" - -msgctxt "CWConstraintType" -msgid "name" -msgstr "" +msgstr "Nombre" msgctxt "WorkflowTransition" msgid "name" -msgstr "" - -msgctxt "State" -msgid "name" -msgstr "" - -msgctxt "CWPermission" -msgid "name" -msgstr "" - -msgctxt "CWRType" -msgid "name" -msgstr "" - -msgctxt "BaseTransition" -msgid "name" -msgstr "" - -msgctxt "CWCache" -msgid "name" -msgstr "" +msgstr "Nombre" msgid "name of the cache" -msgstr "Nombre del Cache" +msgstr "Nombre del Caché" msgid "" "name of the main variables which should be used in the selection if " "necessary (comma separated)" msgstr "" -"Nombre de las variables principales que deberian se utilizadas en la " -"selecciónde ser necesario (separarlas con comas)" +"Nombre de las variables principales que deberían ser utilizadas en la " +"selección de ser necesario (separarlas con comas)" msgid "name or identifier of the permission" -msgstr "Nombre o indentificador de la autorización" +msgstr "Nombre o identificador del permiso" msgid "navbottom" -msgstr "Pie de pagina" +msgstr "Pie de página" msgid "navcontentbottom" -msgstr "Pie de pagina del contenido principal" +msgstr "Pie de página del contenido principal" msgid "navcontenttop" msgstr "Encabezado" @@ -2841,21 +2959,17 @@ msgstr "Navegación" msgid "navigation.combobox-limit" -msgstr "" - -# msgstr "Navegación: numero maximo de elementos en una caja de elección (combobox)" +msgstr "ComboBox" + msgid "navigation.page-size" -msgstr "" - -# msgstr "Navegación: numero maximo de elementos por pagina" +msgstr "Paginación" + msgid "navigation.related-limit" -msgstr "" - -# msgstr "Navegación: numero maximo de elementos relacionados" +msgstr "Entidades relacionadas" + msgid "navigation.short-line-size" -msgstr "" - -# msgstr "Navegación: numero maximo de caracteres en una linéa corta" +msgstr "Descripción corta" + msgid "navtop" msgstr "Encabezado del contenido principal" @@ -2866,331 +2980,349 @@ msgstr "Siguientes resultados" msgid "no" -msgstr "no" +msgstr "No" msgid "no associated permissions" -msgstr "no autorización relacionada" +msgstr "No existe permiso asociado" #, python-format msgid "no edited fields specified for entity %s" -msgstr "" +msgstr "Ningún campo editable especificado para la entidad %s" + +msgid "no related entity" +msgstr "No posee entidad asociada" msgid "no related project" -msgstr "no hay proyecto relacionado" +msgstr "No tiene proyecto relacionado" msgid "no repository sessions found" -msgstr "" +msgstr "Ninguna sesión encontrada" msgid "no selected entities" -msgstr "no hay entidades 
seleccionadas" +msgstr "No hay entidades seleccionadas" #, python-format msgid "no such entity type %s" -msgstr "el tipo de entidad '%s' no existe" +msgstr "El tipo de entidad '%s' no existe" msgid "no version information" -msgstr "no información de version" +msgstr "No existe la información de version" msgid "no web sessions found" -msgstr "" +msgstr "Ninguna sesión web encontrada" msgid "normal" -msgstr "" +msgstr "Normal" msgid "not authorized" -msgstr "no autorizado" +msgstr "No autorizado" msgid "not selected" -msgstr "no seleccionado" +msgstr "No seleccionado" msgid "november" -msgstr "noviembre" +msgstr "Noviembre" msgid "object" -msgstr "objeto" +msgstr "Objeto" msgid "object type" -msgstr "" +msgstr "Tipo de Objeto" msgid "october" -msgstr "octubre" +msgstr "Octubre" msgid "one month" -msgstr "un mes" +msgstr "Un mes" msgid "one week" -msgstr "una semana" +msgstr "Una semana" msgid "oneline" -msgstr "una linea" +msgstr "En una línea" msgid "only select queries are authorized" -msgstr "solo estan permitidas consultas de lectura" +msgstr "Solo están permitidas consultas de lectura" msgid "open all" -msgstr "abrir todos" +msgstr "Abrir todos" msgid "opened sessions" -msgstr "" +msgstr "Sesiones abiertas" msgid "opened web sessions" -msgstr "" +msgstr "Sesiones Web abiertas" msgid "options" -msgstr "" +msgstr "Opciones" msgid "order" -msgstr "orden" +msgstr "Orden" msgid "ordernum" -msgstr "orden" +msgstr "Orden" msgctxt "CWAttribute" msgid "ordernum" -msgstr "" +msgstr "Número de Orden" msgctxt "CWRelation" msgid "ordernum" -msgstr "" +msgstr "Número de Orden" msgid "owl" -msgstr "owl" +msgstr "OWL" msgid "owlabox" -msgstr "owlabox" +msgstr "OWLabox" msgid "owned_by" -msgstr "pertenece a" +msgstr "Pertenece a" msgid "owned_by_object" -msgstr "pertenece al objeto" +msgstr "Pertenece al objeto" msgid "owners" -msgstr "proprietarios" +msgstr "Proprietarios" msgid "ownership" -msgstr "pertenencia" +msgstr "Propiedad" msgid "ownerships have been changed" -msgstr "la pertenencia ha sido modificada" +msgstr "Derechos de propiedad modificados" msgid "pageid-not-found" -msgstr "pagina no encontrada." +msgstr "Página no encontrada." 
msgid "password" -msgstr "Clave de acceso" +msgstr "Contraseña" msgid "password and confirmation don't match" -msgstr "La clave de acceso y la confirmación no concuerdan" +msgstr "Su contraseña y confirmación no concuerdan" msgid "path" msgstr "Ruta" msgctxt "Bookmark" msgid "path" -msgstr "" +msgstr "Ruta" msgid "permission" msgstr "Permiso" msgid "permissions" -msgstr "" +msgstr "Permisos" msgid "permissions for this entity" msgstr "Permisos para esta entidad" -msgid "personnal informations" -msgstr "Información personal" - msgid "pick existing bookmarks" -msgstr "Seleccione los favoritos existentes" +msgstr "Seleccionar favoritos existentes" msgid "pkey" -msgstr "pkey" +msgstr "Clave" msgctxt "CWProperty" msgid "pkey" -msgstr "" +msgstr "Código de la Propiedad" msgid "please correct errors below" -msgstr "Favor de corregir errores" +msgstr "Por favor corregir los errores señalados en la parte inferior" msgid "please correct the following errors:" -msgstr "Favor de corregir los siguientes errores :" +msgstr "Por favor corregir los siguientes errores:" msgid "possible views" msgstr "Vistas posibles" msgid "powered by CubicWeb" -msgstr "" +msgstr "Potenciado en CubicWeb" msgid "prefered_form" -msgstr "" +msgstr "Forma preferida" msgctxt "EmailAddress" msgid "prefered_form" -msgstr "" +msgstr "Email principal" + +msgid "prefered_form_object" +msgstr "Formato preferido sobre" msgctxt "EmailAddress" msgid "prefered_form_object" -msgstr "" - -msgid "prefered_form_object" -msgstr "" +msgstr "Email principal de" msgid "preferences" msgstr "Preferencias" msgid "previous_results" -msgstr "Resultados anteriores" +msgstr "Resultados Anteriores" msgid "primary" msgstr "Primaria" msgid "primary_email" -msgstr "Dirección de email principal" +msgstr "Dirección principal de correo electrónico" msgctxt "CWUser" msgid "primary_email" -msgstr "" +msgstr "Dirección principal de correo electrónico" + +msgid "primary_email_object" +msgstr "Dirección de email principal (objeto)" msgctxt "EmailAddress" msgid "primary_email_object" +msgstr "Dirección principal de correo electrónico de" + +msgid "profile" msgstr "" -msgid "primary_email_object" -msgstr "Dirección de email principal (objeto)" - msgid "progress" -msgstr "Avance" +msgstr "Progreso" msgid "progress bar" -msgstr "Barra de progreso de avance" +msgstr "Barra de Progreso" msgid "project" msgstr "Proyecto" msgid "rdef-description" -msgstr "" +msgstr "Descripción" msgid "rdef-permissions" -msgstr "" +msgstr "Permisos" msgid "read" msgstr "Lectura" -msgid "read_perm" -msgstr "Lectura" - msgid "read_permission" msgstr "Permiso de lectura" +msgctxt "CWAttribute" +msgid "read_permission" +msgstr "Permiso de Lectura" + msgctxt "CWEType" msgid "read_permission" -msgstr "" - -msgctxt "CWAttribute" -msgid "read_permission" -msgstr "" +msgstr "Permiso de Lectura" msgctxt "CWRelation" msgid "read_permission" -msgstr "" +msgstr "Permiso de Lectura" + +msgid "read_permission_object" +msgstr "Tiene acceso de lectura a" msgctxt "CWGroup" msgid "read_permission_object" -msgstr "" +msgstr "Puede leer" msgctxt "RQLExpression" msgid "read_permission_object" -msgstr "" - -msgid "read_permission_object" -msgstr "Objeto_permiso_lectura" +msgstr "Puede leer" msgid "registry" -msgstr "" +msgstr "Registro" msgid "related entity has no state" -msgstr "" +msgstr "La entidad relacionada no posee Estado" msgid "related entity has no workflow set" -msgstr "" +msgstr "La entidad relacionada no posee Workflow definido" msgid "relation" -msgstr "" +msgstr "relación" #, python-format msgid 
"relation %(relname)s of %(ent)s" msgstr "relación %(relname)s de %(ent)s" msgid "relation add" -msgstr "" +msgstr "Agregar Relación" msgid "relation removal" -msgstr "" +msgstr "Eliminar Relación" msgid "relation_type" -msgstr "tipo de relación" +msgstr "Tipo de Relación" msgctxt "CWAttribute" msgid "relation_type" -msgstr "" +msgstr "Tipo de Relación" msgctxt "CWRelation" msgid "relation_type" -msgstr "" +msgstr "Tipo de Relación" + +msgid "relation_type_object" +msgstr "Definición de Relaciones" msgctxt "CWRType" msgid "relation_type_object" +msgstr "Definición de Relaciones" + +msgid "relations" msgstr "" -msgid "relation_type_object" -msgstr "Definición" +msgctxt "CWUniqueTogetherConstraint" +msgid "relations" +msgstr "" msgid "relations deleted" -msgstr "Relaciones eliminadas" +msgstr "Relaciones Eliminadas" + +msgid "relations_object" +msgstr "" + +msgctxt "CWAttribute" +msgid "relations_object" +msgstr "" + +msgctxt "CWRelation" +msgid "relations_object" +msgstr "" msgid "relative url of the bookmarked page" -msgstr "Url relativa de la pagina" +msgstr "Url relativa de la página" msgid "remove-inlined-entity-form" -msgstr "" +msgstr "Eliminar" msgid "require_group" -msgstr "Requiere grupo" +msgstr "Requiere el grupo" msgctxt "BaseTransition" msgid "require_group" -msgstr "" +msgstr "Restringida al Grupo" + +msgctxt "CWPermission" +msgid "require_group" +msgstr "Restringida al Grupo" msgctxt "Transition" msgid "require_group" -msgstr "" - -msgctxt "CWPermission" -msgid "require_group" -msgstr "" +msgstr "Restringida al Grupo" msgctxt "WorkflowTransition" msgid "require_group" -msgstr "" +msgstr "Restringida al Grupo" + +msgid "require_group_object" +msgstr "Posee derechos sobre" msgctxt "CWGroup" msgid "require_group_object" -msgstr "" - -msgid "require_group_object" -msgstr "Requerido por grupo" +msgstr "Posee derechos sobre" msgid "require_permission" -msgstr "Requiere autorización" +msgstr "Requiere Permisos" msgid "require_permission_object" msgstr "Requerido por autorización" msgid "required" -msgstr "" +msgstr "Requerido" msgid "required attribute" msgstr "Atributo requerido" @@ -3199,7 +3331,7 @@ msgstr "Campo requerido" msgid "resources usage" -msgstr "" +msgstr "Recursos utilizados" msgid "" "restriction part of a rql query. For entity rql expression, X and U are " @@ -3207,382 +3339,387 @@ "relation rql expression, S, O and U are predefined respectivly to the " "current relation'subject, object and to the request user. " msgstr "" -"restriction part of a rql query. For entity rql expression, X and U are " -"predefined respectivly to the current object and to the request user. For " -"relation rql expression, S, O and U are predefined respectivly to the " -"current relation'subject, object and to the request user. " +"Parte restrictiva de una consulta RQL. En una expresión ligada a una " +"entidad, X y U son respectivamente asignadas a la Entidad y el Usuario en " +"curso.En una expresión ligada a una relación, S, O y U son respectivamente " +"asignados al Sujeto/Objeto de la relación y al Usuario actual." 
msgid "revert changes" -msgstr "Revertir cambios" +msgstr "Anular modificación" msgid "right" msgstr "Derecha" msgid "rql expressions" -msgstr "expresiones rql" +msgstr "Expresiones RQL" msgid "rss" msgstr "RSS" msgid "same_as" -msgstr "" +msgstr "Idéntico a" msgid "sample format" -msgstr "ejemplo" +msgstr "Ejemplo" msgid "saturday" -msgstr "sabado" +msgstr "Sábado" msgid "schema entities" -msgstr "entidades del esquema" +msgstr "Entidades del esquema" msgid "schema's permissions definitions" -msgstr "definiciones de permisos del esquema" +msgstr "Definiciones de permisos del esquema" msgid "schema-diagram" -msgstr "" +msgstr "Gráfica" msgid "schema-entity-types" -msgstr "" +msgstr "Entidades" msgid "schema-relation-types" -msgstr "" +msgstr "Relaciones" msgid "schema-security" -msgstr "" +msgstr "Seguridad" msgid "search" -msgstr "buscar" +msgstr "Buscar" msgid "search for association" -msgstr "buscar por asociación" +msgstr "Búsqueda por asociación" msgid "searching for" -msgstr "buscando " +msgstr "Buscando" msgid "secondary" -msgstr "secundario" +msgstr "Secundaria" msgid "security" -msgstr "seguridad" +msgstr "Seguridad" msgid "see them all" msgstr "Ver todos" msgid "see_also" -msgstr "Ver tambíen" +msgstr "Ver además" msgid "select" msgstr "Seleccionar" msgid "select a" -msgstr "seleccione un" +msgstr "Seleccione un" msgid "select a key first" -msgstr "seleccione una clave" +msgstr "Seleccione una clave" msgid "select a relation" -msgstr "seleccione una relación" +msgstr "Seleccione una relación" msgid "select this entity" -msgstr "seleccionar esta entidad" +msgstr "Seleccionar esta entidad" msgid "selected" -msgstr "seleccionado" +msgstr "Seleccionado" msgid "semantic description of this attribute" -msgstr "descripción semantica de este atributo" +msgstr "Descripción semántica de este atributo" msgid "semantic description of this entity type" -msgstr "descripción semantica de este tipo de entidad" +msgstr "Descripción semántica de este tipo de entidad" msgid "semantic description of this relation" -msgstr "descripción semantica de esta relación" +msgstr "Descripción semántica de esta relación" msgid "semantic description of this relation type" -msgstr "descripción semantica de este tipo de relación" +msgstr "Descripción semántica de este tipo de relación" msgid "semantic description of this state" -msgstr "descripción semantica de este estado" +msgstr "Descripción semántica de este estado" msgid "semantic description of this transition" -msgstr "descripcion semantica de esta transición" +msgstr "Descripcion semántica de esta transición" msgid "semantic description of this workflow" -msgstr "" +msgstr "Descripcion semántica de este Workflow" msgid "send email" -msgstr "enviar email" +msgstr "Enviar email" msgid "september" -msgstr "septiembre" +msgstr "Septiembre" msgid "server information" -msgstr "server information" +msgstr "Información del servidor" msgid "" "should html fields being edited using fckeditor (a HTML WYSIWYG editor). " "You should also select text/html as default text format to actually get " "fckeditor." msgstr "" -"indique si los campos deberan ser editados usando fckeditor (un\n" -"editor HTML WYSIWYG). Debera tambien elegir text/html\n" -"como formato de texto por default para poder utilizar fckeditor." +"Indica si los campos de tipo texto deberán ser editados usando fckeditor " +"(un\n" +"editor HTML WYSIWYG). Deberá también elegir text/html\n" +"como formato de texto por defecto para poder utilizar fckeditor." 
#, python-format msgid "show %s results" -msgstr "mostrar %s resultados" +msgstr "Mostrar %s resultados" msgid "show advanced fields" -msgstr "mostrar campos avanzados" +msgstr "Mostrar campos avanzados" msgid "show filter form" -msgstr "afficher le filtre" +msgstr "Mostrar el Filtro" msgid "sioc" -msgstr "" +msgstr "SIOC" msgid "site configuration" -msgstr "configuracion del sitio" +msgstr "Configuración Sistema" msgid "site documentation" -msgstr "documentacion del sitio" +msgstr "Documentación Sistema" msgid "site schema" -msgstr "esquema del sitio" +msgstr "Esquema del Sistema" msgid "site title" -msgstr "titulo del sitio" +msgstr "Nombre del Sistema" msgid "site-wide property can't be set for user" -msgstr "" -"una propiedad especifica para el sitio no puede establecerse para el usuario" - -msgid "some errors occured:" -msgstr "" +msgstr "Una propiedad específica al Sistema no puede ser propia al usuario" + +msgid "some errors occurred:" +msgstr "Algunos errores encontrados :" msgid "some later transaction(s) touch entity, undo them first" msgstr "" +"Las transacciones más recientes modificaron esta entidad, anúlelas primero" msgid "sorry, the server is unable to handle this query" -msgstr "lo sentimos, el servidor no puede manejar esta consulta" +msgstr "Lo sentimos, el servidor no puede manejar esta consulta" msgid "sparql xml" -msgstr "" +msgstr "XML Sparql" msgid "special transition allowing to go through a sub-workflow" -msgstr "" +msgstr "Transición especial que permite ir en un Sub-Workflow" msgid "specializes" -msgstr "derivado de" +msgstr "Deriva de" msgctxt "CWEType" msgid "specializes" -msgstr "" +msgstr "Especializa" + +msgid "specializes_object" +msgstr "Especializado por" msgctxt "CWEType" msgid "specializes_object" -msgstr "" - -msgid "specializes_object" -msgstr "objeto_derivado" +msgstr "Especializado por" msgid "startup views" -msgstr "vistas de inicio" +msgstr "Vistas de inicio" msgid "state" -msgstr "estado" +msgstr "Estado" msgid "state and transition don't belong the the same workflow" -msgstr "" +msgstr "El Estado y la Transición no pertenecen al mismo Workflow" msgid "state doesn't apply to this entity's type" -msgstr "" +msgstr "Este Estado no aplica a este tipo de Entidad" msgid "state doesn't belong to entity's current workflow" -msgstr "" +msgstr "El Estado no pertenece al Workflow actual de la Entidad" msgid "state doesn't belong to entity's workflow" -msgstr "" +msgstr "El Estado no pertenece al Workflow de la Entidad" msgid "" "state doesn't belong to entity's workflow. You may want to set a custom " "workflow for this entity first." msgstr "" +"El Estado no pertenece al Workflow Actual de la Entidad. 
Usted deseaquizás " +"especificar que esta entidad debe utilizar este Workflow" msgid "state doesn't belong to this workflow" -msgstr "" +msgstr "El Estado no pertenece a este Workflow" msgid "state_of" -msgstr "estado_de" +msgstr "Estado de" msgctxt "State" msgid "state_of" -msgstr "" +msgstr "Estado de" + +msgid "state_of_object" +msgstr "Tiene por Estado" msgctxt "Workflow" msgid "state_of_object" -msgstr "" - -msgid "state_of_object" -msgstr "objeto_estado_de" +msgstr "Tiene por Estado" msgid "status change" -msgstr "cambio de estatus" +msgstr "Cambio de Estatus" msgid "status changed" -msgstr "estatus cambiado" +msgstr "Estatus cambiado" #, python-format msgid "status will change from %(st1)s to %(st2)s" -msgstr "estatus cambiara de %(st1)s a %(st2)s" +msgstr "El estatus cambiará de %(st1)s a %(st2)s" msgid "subject" -msgstr "sujeto" +msgstr "Sujeto" msgid "subject type" -msgstr "" +msgstr "Tipo del sujeto" msgid "subject/object cardinality" -msgstr "cardinalidad sujeto/objeto" +msgstr "Cardinalidad Sujeto/Objeto" msgid "subworkflow" -msgstr "" +msgstr "Sub-Workflow" msgctxt "WorkflowTransition" msgid "subworkflow" -msgstr "" +msgstr "Sub-Workflow" msgid "" "subworkflow isn't a workflow for the same types as the transition's workflow" msgstr "" +"Le Sub-Workflow no se aplica a los mismos tipos que el Workflow de esta " +"transición" msgid "subworkflow state" -msgstr "" +msgstr "Estado de Sub-Workflow" msgid "subworkflow_exit" -msgstr "" +msgstr "Salida del Sub-Workflow" msgctxt "WorkflowTransition" msgid "subworkflow_exit" -msgstr "" +msgstr "Salida del Sub-Workflow" + +msgid "subworkflow_exit_object" +msgstr "Salida Sub-Workflow de" msgctxt "SubWorkflowExitPoint" msgid "subworkflow_exit_object" -msgstr "" - -msgid "subworkflow_exit_object" -msgstr "" +msgstr "Salida Sub-Workflow de" + +msgid "subworkflow_object" +msgstr "Sub-Workflow de" msgctxt "Workflow" msgid "subworkflow_object" -msgstr "" - -msgid "subworkflow_object" -msgstr "" +msgstr "Sub-Workflow de" msgid "subworkflow_state" -msgstr "" +msgstr "Estado de Sub-Workflow" msgctxt "SubWorkflowExitPoint" msgid "subworkflow_state" -msgstr "" +msgstr "Estado de Sub-Workflow" + +msgid "subworkflow_state_object" +msgstr "Estado de Salida de" msgctxt "State" msgid "subworkflow_state_object" -msgstr "" - -msgid "subworkflow_state_object" -msgstr "" +msgstr "Estado de Salida de" msgid "sunday" -msgstr "domingo" +msgstr "Domingo" msgid "surname" -msgstr "apellido" +msgstr "Apellido" msgctxt "CWUser" msgid "surname" -msgstr "" +msgstr "Apellido" msgid "symmetric" -msgstr "simetrico" +msgstr "Simétrico" msgctxt "CWRType" msgid "symmetric" -msgstr "" +msgstr "Simétrico" msgid "system entities" -msgstr "entidades de sistema" +msgstr "Entidades del sistema" msgid "table" -msgstr "tabla" +msgstr "Tabla" msgid "tablefilter" -msgstr "filtro de tabla" +msgstr "Tablero de Filtrado" msgid "task progression" -msgstr "progreso de la tarea" +msgstr "Progreso de la Acción" msgid "text" -msgstr "text" +msgstr "Texto" msgid "text/cubicweb-page-template" -msgstr "text/cubicweb-page-template" +msgstr "Usar Page Templates" msgid "text/html" -msgstr "html" +msgstr "Usar HTML" msgid "text/plain" -msgstr "text/plain" +msgstr "Usar Texto simple" msgid "text/rest" -msgstr "text/rest" +msgstr "Texto en REST" msgid "the URI of the object" -msgstr "" +msgstr "El URI del Objeto" msgid "the prefered email" -msgstr "dirección principal de email" +msgstr "Dirección principal de email" #, python-format msgid "the value \"%s\" is already used, use another one" -msgstr 
"el valor \"%s\" ya esta en uso, favor de utilizar otro" +msgstr "El valor \"%s\" ya esta en uso, favor de utilizar otro" msgid "this action is not reversible!" -msgstr "esta acción es irreversible!." +msgstr "Esta acción es irreversible!." msgid "this entity is currently owned by" -msgstr "esta entidad es propiedad de" +msgstr "Esta Entidad es propiedad de" msgid "this resource does not exist" -msgstr "este recurso no existe" +msgstr "Este recurso no existe" msgid "thursday" -msgstr "jueves" +msgstr "Jueves" msgid "timeline" -msgstr "" +msgstr "Escala de Tiempo" msgid "timestamp" -msgstr "fecha" +msgstr "Fecha" msgctxt "CWCache" msgid "timestamp" -msgstr "" +msgstr "Válido desde" msgid "timestamp of the latest source synchronization." -msgstr "fecha de la ultima sincronización de la fuente." +msgstr "Fecha de la última sincronización de la fuente." msgid "timetable" -msgstr "tabla de tiempos" +msgstr "Tablero de tiempos" msgid "title" -msgstr "titulo" +msgstr "Nombre" msgctxt "Bookmark" msgid "title" -msgstr "" +msgstr "Nombre" msgid "to" msgstr "a" @@ -3592,434 +3729,444 @@ msgstr "a %(date)s" msgid "to associate with" -msgstr "a asociar con" +msgstr "Para asociar con" msgid "to_entity" -msgstr "hacia entidad" +msgstr "Hacia la entidad" msgctxt "CWAttribute" msgid "to_entity" -msgstr "" +msgstr "Por la entidad" msgctxt "CWRelation" msgid "to_entity" -msgstr "" +msgstr "Por la entidad" + +msgid "to_entity_object" +msgstr "Objeto de la Relación" msgctxt "CWEType" msgid "to_entity_object" -msgstr "" - -msgid "to_entity_object" -msgstr "hacia entidad objeto" +msgstr "Objeto de la Relación" msgid "to_interval_end" -msgstr "" +msgstr "a" msgid "to_state" -msgstr "hacia el estado" +msgstr "Hacia el Estado" msgctxt "TrInfo" msgid "to_state" -msgstr "" +msgstr "Hacia el Estado" + +msgid "to_state_object" +msgstr "Transición hacia este Estado" msgctxt "State" msgid "to_state_object" -msgstr "" - -msgid "to_state_object" -msgstr "hacia objeto estado" +msgstr "Transición hacia este Estado" msgid "todo_by" -msgstr "a hacer por" +msgstr "Asignada a" msgid "toggle check boxes" -msgstr "cambiar valor" +msgstr "Cambiar valor" msgid "transaction undoed" -msgstr "" +msgstr "Transacciones Anuladas" #, python-format msgid "transition %(tr)s isn't allowed from %(st)s" -msgstr "" +msgstr "La transición %(tr)s no esta permitida desde el Estado %(st)s" msgid "transition doesn't belong to entity's workflow" -msgstr "" +msgstr "La transición no pertenece al Workflow de la Entidad" msgid "transition isn't allowed" -msgstr "" +msgstr "La transición no esta permitida" msgid "transition may not be fired" -msgstr "" +msgstr "La transición no puede ser lanzada" msgid "transition_of" -msgstr "transicion de" +msgstr "Transición de" msgctxt "BaseTransition" msgid "transition_of" -msgstr "" +msgstr "Transición de" msgctxt "Transition" msgid "transition_of" -msgstr "" +msgstr "Transición de" msgctxt "WorkflowTransition" msgid "transition_of" -msgstr "" +msgstr "Transición de" + +msgid "transition_of_object" +msgstr "Utiliza las transiciones" msgctxt "Workflow" msgid "transition_of_object" -msgstr "" - -msgid "transition_of_object" -msgstr "objeto de transición" +msgstr "Utiliza las transiciones" msgid "tree view" -msgstr "" +msgstr "Vista Jerárquica" msgid "tuesday" -msgstr "martes" +msgstr "Martes" msgid "type" -msgstr "type" +msgstr "Tipo" msgctxt "BaseTransition" msgid "type" -msgstr "" +msgstr "Tipo" msgctxt "Transition" msgid "type" -msgstr "" +msgstr "Tipo" msgctxt "WorkflowTransition" msgid "type" -msgstr "" +msgstr 
"Tipo" msgid "type here a sparql query" -msgstr "" +msgstr "Escriba aquí su consulta en Sparql" msgid "ui" -msgstr "interfaz de usuario" +msgstr "Interfaz Genérica" msgid "ui.date-format" -msgstr "" +msgstr "Formato de Fecha" msgid "ui.datetime-format" -msgstr "" +msgstr "Formato de Fecha y Hora" msgid "ui.default-text-format" -msgstr "" +msgstr "Formato de texto" msgid "ui.encoding" -msgstr "" +msgstr "Codificación" msgid "ui.fckeditor" -msgstr "" +msgstr "Editor de texto FCK" msgid "ui.float-format" -msgstr "" +msgstr "Números flotantes" msgid "ui.language" -msgstr "" +msgstr "Lenguaje" msgid "ui.main-template" -msgstr "" +msgstr "Plantilla Principal" msgid "ui.site-title" -msgstr "" +msgstr "Nombre del Sistema" msgid "ui.time-format" -msgstr "" +msgstr "Formato de hora" msgid "unable to check captcha, please try again" -msgstr "" +msgstr "Imposible de verificar el Captcha, inténtelo otra vez" msgid "unaccessible" -msgstr "inaccesible" +msgstr "Inaccesible" msgid "unauthorized value" -msgstr "valor no permitido" +msgstr "Valor no permitido" msgid "undo" -msgstr "" +msgstr "Anular" msgid "unique identifier used to connect to the application" -msgstr "identificador unico utilizado para conectar a la aplicación" +msgstr "Identificador único utilizado para conectarse al Sistema" msgid "unknown external entity" -msgstr "entidad externa desconocida" +msgstr "Entidad externa desconocida" msgid "unknown property key" -msgstr "propiedad desconocida" +msgstr "Clave de Propiedad desconocida" msgid "unknown vocabulary:" -msgstr "" +msgstr "Vocabulario desconocido: " msgid "up" -msgstr "arriba" +msgstr "Arriba" msgid "upassword" -msgstr "clave de acceso" +msgstr "Contraseña" msgctxt "CWUser" msgid "upassword" -msgstr "" +msgstr "Contraseña" msgid "update" -msgstr "modificación" - -msgid "update_perm" -msgstr "modificación" +msgstr "Modificación" msgid "update_permission" -msgstr "Permiso de modificación" +msgstr "Puede ser modificado por" + +msgctxt "CWAttribute" +msgid "update_permission" +msgstr "Puede ser modificado por" msgctxt "CWEType" msgid "update_permission" -msgstr "" - -msgctxt "CWAttribute" -msgid "update_permission" -msgstr "" +msgstr "Puede ser modificado por" + +msgid "update_permission_object" +msgstr "Tiene permiso de modificar" msgctxt "CWGroup" msgid "update_permission_object" -msgstr "" +msgstr "Puede modificar" msgctxt "RQLExpression" msgid "update_permission_object" -msgstr "" - -msgid "update_permission_object" -msgstr "objeto de autorización de modificaciones" +msgstr "Puede modificar" + +msgid "update_relation" +msgstr "Modificar" msgid "updated" -msgstr "" +msgstr "Actualizado" #, python-format msgid "updated %(etype)s #%(eid)s (%(title)s)" msgstr "actualización de la entidad %(etype)s #%(eid)s (%(title)s)" msgid "uri" -msgstr "" +msgstr "URI" msgctxt "ExternalUri" msgid "uri" -msgstr "" +msgstr "URI" msgid "use template languages" -msgstr "utilizar plantillas de lenguaje" +msgstr "Utilizar plantillas de lenguaje" msgid "" "use to define a transition from one or multiple states to a destination " "states in workflow's definitions. Transition without destination state will " "go back to the state from which we arrived to the current state." msgstr "" +"Se utiliza en una definición de procesos para agregar una transición desde " +"uno o varios estados hacia un estado destino. 
Una transición sin Estado " +"destino regresará al Estado anterior del Estado actual" msgid "use_email" -msgstr "correo electrónico" +msgstr "Correo electrónico" msgctxt "CWUser" msgid "use_email" -msgstr "" +msgstr "Usa el Correo Electrónico" + +msgid "use_email_object" +msgstr "Email utilizado por" msgctxt "EmailAddress" msgid "use_email_object" -msgstr "" - -msgid "use_email_object" -msgstr "objeto email utilizado" +msgstr "Utilizado por" msgid "use_template_format" -msgstr "utilización del formato 'cubicweb template'" +msgstr "Utilización del formato 'cubicweb template'" msgid "" "used for cubicweb configuration. Once a property has been created you can't " "change the key." msgstr "" -"utilizado para la configuración de cubicweb. Una vez que la propiedad ha " -"sido creada no puede cambiar la llave" +"Se utiliza para la configuración de CubicWeb. Una vez que la propiedad ha " +"sido creada no puede cambiar la clave" msgid "" "used to associate simple states to an entity type and/or to define workflows" msgstr "" -"utilizado para asociar estados simples a un tipo de entidad y/o para definir " -"workflows" +"Se utiliza para asociar estados simples a un tipo de entidad y/o para " +"definir Workflows" msgid "used to grant a permission to a group" -msgstr "utilizado para otorgar permisos a un grupo" +msgstr "Se utiliza para otorgar permisos a un grupo" msgid "user" -msgstr "" +msgstr "Usuario" #, python-format msgid "" "user %s has made the following change(s):\n" "\n" msgstr "" -"el usuario %s ha efectuado los siguentes cambios:\n" +"El usuario %s ha efectuado los siguentes cambios:\n" "\n" msgid "user interface encoding" -msgstr "codificación de la interfaz de usuario" +msgstr "Encoding de la interfaz de usuario" msgid "user preferences" -msgstr "preferencias del usuario" +msgstr "Preferencias" msgid "users" -msgstr "usuarios" +msgstr "Usuarios" msgid "users using this bookmark" -msgstr "usuarios en este favorito" +msgstr "Usuarios utilizando este Favorito" msgid "validate modifications on selected items" -msgstr "valida modificaciones sobre elementos seleccionados" +msgstr "Valida modificaciones sobre elementos seleccionados" msgid "validating..." -msgstr "validando ..." +msgstr "Validando ..." 
msgid "value" -msgstr "valor" +msgstr "Valor" msgctxt "CWConstraint" msgid "value" -msgstr "" +msgstr "Valor" msgctxt "CWProperty" msgid "value" -msgstr "" +msgstr "Vampr" msgid "value associated to this key is not editable manually" -msgstr "el valor asociado a este elemento no es editable manualmente" +msgstr "El valor asociado a este elemento no es editable manualmente" #, python-format msgid "value must be %(op)s %(boundary)s" -msgstr "" +msgstr "El valor debe ser %(op)s %(boundary)s" #, python-format msgid "value must be <= %(boundary)s" -msgstr "" +msgstr "El valor debe ser <= %(boundary)s" #, python-format msgid "value must be >= %(boundary)s" -msgstr "" +msgstr "El valor debe ser >= %(boundary)s" #, python-format msgid "value should have maximum size of %s" -msgstr "" +msgstr "El valor no debe exceder de %s" #, python-format msgid "value should have minimum size of %s" -msgstr "" +msgstr "El valor no puede ser menor a %s" msgid "vcard" msgstr "vcard" msgid "versions configuration" -msgstr "" +msgstr "Configuración de Versión" msgid "view" -msgstr "ver" +msgstr "Ver" msgid "view all" -msgstr "ver todos" +msgstr "Ver todos" msgid "view detail for this entity" -msgstr "ver detalle de esta entidad" +msgstr "Ver a detalle esta entidad" msgid "view history" -msgstr "" +msgstr "Ver histórico" msgid "view identifier" -msgstr "" +msgstr "Identificador" msgid "view title" -msgstr "" +msgstr "Nombre" msgid "view workflow" -msgstr "ver workflow" +msgstr "Ver Workflow" msgid "view_index" +msgstr "Inicio" + +#, python-format +msgid "violates unique_together constraints (%s)" msgstr "" msgid "visible" -msgstr "visible" +msgstr "Visible" msgid "we are not yet ready to handle this query" -msgstr "" +msgstr "Aún no podemos manejar este tipo de consulta Sparql" msgid "wednesday" -msgstr "miercoles" +msgstr "Miércoles" msgid "week" msgstr "sem." #, python-format msgid "welcome %s !" -msgstr "bienvenido %s !" +msgstr "¡ Bienvenido %s !" msgid "wf_info_for" -msgstr "historial de" +msgstr "Histórico de" msgid "wf_info_for_object" -msgstr "historial de transiciones" +msgstr "Histórico de transiciones" msgid "wf_tab_info" -msgstr "" +msgstr "Descripción" msgid "wfgraph" -msgstr "" +msgstr "Gráfica del Workflow" msgid "" "when multiple addresses are equivalent (such as python-projects@logilab.org " "and python-projects@lists.logilab.org), set this to indicate which is the " "preferred form." msgstr "" +"Cuando varias direcciones email son equivalentes (como python-" +"projects@logilab.org y python-projects@lists.logilab.org), aquí se indica " +"cual es la forma preferida." 
msgid "workflow" -msgstr "" +msgstr "Workflow" msgid "workflow already have a state of that name" -msgstr "" +msgstr "El Workflow ya tiene un Estado con ese nombre" msgid "workflow already have a transition of that name" -msgstr "" +msgstr "El Workflow ya tiene una transición con ese nombre" #, python-format msgid "workflow changed to \"%s\"" -msgstr "" +msgstr "Workflow cambiado a \"%s\"" msgid "workflow has no initial state" -msgstr "" +msgstr "El Workflow no posee Estado Inicial" msgid "workflow history item" -msgstr "" +msgstr "Elemento histórico del Workflow" msgid "workflow isn't a workflow for this type" -msgstr "" +msgstr "El Workflow no se aplica a este Tipo de Entidad" msgid "workflow to which this state belongs" -msgstr "" +msgstr "Workflow al cual pertenece este estado" msgid "workflow to which this transition belongs" -msgstr "" +msgstr "Workflow al cual pertenece esta transición" msgid "workflow_of" -msgstr "" +msgstr "Workflow de" msgctxt "Workflow" msgid "workflow_of" -msgstr "" +msgstr "Workflow de" + +msgid "workflow_of_object" +msgstr "Utiliza el Workflow" msgctxt "CWEType" msgid "workflow_of_object" -msgstr "" - -msgid "workflow_of_object" -msgstr "" +msgstr "Utiliza el Workflow" #, python-format msgid "wrong query parameter line %s" -msgstr "" +msgstr "Parámetro erróneo de consulta línea %s" msgid "xbel" msgstr "xbel" @@ -4028,16 +4175,34 @@ msgstr "xml" msgid "xml export" -msgstr "" +msgstr "Exportar XML" msgid "yes" -msgstr "si" +msgstr "Sí" msgid "you have been logged out" -msgstr "ha terminado la sesion" +msgstr "Ha terminado la sesión" msgid "you should probably delete that property" -msgstr "deberia probablamente suprimir esta propriedad" - -#~ msgid "schema-image" -#~ msgstr "esquema imagen" +msgstr "Debería probablamente suprimir esta propriedad" + +#~ msgid "add_perm" +#~ msgstr "Agregado" + +#~ msgid "delete_perm" +#~ msgstr "Eliminar" + +#~ msgid "edition" +#~ msgstr "Edición" + +#~ msgid "graphical workflow for %s" +#~ msgstr "Gráfica del workflow por %s" + +#~ msgid "personnal informations" +#~ msgstr "Información personal" + +#~ msgid "read_perm" +#~ msgstr "Lectura" + +#~ msgid "update_perm" +#~ msgstr "Permiso de Modificar" diff -r f4d1d5d9ccbb -r 90f2f20367bc i18n/fr.po --- a/i18n/fr.po Tue Jul 27 12:36:03 2010 +0200 +++ b/i18n/fr.po Wed Nov 03 16:38:28 2010 +0100 @@ -4,9 +4,10 @@ msgid "" msgstr "" "Project-Id-Version: cubicweb 2.46.0\n" -"PO-Revision-Date: 2010-05-16 18:59+0200\n" +"PO-Revision-Date: 2010-09-15 15:12+0200\n" "Last-Translator: Logilab Team \n" "Language-Team: fr \n" +"Language: \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" @@ -36,7 +37,7 @@ msgstr " de l'état %(fromstate)s vers l'état %(tostate)s\n" msgid " :" -msgstr "" +msgstr " :" #, python-format msgid "%(attr)s set to %(newvalue)s" @@ -188,6 +189,9 @@ "
        Ce schéma du modèle de données exclut les méta-données, mais " "vous pouvez afficher un schéma complet.
        " +msgid "" +msgstr "" + msgid "?*" msgstr "0..1 0..n" @@ -242,6 +246,9 @@ msgid "Browse by category" msgstr "Naviguer par catégorie" +msgid "Browse by entity type" +msgstr "Naviguer par type d'entité" + msgid "Bytes" msgstr "Donnée binaires" @@ -320,6 +327,12 @@ msgid "CWRelation_plural" msgstr "Relations" +msgid "CWUniqueTogetherConstraint" +msgstr "Contrainte unique_together" + +msgid "CWUniqueTogetherConstraint_plural" +msgstr "Contraintes unique_together" + msgid "CWUser" msgstr "Utilisateur" @@ -374,6 +387,10 @@ "Ne peut annuler la création de l'entité %(eid)s de type %(etype)s, ce type " "n'existe plus" +#, python-format +msgid "Data connection graph for %s" +msgstr "Graphique de connection des données pour %s" + msgid "Date" msgstr "Date" @@ -398,10 +415,10 @@ msgid "Download schema as OWL" msgstr "Télécharger le schéma au format OWL" -msgctxt "inlined:CWUser.use_email.subject" msgid "EmailAddress" msgstr "Adresse électronique" +msgctxt "inlined:CWUser.use_email.subject" msgid "EmailAddress" msgstr "Adresse électronique" @@ -438,6 +455,9 @@ msgid "Garbage collection information" msgstr "Information sur le ramasse-miette" +msgid "Got rhythm?" +msgstr "T'as le rythme ?" + msgid "Help" msgstr "Aide" @@ -504,6 +524,9 @@ msgid "New CWRelation" msgstr "Nouvelle définition de relation non finale" +msgid "New CWUniqueTogetherConstraint" +msgstr "Nouvelle contrainte unique_together" + msgid "New CWUser" msgstr "Nouvel utilisateur" @@ -534,17 +557,21 @@ msgid "New WorkflowTransition" msgstr "Nouvelle transition workflow" +#, python-format +msgid "No account? Try public access at %s" +msgstr "Pas de compte ? Accédez au site public : %s" + msgid "No result matching query" -msgstr "aucun résultat" +msgstr "Aucun résultat ne correspond à la requête" msgid "Non exhaustive list of views that may apply to entities of this type" -msgstr "Liste non exhausite des vues s'appliquant à ce type d'entité" +msgstr "Liste non exhaustive des vues s'appliquant à ce type d'entité" msgid "OR" msgstr "OU" -msgid "Parent classes:" -msgstr "Classes parentes :" +msgid "Parent class:" +msgstr "Classe parente" msgid "Password" msgstr "Mot de passe" @@ -643,9 +670,6 @@ msgid "Submit bug report by mail" msgstr "Soumettre ce rapport par email" -msgid "The repository holds the following entities" -msgstr "Le dépot contient les entités suivantes" - #, python-format msgid "The view %s can not be applied to this query" msgstr "La vue %s ne peut être appliquée à cette requête" @@ -693,6 +717,9 @@ msgid "This CWRelation" msgstr "Cette définition de relation" +msgid "This CWUniqueTogetherConstraint" +msgstr "Cette contrainte unique_together" + msgid "This CWUser" msgstr "Cet utilisateur" @@ -724,7 +751,7 @@ msgstr "Cette transition workflow" msgid "This entity type permissions:" -msgstr "Permissions pour ce type d'endité" +msgstr "Permissions pour ce type d'entité" msgid "Time" msgstr "Heure" @@ -748,7 +775,7 @@ msgstr "contrainte d'unicité" msgid "Unreachable objects" -msgstr "Objets inacessible" +msgstr "Objets inaccessibles" msgid "Used by:" msgstr "Utilisé par :" @@ -908,6 +935,9 @@ msgid "add CWRelation relation_type CWRType object" msgstr "définition de relation" +msgid "add CWUniqueTogetherConstraint constraint_of CWEType object" +msgstr "contrainte unique_together" + msgid "add CWUser in_group CWGroup object" msgstr "utilisateur" @@ -966,9 +996,6 @@ msgid "add a new permission" msgstr "ajouter une permission" -msgid "add_perm" -msgstr "ajout" - # subject and object forms for each relation type # (no object form 
for final relation types) msgid "add_permission" @@ -984,6 +1011,9 @@ msgid "add_permission" msgstr "permission d'ajout" +msgid "add_permission_object" +msgstr "a la permission d'ajouter" + msgctxt "CWGroup" msgid "add_permission_object" msgstr "a la permission d'ajouter" @@ -992,8 +1022,8 @@ msgid "add_permission_object" msgstr "a la permission d'ajouter" -msgid "add_permission_object" -msgstr "a la permission d'ajouter" +msgid "add_relation" +msgstr "ajouter" #, python-format msgid "added %(etype)s #%(eid)s (%(title)s)" @@ -1001,11 +1031,11 @@ #, python-format msgid "" -"added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #%" -"(eidto)s" +"added relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" +"%(eidto)s" msgstr "" -"la relation %(rtype)s de %(frometype)s #%(eidfrom)s vers %(toetype)s #%" -"(eidto)s a été ajoutée" +"la relation %(rtype)s de %(frometype)s #%(eidfrom)s vers %(toetype)s #" +"%(eidto)s a été ajoutée" msgid "addrelated" msgstr "ajouter" @@ -1037,6 +1067,9 @@ msgid "allowed_transition" msgstr "transitions autorisées" +msgid "allowed_transition_object" +msgstr "états en entrée" + msgctxt "BaseTransition" msgid "allowed_transition_object" msgstr "transition autorisée de" @@ -1049,9 +1082,6 @@ msgid "allowed_transition_object" msgstr "transition autorisée de" -msgid "allowed_transition_object" -msgstr "états en entrée" - msgid "am/pm calendar (month)" msgstr "calendrier am/pm (mois)" @@ -1067,13 +1097,13 @@ msgid "an electronic mail address associated to a short alias" msgstr "une adresse électronique associée à un alias" -msgid "an error occured" +msgid "an error occurred" msgstr "une erreur est survenue" -msgid "an error occured while processing your request" +msgid "an error occurred while processing your request" msgstr "une erreur est survenue pendant le traitement de votre requête" -msgid "an error occured, the request cannot be fulfilled" +msgid "an error occurred, the request cannot be fulfilled" msgstr "une erreur est survenue, la requête ne peut être complétée" msgid "an integer is expected" @@ -1137,13 +1167,13 @@ msgid "bookmarked_by" msgstr "utilisé par" +msgid "bookmarked_by_object" +msgstr "utilise le(s) signet(s)" + msgctxt "CWUser" msgid "bookmarked_by_object" msgstr "utilise le(s) signet(s)" -msgid "bookmarked_by_object" -msgstr "a pour signets" - msgid "bookmarks" msgstr "signets" @@ -1231,6 +1261,9 @@ msgid "by_transition" msgstr "transition" +msgid "by_transition_object" +msgstr "changement d'états" + msgctxt "BaseTransition" msgid "by_transition_object" msgstr "a pour information" @@ -1243,9 +1276,6 @@ msgid "by_transition_object" msgstr "a pour information" -msgid "by_transition_object" -msgstr "changement d'états" - msgid "calendar" msgstr "afficher un calendrier" @@ -1330,6 +1360,12 @@ msgid "click on the box to cancel the deletion" msgstr "cliquez dans la zone d'édition pour annuler la suppression" +msgid "click to add a value" +msgstr "cliquer pour ajouter une valeur" + +msgid "click to delete this value" +msgstr "cliquer pour supprimer cette valeur" + msgid "click to edit this field" msgstr "cliquez pour éditer ce champ" @@ -1426,10 +1462,10 @@ msgid "condition" msgstr "condition" -msgctxt "RQLExpression" msgid "condition_object" msgstr "condition de" +msgctxt "RQLExpression" msgid "condition_object" msgstr "condition de" @@ -1456,16 +1492,30 @@ msgid "constrained_by" msgstr "contraint par" -msgctxt "CWConstraint" msgid "constrained_by_object" msgstr "contrainte de" +msgctxt "CWConstraint" msgid 
"constrained_by_object" msgstr "contrainte de" msgid "constraint factory" msgstr "fabrique de contraintes" +msgid "constraint_of" +msgstr "contrainte de" + +msgctxt "CWUniqueTogetherConstraint" +msgid "constraint_of" +msgstr "contrainte de" + +msgid "constraint_of_object" +msgstr "contraint par" + +msgctxt "CWEType" +msgid "constraint_of_object" +msgstr "contraint par" + msgid "constraints" msgstr "contraintes" @@ -1524,6 +1574,11 @@ msgid "context where this component should be displayed" msgstr "contexte où ce composant doit être affiché" +msgid "context where this facet should be displayed, leave empty for both" +msgstr "" +"contexte où cette facette doit être affichée. Laissez ce champ vide pour " +"l'avoir dans les deux." + msgid "control subject entity's relations order" msgstr "contrôle l'ordre des relations de l'entité sujet" @@ -1595,6 +1650,11 @@ msgid "creating CWRelation (CWRelation relation_type CWRType %(linkto)s)" msgstr "création relation %(linkto)s" +msgid "" +"creating CWUniqueTogetherConstraint (CWUniqueTogetherConstraint " +"constraint_of CWEType %(linkto)s)" +msgstr "création d'une contrainte unique_together sur %(linkto)s" + msgid "creating CWUser (CWUser in_group CWGroup %(linkto)s)" msgstr "création d'un utilisateur à rajouter au groupe %(linkto)s" @@ -1680,8 +1740,8 @@ msgstr "création d'une transition workflow autorisée depuis l'état %(linkto)s" msgid "" -"creating WorkflowTransition (WorkflowTransition transition_of Workflow %" -"(linkto)s)" +"creating WorkflowTransition (WorkflowTransition transition_of Workflow " +"%(linkto)s)" msgstr "création d'une transition workflow du workflow %(linkto)s" msgid "creation" @@ -1697,19 +1757,19 @@ msgstr "date de création" msgid "cstrtype" -msgstr "type de constrainte" +msgstr "type de contrainte" msgctxt "CWConstraint" msgid "cstrtype" msgstr "type" +msgid "cstrtype_object" +msgstr "utilisé par" + msgctxt "CWConstraintType" msgid "cstrtype_object" msgstr "type des contraintes" -msgid "cstrtype_object" -msgstr "utilisé par" - msgid "csv entities export" msgstr "export d'entités en CSV" @@ -1792,10 +1852,10 @@ msgid "default_workflow" msgstr "workflow par défaut" -msgctxt "Workflow" msgid "default_workflow_object" msgstr "workflow par défaut de" +msgctxt "Workflow" msgid "default_workflow_object" msgstr "workflow par défaut de" @@ -1816,7 +1876,7 @@ "define a final relation: link a final relation type from a non final entity " "to a final entity type. used to build the instance schema" msgstr "" -"définit une relation non finale: lie un type de relation non finaledepuis " +"définit une relation non finale: lie un type de relation non finale depuis " "une entité vers un type d'entité non final. Utilisé pour construire le " "schéma de l'instance" @@ -1845,6 +1905,9 @@ msgid "define how we get out from a sub-workflow" msgstr "définit comment sortir d'un sous-workflow" +msgid "defines a sql-level multicolumn unique index" +msgstr "définit un index SQL unique sur plusieurs colonnes" + msgid "" "defines what's the property is applied for. 
You must select this first to be " "able to set value" @@ -1864,9 +1927,6 @@ msgid "delete this relation" msgstr "supprimer cette relation" -msgid "delete_perm" -msgstr "suppression" - msgid "delete_permission" msgstr "permission de supprimer" @@ -1878,6 +1938,9 @@ msgid "delete_permission" msgstr "permission de supprimer" +msgid "delete_permission_object" +msgstr "a la permission de supprimer" + msgctxt "CWGroup" msgid "delete_permission_object" msgstr "peut supprimer" @@ -1886,17 +1949,14 @@ msgid "delete_permission_object" msgstr "peut supprimer" -msgid "delete_permission_object" -msgstr "a la permission de supprimer" - #, python-format msgid "deleted %(etype)s #%(eid)s (%(title)s)" msgstr "suppression de l'entité %(etype)s #%(eid)s (%(title)s)" #, python-format msgid "" -"deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #%" -"(eidto)s" +"deleted relation %(rtype)s from %(frometype)s #%(eidfrom)s to %(toetype)s #" +"%(eidto)s" msgstr "" "relation %(rtype)s de %(frometype)s #%(eidfrom)s vers %(toetype)s #%(eidto)s " "supprimée" @@ -1907,15 +1967,7 @@ msgid "description" msgstr "description" -msgctxt "CWEType" -msgid "description" -msgstr "description" - -msgctxt "CWRelation" -msgid "description" -msgstr "description" - -msgctxt "Workflow" +msgctxt "BaseTransition" msgid "description" msgstr "description" @@ -1923,15 +1975,7 @@ msgid "description" msgstr "description" -msgctxt "Transition" -msgid "description" -msgstr "description" - -msgctxt "WorkflowTransition" -msgid "description" -msgstr "description" - -msgctxt "State" +msgctxt "CWEType" msgid "description" msgstr "description" @@ -1939,10 +1983,34 @@ msgid "description" msgstr "description" -msgctxt "BaseTransition" +msgctxt "CWRelation" +msgid "description" +msgstr "description" + +msgctxt "State" +msgid "description" +msgstr "description" + +msgctxt "Transition" msgid "description" msgstr "description" +msgctxt "Workflow" +msgid "description" +msgstr "description" + +msgctxt "WorkflowTransition" +msgid "description" +msgstr "description" + +msgid "description_format" +msgstr "format" + +msgctxt "BaseTransition" +msgid "description_format" +msgstr "format" + +msgctxt "CWAttribute" msgid "description_format" msgstr "format" @@ -1950,38 +2018,30 @@ msgid "description_format" msgstr "format" +msgctxt "CWRType" +msgid "description_format" +msgstr "format" + msgctxt "CWRelation" msgid "description_format" msgstr "format" +msgctxt "State" +msgid "description_format" +msgstr "format" + +msgctxt "Transition" +msgid "description_format" +msgstr "format" + msgctxt "Workflow" msgid "description_format" msgstr "format" -msgctxt "CWAttribute" -msgid "description_format" -msgstr "format" - -msgctxt "Transition" -msgid "description_format" -msgstr "format" - msgctxt "WorkflowTransition" msgid "description_format" msgstr "format" -msgctxt "State" -msgid "description_format" -msgstr "format" - -msgctxt "CWRType" -msgid "description_format" -msgstr "format" - -msgctxt "BaseTransition" -msgid "description_format" -msgstr "format" - msgid "destination state for this transition" msgstr "états accessibles par cette transition" @@ -2004,21 +2064,21 @@ msgid "destination_state" msgstr "état de destination" +msgctxt "SubWorkflowExitPoint" +msgid "destination_state" +msgstr "état de destination" + msgctxt "Transition" msgid "destination_state" msgstr "état de destination" -msgctxt "SubWorkflowExitPoint" -msgid "destination_state" -msgstr "état de destination" +msgid "destination_state_object" +msgstr "destination de" msgctxt "State" 
msgid "destination_state_object" msgstr "état final de" -msgid "destination_state_object" -msgstr "destination de" - msgid "detach attached file" msgstr "détacher le fichier existant" @@ -2028,12 +2088,18 @@ msgid "display order of the component" msgstr "ordre d'affichage du composant" +msgid "display order of the facet" +msgstr "ordre d'affichage de la facette" + msgid "display the box or not" msgstr "afficher la boîte ou non" msgid "display the component or not" msgstr "afficher le composant ou non" +msgid "display the facet or not" +msgstr "afficher la facette ou non" + msgid "" "distinct label to distinguate between other permission entity of the same " "name" @@ -2066,9 +2132,6 @@ msgid "editable-table" msgstr "table éditable" -msgid "edition" -msgstr "édition" - msgid "eid" msgstr "eid" @@ -2081,6 +2144,9 @@ msgid "embed" msgstr "embarqué" +msgid "embedded html" +msgstr "HTML contenu" + msgid "embedding this url is forbidden" msgstr "l'inclusion de cette url est interdite" @@ -2146,7 +2212,7 @@ msgid "eta_date" msgstr "date de fin" -msgid "exit state must a subworkflow state" +msgid "exit state must be a subworkflow state" msgstr "l'état de sortie doit être un état du sous-workflow" msgid "exit_point" @@ -2269,13 +2335,13 @@ msgid "for_user" msgstr "propriété de l'utilisateur" +msgid "for_user_object" +msgstr "utilise les propriétés" + msgctxt "CWUser" msgid "for_user_object" msgstr "a pour préférence" -msgid "for_user_object" -msgstr "utilise les propriétés" - msgid "friday" msgstr "vendredi" @@ -2297,13 +2363,13 @@ msgid "from_entity" msgstr "relation de l'entité" +msgid "from_entity_object" +msgstr "relation sujet" + msgctxt "CWEType" msgid "from_entity_object" msgstr "entité de" -msgid "from_entity_object" -msgstr "relation sujet" - msgid "from_interval_start" msgstr "De" @@ -2314,13 +2380,13 @@ msgid "from_state" msgstr "état de départ" +msgid "from_state_object" +msgstr "transitions depuis cet état" + msgctxt "State" msgid "from_state_object" msgstr "état de départ de" -msgid "from_state_object" -msgstr "transitions depuis cet état" - msgid "full text or RQL query" msgstr "texte à rechercher ou requête RQL" @@ -2357,16 +2423,25 @@ msgid "granted to groups" msgstr "accordée aux groupes" -msgid "graphical representation of the instance'schema" -msgstr "représentation graphique du schéma de l'instance" +#, python-format +msgid "graphical representation of %(appid)s data model" +msgstr "réprésentation graphique du modèle de données de %(appid)s" #, python-format -msgid "graphical schema for %s" -msgstr "graphique du schéma pour %s" +msgid "" +"graphical representation of the %(etype)s entity type from %(appid)s data " +"model" +msgstr "" +"réprésentation graphique du modèle de données pour le type d'entité " +"%(etype)s de %(appid)s" #, python-format -msgid "graphical workflow for %s" -msgstr "graphique du workflow pour %s" +msgid "" +"graphical representation of the %(rtype)s relation type from %(appid)s data " +"model" +msgstr "" +"réprésentation graphique du modèle de données pour le type de relation " +"%(rtype)s de %(appid)s" msgid "group in which a user should be to be allowed to pass this transition" msgstr "" @@ -2481,13 +2556,13 @@ msgid "in_group" msgstr "fait partie du groupe" +msgid "in_group_object" +msgstr "membres" + msgctxt "CWGroup" msgid "in_group_object" msgstr "contient les utilisateurs" -msgid "in_group_object" -msgstr "membres" - msgid "in_state" msgstr "état" @@ -2540,10 +2615,10 @@ msgid "initial_state" msgstr "état initial" -msgctxt "State" msgid 
"initial_state_object" msgstr "état initial de" +msgctxt "State" msgid "initial_state_object" msgstr "état initial de" @@ -2785,6 +2860,9 @@ msgid "missing parameters for entity %s" msgstr "paramètres manquants pour l'entité %s" +msgid "modification" +msgstr "modification" + msgid "modification_date" msgstr "date de modification" @@ -2809,10 +2887,38 @@ msgid "name" msgstr "nom" +msgctxt "BaseTransition" +msgid "name" +msgstr "nom" + +msgctxt "CWCache" +msgid "name" +msgstr "nom" + +msgctxt "CWConstraintType" +msgid "name" +msgstr "nom" + msgctxt "CWEType" msgid "name" msgstr "nom" +msgctxt "CWGroup" +msgid "name" +msgstr "nom" + +msgctxt "CWPermission" +msgid "name" +msgstr "nom" + +msgctxt "CWRType" +msgid "name" +msgstr "nom" + +msgctxt "State" +msgid "name" +msgstr "nom" + msgctxt "Transition" msgid "name" msgstr "nom" @@ -2821,38 +2927,10 @@ msgid "name" msgstr "nom" -msgctxt "CWGroup" -msgid "name" -msgstr "nom" - -msgctxt "CWConstraintType" -msgid "name" -msgstr "nom" - msgctxt "WorkflowTransition" msgid "name" msgstr "nom" -msgctxt "State" -msgid "name" -msgstr "nom" - -msgctxt "CWPermission" -msgid "name" -msgstr "nom" - -msgctxt "CWRType" -msgid "name" -msgstr "nom" - -msgctxt "BaseTransition" -msgid "name" -msgstr "nom" - -msgctxt "CWCache" -msgid "name" -msgstr "nom" - msgid "name of the cache" msgstr "nom du cache applicatif" @@ -2860,8 +2938,8 @@ "name of the main variables which should be used in the selection if " "necessary (comma separated)" msgstr "" -"nom des variables principaes qui devrait être utilisées dans la sélection si " -"nécessaire (les séparer par des virgules)" +"nom des variables principales qui devrait être utilisées dans la sélection " +"si nécessaire (les séparer par des virgules)" msgid "name or identifier of the permission" msgstr "nom (identifiant) de la permission" @@ -2909,6 +2987,9 @@ msgid "no edited fields specified for entity %s" msgstr "aucun champ à éditer spécifié pour l'entité %s" +msgid "no related entity" +msgstr "pas d'entité liée" + msgid "no related project" msgstr "pas de projet rattaché" @@ -3035,9 +3116,6 @@ msgid "permissions for this entity" msgstr "permissions pour cette entité" -msgid "personnal informations" -msgstr "informations personnelles" - msgid "pick existing bookmarks" msgstr "récupérer des signets existants" @@ -3067,13 +3145,13 @@ msgid "prefered_form" msgstr "forme préférée" +msgid "prefered_form_object" +msgstr "forme préférée à" + msgctxt "EmailAddress" msgid "prefered_form_object" msgstr "forme préférée de" -msgid "prefered_form_object" -msgstr "forme préférée à" - msgid "preferences" msgstr "préférences" @@ -3090,12 +3168,15 @@ msgid "primary_email" msgstr "email principal" +msgid "primary_email_object" +msgstr "adresse email principale (object)" + msgctxt "EmailAddress" msgid "primary_email_object" msgstr "adresse principale de" -msgid "primary_email_object" -msgstr "adresse email principale (object)" +msgid "profile" +msgstr "profil" msgid "progress" msgstr "avancement" @@ -3115,24 +3196,24 @@ msgid "read" msgstr "lecture" -msgid "read_perm" -msgstr "lecture" - +msgid "read_permission" +msgstr "permission de lire" + +msgctxt "CWAttribute" msgid "read_permission" msgstr "permission de lire" msgctxt "CWEType" msgid "read_permission" -msgstr "permission d'ajouter" - -msgctxt "CWAttribute" -msgid "read_permission" msgstr "permission de lire" msgctxt "CWRelation" msgid "read_permission" msgstr "permission de lire" +msgid "read_permission_object" +msgstr "a la permission de lire" + msgctxt "CWGroup" msgid 
"read_permission_object" msgstr "peut lire" @@ -3141,9 +3222,6 @@ msgid "read_permission_object" msgstr "peut lire" -msgid "read_permission_object" -msgstr "a la permission de lire" - msgid "registry" msgstr "registre" @@ -3177,16 +3255,34 @@ msgid "relation_type" msgstr "type de relation" +msgid "relation_type_object" +msgstr "définition" + msgctxt "CWRType" msgid "relation_type_object" msgstr "définition" -msgid "relation_type_object" -msgstr "définition" +msgid "relations" +msgstr "relations" + +msgctxt "CWUniqueTogetherConstraint" +msgid "relations" +msgstr "relations" msgid "relations deleted" msgstr "relations supprimées" +msgid "relations_object" +msgstr "relations de" + +msgctxt "CWAttribute" +msgid "relations_object" +msgstr "contraint par" + +msgctxt "CWRelation" +msgid "relations_object" +msgstr "contraint par" + msgid "relative url of the bookmarked page" msgstr "url relative de la page" @@ -3200,11 +3296,11 @@ msgid "require_group" msgstr "restreinte au groupe" -msgctxt "Transition" +msgctxt "CWPermission" msgid "require_group" msgstr "restreinte au groupe" -msgctxt "CWPermission" +msgctxt "Transition" msgid "require_group" msgstr "restreinte au groupe" @@ -3212,11 +3308,11 @@ msgid "require_group" msgstr "restreinte au groupe" +msgid "require_group_object" +msgstr "a les droits" + msgctxt "CWGroup" msgid "require_group_object" -msgstr "de" - -msgid "require_group_object" msgstr "a les droits" msgid "require_permission" @@ -3244,9 +3340,9 @@ "current relation'subject, object and to the request user. " msgstr "" "partie restriction de la requête rql. Pour une expression s'appliquant à une " -"entité, X et U sont respectivement préféfinis à l'entité et à l'utilisateur " +"entité, X et U sont respectivement prédéfinis à l'entité et à l'utilisateur " "courant. Pour une expression s'appliquant à une relation, S, O et U sont " -"respectivement préféfinis au sujet/objet de la relation et à l'utilisateur " +"respectivement prédéfinis au sujet/objet de la relation et à l'utilisateur " "courant." 
msgid "revert changes" @@ -3262,7 +3358,7 @@ msgstr "RSS" msgid "same_as" -msgstr "identique à l'entité externe" +msgstr "identique à" msgid "sample format" msgstr "exemple" @@ -3394,7 +3490,7 @@ msgid "site-wide property can't be set for user" msgstr "une propriété spécifique au site ne peut être propre à un utilisateur" -msgid "some errors occured:" +msgid "some errors occurred:" msgstr "des erreurs sont survenues" msgid "some later transaction(s) touch entity, undo them first" @@ -3417,10 +3513,10 @@ msgid "specializes" msgstr "spécialise" -msgctxt "CWEType" msgid "specializes_object" msgstr "parent de" +msgctxt "CWEType" msgid "specializes_object" msgstr "parent de" @@ -3459,13 +3555,13 @@ msgid "state_of" msgstr "état de" +msgid "state_of_object" +msgstr "a pour état" + msgctxt "Workflow" msgid "state_of_object" msgstr "contient les états" -msgid "state_of_object" -msgstr "a pour état" - msgid "status change" msgstr "changer l'état" @@ -3508,20 +3604,20 @@ msgid "subworkflow_exit" msgstr "sortie du sous-workflow" +msgid "subworkflow_exit_object" +msgstr "états de sortie" + msgctxt "SubWorkflowExitPoint" msgid "subworkflow_exit_object" msgstr "états de sortie" -msgid "subworkflow_exit_object" -msgstr "états de sortie" +msgid "subworkflow_object" +msgstr "utilisé par la transition" msgctxt "Workflow" msgid "subworkflow_object" msgstr "sous workflow de" -msgid "subworkflow_object" -msgstr "utilisé par la transition" - msgid "subworkflow_state" msgstr "état du sous-workflow" @@ -3529,10 +3625,10 @@ msgid "subworkflow_state" msgstr "état" -msgctxt "State" msgid "subworkflow_state_object" msgstr "état de sortie de" +msgctxt "State" msgid "subworkflow_state_object" msgstr "état de sortie de" @@ -3647,12 +3743,12 @@ msgid "to_entity" msgstr "pour l'entité" +msgid "to_entity_object" +msgstr "objet de la relation" + msgctxt "CWEType" msgid "to_entity_object" -msgstr "relation objet" - -msgid "to_entity_object" -msgstr "relation objet" +msgstr "objet de la relation" msgid "to_interval_end" msgstr "à" @@ -3664,13 +3760,13 @@ msgid "to_state" msgstr "état de destination" +msgid "to_state_object" +msgstr "transitions vers cet état" + msgctxt "State" msgid "to_state_object" msgstr "transition vers cet état" -msgid "to_state_object" -msgstr "transitions vers cet état" - msgid "todo_by" msgstr "à faire par" @@ -3708,10 +3804,10 @@ msgid "transition_of" msgstr "transition de" -msgctxt "Workflow" msgid "transition_of_object" msgstr "a pour transition" +msgctxt "Workflow" msgid "transition_of_object" msgstr "a pour transition" @@ -3809,19 +3905,19 @@ msgid "update" msgstr "modification" -msgid "update_perm" -msgstr "modification" - msgid "update_permission" msgstr "permission de modification" +msgctxt "CWAttribute" +msgid "update_permission" +msgstr "permission de modifier" + msgctxt "CWEType" msgid "update_permission" msgstr "permission de modifier" -msgctxt "CWAttribute" -msgid "update_permission" -msgstr "permission de modifier" +msgid "update_permission_object" +msgstr "a la permission de modifier" msgctxt "CWGroup" msgid "update_permission_object" @@ -3831,8 +3927,8 @@ msgid "update_permission_object" msgstr "peut modifier" -msgid "update_permission_object" -msgstr "a la permission de modifier" +msgid "update_relation" +msgstr "modifier" msgid "updated" msgstr "mis à jour" @@ -3867,13 +3963,13 @@ msgid "use_email" msgstr "utilise l'adresse électronique" +msgid "use_email_object" +msgstr "adresse utilisée par" + msgctxt "EmailAddress" msgid "use_email_object" msgstr "utilisée par" -msgid 
"use_email_object" -msgstr "adresse utilisée par" - msgid "use_template_format" msgstr "utilisation du format 'cubicweb template'" @@ -3984,6 +4080,10 @@ msgid "view_index" msgstr "accueil" +#, python-format +msgid "violates unique_together constraints (%s)" +msgstr "violation de contrainte unique_together (%s)" + msgid "visible" msgstr "visible" @@ -4057,10 +4157,10 @@ msgid "workflow_of" msgstr "workflow de" -msgctxt "CWEType" msgid "workflow_of_object" msgstr "a pour workflow" +msgctxt "CWEType" msgid "workflow_of_object" msgstr "a pour workflow" @@ -4085,3 +4185,27 @@ msgid "you should probably delete that property" msgstr "vous devriez probablement supprimer cette propriété" + +#~ msgid "add_perm" +#~ msgstr "ajout" + +#~ msgid "delete_perm" +#~ msgstr "suppression" + +#~ msgid "edition" +#~ msgstr "édition" + +#~ msgid "graphical workflow for %s" +#~ msgstr "graphique du workflow pour %s" + +#~ msgid "personnal informations" +#~ msgstr "informations personnelles" + +#~ msgid "read_perm" +#~ msgstr "lecture" + +#~ msgid "update_perm" +#~ msgstr "modification" + +#~ msgid "yams type, rdf type or mime type of the object" +#~ msgstr "type yams, vocabulaire rdf ou type mime de l'objet" diff -r f4d1d5d9ccbb -r 90f2f20367bc i18n/static-messages.pot --- a/i18n/static-messages.pot Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,11 +0,0 @@ -msgid "read_perm" -msgstr "" - -msgid "add_perm" -msgstr "" - -msgid "update_perm" -msgstr "" - -msgid "delete_perm" -msgstr "" diff -r f4d1d5d9ccbb -r 90f2f20367bc interfaces.py --- a/interfaces.py Tue Jul 27 12:36:03 2010 +0200 +++ b/interfaces.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,68 +15,24 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -""" -Standard interfaces. +"""Standard interfaces. Deprecated in favor of adapters. .. note:: - The `implements` selector matches not only entity classes but also - their interfaces. Writing __select__ = implements('IGeocodable') is - a perfectly fine thing to do. + The `implements` selector used to match not only entity classes but also their + interfaces. This will disappear in a future version. You should define an + adapter for that interface and use `adaptable('MyIFace')` selector on appobjects + that require that interface. + """ __docformat__ = "restructuredtext en" from logilab.common.interface import Interface -class IEmailable(Interface): - """interface for emailable entities""" - def get_email(self): - """return email address""" - - @classmethod - def allowed_massmail_keys(cls): - """returns a set of allowed email substitution keys - - The default is to return the entity's attribute list but an - entity class might override this method to allow extra keys. - For instance, the Person class might want to return a `companyname` - key. - """ - - def as_email_context(self): - """returns the dictionary as used by the sendmail controller to - build email bodies. - - NOTE: the dictionary keys should match the list returned by the - `allowed_massmail_keys` method. 
- """ - - -class IWorkflowable(Interface): - """interface for entities dealing with a specific workflow""" - # XXX to be completed, see cw.entities.wfobjs.WorkflowableMixIn - - @property - def state(self): - """return current state name""" - - def change_state(self, stateeid, trcomment=None, trcommentformat=None): - """change the entity's state to the state of the given name in entity's - workflow - """ - - def latest_trinfo(self): - """return the latest transition information for this entity - """ - - +# XXX deprecates in favor of IProgressAdapter class IProgress(Interface): - """something that has a cost, a state and a progression - - Take a look at cubicweb.mixins.ProgressMixIn for some - default implementations - """ + """something that has a cost, a state and a progression""" @property def cost(self): @@ -112,7 +68,7 @@ def progress(self): """returns the % progress of the task item""" - +# XXX deprecates in favor of IMileStoneAdapter class IMileStone(IProgress): """represents an ITask's item""" @@ -135,7 +91,132 @@ def contractors(self): """returns the list of persons supposed to work on this task""" +# XXX deprecates in favor of IEmbedableAdapter +class IEmbedable(Interface): + """interface for embedable entities""" + def embeded_url(self): + """embed action interface""" + +# XXX deprecates in favor of ICalendarViewsAdapter +class ICalendarViews(Interface): + """calendar views interface""" + def matching_dates(self, begin, end): + """ + :param begin: day considered as begin of the range (`DateTime`) + :param end: day considered as end of the range (`DateTime`) + + :return: + a list of dates (`DateTime`) in the range [`begin`, `end`] on which + this entity apply + """ + +# XXX deprecates in favor of ICalendarableAdapter +class ICalendarable(Interface): + """interface for items that do have a begin date 'start' and an end date 'stop' + """ + + @property + def start(self): + """return start date""" + + @property + def stop(self): + """return stop state""" + +# XXX deprecates in favor of ICalendarableAdapter +class ITimetableViews(Interface): + """timetable views interface""" + def timetable_date(self): + """XXX explain + + :return: date (`DateTime`) + """ + +# XXX deprecates in favor of IGeocodableAdapter +class IGeocodable(Interface): + """interface required by geocoding views such as gmap-view""" + + @property + def latitude(self): + """returns the latitude of the entity""" + + @property + def longitude(self): + """returns the longitude of the entity""" + + def marker_icon(self): + """returns the icon that should be used as the marker""" + +# XXX deprecates in favor of ISIOCItemAdapter +class ISiocItem(Interface): + """interface for entities which may be represented as an ISIOC item""" + + def isioc_content(self): + """return item's content""" + + def isioc_container(self): + """return container entity""" + + def isioc_type(self): + """return container type (post, BlogPost, MailMessage)""" + + def isioc_replies(self): + """return replies items""" + + def isioc_topics(self): + """return topics items""" + +# XXX deprecates in favor of ISIOCContainerAdapter +class ISiocContainer(Interface): + """interface for entities which may be represented as an ISIOC container""" + + def isioc_type(self): + """return container type (forum, Weblog, MailingList)""" + + def isioc_items(self): + """return contained items""" + +# XXX deprecates in favor of IEmailableAdapter +class IFeed(Interface): + """interface for entities with rss flux""" + + def rss_feed_url(self): + """""" + +# XXX deprecates in 
favor of IDownloadableAdapter +class IDownloadable(Interface): + """interface for downloadable entities""" + + def download_url(self): # XXX not really part of this interface + """return an url to download entity's content""" + def download_content_type(self): + """return MIME type of the downloadable content""" + def download_encoding(self): + """return encoding of the downloadable content""" + def download_file_name(self): + """return file name of the downloadable content""" + def download_data(self): + """return actual data of the downloadable content""" + +# XXX deprecates in favor of IPrevNextAdapter +class IPrevNext(Interface): + """interface for entities which can be linked to a previous and/or next + entity + """ + + def next_entity(self): + """return the 'next' entity""" + def previous_entity(self): + """return the 'previous' entity""" + +# XXX deprecates in favor of IBreadCrumbsAdapter +class IBreadCrumbs(Interface): + + def breadcrumbs(self, view, recurs=False): + pass + +# XXX deprecates in favor of ITreeAdapter class ITree(Interface): def parent(self): @@ -159,141 +240,3 @@ def root(self): """returns the root object""" - -## web specific interfaces #################################################### - - -class IPrevNext(Interface): - """interface for entities which can be linked to a previous and/or next - entity - """ - - def next_entity(self): - """return the 'next' entity""" - def previous_entity(self): - """return the 'previous' entity""" - - -class IBreadCrumbs(Interface): - """interface for entities which can be "located" on some path""" - - # XXX fix recurs ! - def breadcrumbs(self, view, recurs=False): - """return a list containing some: - - * tuple (url, label) - * entity - * simple label string - - defining path from a root to the current view - - the main view is given as argument so breadcrumbs may vary according - to displayed view (may be None). When recursing on a parent entity, - the `recurs` argument should be set to True. 
- """ - - -class IDownloadable(Interface): - """interface for downloadable entities""" - - def download_url(self): # XXX not really part of this interface - """return an url to download entity's content""" - def download_content_type(self): - """return MIME type of the downloadable content""" - def download_encoding(self): - """return encoding of the downloadable content""" - def download_file_name(self): - """return file name of the downloadable content""" - def download_data(self): - """return actual data of the downloadable content""" - - -class IEmbedable(Interface): - """interface for embedable entities""" - - def embeded_url(self): - """embed action interface""" - -class ICalendarable(Interface): - """interface for items that do have a begin date 'start' and an end date 'stop' - """ - - @property - def start(self): - """return start date""" - - @property - def stop(self): - """return stop state""" - -class ICalendarViews(Interface): - """calendar views interface""" - def matching_dates(self, begin, end): - """ - :param begin: day considered as begin of the range (`DateTime`) - :param end: day considered as end of the range (`DateTime`) - - :return: - a list of dates (`DateTime`) in the range [`begin`, `end`] on which - this entity apply - """ - -class ITimetableViews(Interface): - """timetable views interface""" - def timetable_date(self): - """XXX explain - - :return: date (`DateTime`) - """ - -class IGeocodable(Interface): - """interface required by geocoding views such as gmap-view""" - - @property - def latitude(self): - """returns the latitude of the entity""" - - @property - def longitude(self): - """returns the longitude of the entity""" - - def marker_icon(self): - """returns the icon that should be used as the marker - (returns None for default) - """ - -class IFeed(Interface): - """interface for entities with rss flux""" - - def rss_feed_url(self): - """return an url which layout sub-entities item - """ - -class ISiocItem(Interface): - """interface for entities (which are item - in sioc specification) with sioc views""" - - def isioc_content(self): - """return content entity""" - - def isioc_container(self): - """return container entity""" - - def isioc_type(self): - """return container type (post, BlogPost, MailMessage)""" - - def isioc_replies(self): - """return replies items""" - - def isioc_topics(self): - """return topics items""" - -class ISiocContainer(Interface): - """interface for entities (which are container - in sioc specification) with sioc views""" - - def isioc_type(self): - """return container type (forum, Weblog, MailingList)""" - - def isioc_items(self): - """return contained items""" diff -r f4d1d5d9ccbb -r 90f2f20367bc mail.py --- a/mail.py Tue Jul 27 12:36:03 2010 +0200 +++ b/mail.py Wed Nov 03 16:38:28 2010 +0100 @@ -184,7 +184,7 @@ # previous email if not self.msgid_timestamp: refs = [self.construct_message_id(eid) - for eid in entity.notification_references(self)] + for eid in entity.cw_adapt_to('INotifiable').notification_references(self)] else: refs = () msgid = self.construct_message_id(entity.eid) @@ -198,7 +198,7 @@ if isinstance(something, Entity): # hi-jack self._cw to get a session for the returned user self._cw = self._cw.hijack_user(something) - emailaddr = something.get_email() + emailaddr = something.cw_adapt_to('IEmailable').get_email() else: emailaddr, lang = something self._cw.set_language(lang) @@ -246,7 +246,8 @@ # email generation helpers ################################################# def construct_message_id(self, eid): - return 
construct_message_id(self._cw.vreg.config.appid, eid, self.msgid_timestamp) + return construct_message_id(self._cw.vreg.config.appid, eid, + self.msgid_timestamp) def format_field(self, attr, value): return ':%(attr)s: %(value)s' % {'attr': attr, 'value': value} diff -r f4d1d5d9ccbb -r 90f2f20367bc migration.py --- a/migration.py Tue Jul 27 12:36:03 2010 +0200 +++ b/migration.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""utilities for instances migration +"""utilities for instances migration""" -""" __docformat__ = "restructuredtext en" import sys @@ -25,14 +24,16 @@ import logging import tempfile from os.path import exists, join, basename, splitext +from itertools import chain +from logilab.common import IGNORED_EXTENSIONS from logilab.common.decorators import cached from logilab.common.configuration import REQUIRED, read_old_config from logilab.common.shellutils import ASK from logilab.common.changelog import Version -from cubicweb import ConfigurationError - +from cubicweb import ConfigurationError, ExecutionError +from cubicweb.cwconfig import CubicWebConfiguration as cwcfg def filter_scripts(config, directory, fromversion, toversion, quiet=True): """return a list of paths of migration files to consider to upgrade @@ -52,8 +53,7 @@ return [] result = [] for fname in os.listdir(directory): - if fname.endswith('.pyc') or fname.endswith('.pyo') \ - or fname.endswith('~'): + if fname.endswith(IGNORED_EXTENSIONS): continue fpath = join(directory, fname) try: @@ -76,9 +76,6 @@ return sorted(result) -IGNORED_EXTENSIONS = ('.swp', '~') - - def execscript_confirm(scriptpath): """asks for confirmation before executing a script and provides the ability to show the script's content @@ -111,7 +108,7 @@ self.config = config if config: # no config on shell to a remote instance - self.config.init_log(logthreshold=logging.ERROR, debug=True) + self.config.init_log(logthreshold=logging.ERROR) # 0: no confirmation, 1: only main commands confirmed, 2 ask for everything self.verbosity = verbosity self.need_wrap = True @@ -125,13 +122,15 @@ 'config': self.config, 'interactive_mode': interactive, } + self._context_stack = [] def __getattribute__(self, name): try: return object.__getattribute__(self, name) except AttributeError: cmd = 'cmd_%s' % name - if hasattr(self, cmd): + # search self.__class__ to avoid infinite recursion + if hasattr(self.__class__, cmd): meth = getattr(self, cmd) return lambda *args, **kwargs: self.interact(args, kwargs, meth=meth) @@ -202,7 +201,8 @@ if not ask_confirm or self.confirm(msg): return meth(*args, **kwargs) - def confirm(self, question, shell=True, abort=True, retry=False, default='y'): + def confirm(self, question, shell=True, abort=True, retry=False, pdb=False, + default='y'): """ask for confirmation and return true on positive answer if `retry` is true the r[etry] answer may return 2 @@ -210,6 +210,8 @@ possibleanswers = ['y', 'n'] if abort: possibleanswers.append('abort') + if pdb: + possibleanswers.append('pdb') if shell: possibleanswers.append('shell') if retry: @@ -224,9 +226,13 @@ return 2 if answer == 'abort': raise SystemExit(1) - if shell and answer == 'shell': + if answer == 'shell': self.interactive_shell() - return self.confirm(question) + return self.confirm(question, shell, abort, retry, pdb, default) + if answer == 'pdb': + import pdb + pdb.set_trace() + return self.confirm(question, shell, abort, retry, pdb, default) return True def 
interactive_shell(self): @@ -280,27 +286,62 @@ context[attr[4:]] = getattr(self, attr) return context + def update_context(self, key, value): + for context in self._context_stack: + context[key] = value + self.__context[key] = value + def cmd_process_script(self, migrscript, funcname=None, *args, **kwargs): - """execute a migration script - in interactive mode, display the migration script path, ask for - confirmation and execute it if confirmed + """execute a migration script in interactive mode + + Display the migration script path, ask for confirmation and execute it + if confirmed + + Allowed input file formats for migration scripts: + - `python` (.py) + - `sql` (.sql) + - `doctest` (.txt or .rst) + + .. warning:: sql migration scripts are not available in web-only instance + + You can pass script parameters with using double dash (--) in the + command line + + Context environment can have these variables defined: + - __name__ : will be determine by funcname parameter + - __file__ : is the name of the script if it exists + - __args__ : script arguments coming from command-line + + :param migrscript: name of the script + :param funcname: defines __name__ inside the shell (or use __main__) + :params args: optional arguments for funcname + :keyword scriptargs: optional arguments of the script """ + ftypes = {'python': ('.py',), + 'doctest': ('.txt', '.rst'), + 'sql': ('.sql',)} + # sql migration scripts are not available in web-only instance + if not hasattr(self, "session"): + ftypes.pop('sql') migrscript = os.path.normpath(migrscript) - if migrscript.endswith('.py'): - script_mode = 'python' - elif migrscript.endswith('.txt') or migrscript.endswith('.rst'): - script_mode = 'doctest' + for (script_mode, ftype) in ftypes.items(): + if migrscript.endswith(ftype): + break else: - raise Exception('This is not a valid cubicweb shell input') + ftypes = ', '.join(chain(*ftypes.values())) + msg = 'ignoring %s, not a valid script extension (%s)' + raise ExecutionError(msg % (migrscript, ftypes)) if not self.execscript_confirm(migrscript): return scriptlocals = self._create_context().copy() + self._context_stack.append(scriptlocals) if script_mode == 'python': if funcname is None: pyname = '__main__' else: pyname = splitext(basename(migrscript))[0] - scriptlocals.update({'__file__': migrscript, '__name__': pyname}) + scriptlocals.update({'__file__': migrscript, '__name__': pyname, + '__args__': kwargs.pop("scriptargs", [])}) execfile(migrscript, scriptlocals) if funcname is not None: try: @@ -311,10 +352,15 @@ self.critical('no %s in script %s', funcname, migrscript) return None return func(*args, **kwargs) + elif script_mode == 'sql': + from cubicweb.server.sqlutils import sqlexec + sqlexec(open(migrscript).read(), self.session.system_sql) + self.commit() else: # script_mode == 'doctest' import doctest doctest.testfile(migrscript, module_relative=False, optionflags=doctest.ELLIPSIS, globs=scriptlocals) + self._context_stack.pop() def cmd_option_renamed(self, oldname, newname): """a configuration option has been renamed""" @@ -345,10 +391,8 @@ cubes = (cubes,) origcubes = self.config.cubes() newcubes = [p for p in self.config.expand_cubes(cubes) - if not p in origcubes] + if not p in origcubes] if newcubes: - for cube in cubes: - assert cube in newcubes self.config.add_cubes(newcubes) return newcubes @@ -410,8 +454,8 @@ """ def __init__(self, config): - self.cubes = {} self.config = config + self.cubes = {'cubicweb': cwcfg.cubicweb_version()} def add_cube(self, name, version): self.cubes[name] = 
version @@ -419,44 +463,50 @@ def solve(self): self.warnings = [] self.errors = [] - self.read_constraints() - for cube, versions in sorted(self.constraints.items()): - oper, version = None, None + self.dependencies = {} + self.reverse_dependencies = {} + self.constraints = {} + # read dependencies + for cube in self.cubes: + if cube == 'cubicweb': continue + self.dependencies[cube] = dict(self.config.cube_dependencies(cube)) + self.dependencies[cube]['cubicweb'] = self.config.cube_depends_cubicweb_version(cube) + # compute reverse dependencies + for cube, dependencies in self.dependencies.iteritems(): + for name, constraint in dependencies.iteritems(): + self.reverse_dependencies.setdefault(name,set()) + if constraint: + try: + oper, version = constraint.split() + self.reverse_dependencies[name].add( (oper, version, cube) ) + except: + self.warnings.append( + 'cube %s depends on %s but constraint badly ' + 'formatted: %s' % (cube, name, constraint)) + # check consistency + for cube, versions in sorted(self.reverse_dependencies.items()): + oper, version, source = None, None, None # simplify constraints if versions: for constraint in versions: - op, ver = constraint + op, ver, src = constraint if oper is None: oper = op version = ver + source = src elif op == '>=' and oper == '>=': - version = max_version(ver, version) + if version_strictly_lower(version, ver): + version = ver + source = src else: print 'unable to handle this case', oper, version, op, ver # "solve" constraint satisfaction problem if cube not in self.cubes: - self.errors.append( ('add', cube, version) ) + self.errors.append( ('add', cube, version, source) ) elif versions: lower_strict = version_strictly_lower(self.cubes[cube], version) if oper in ('>=','='): if lower_strict: - self.errors.append( ('update', cube, version) ) + self.errors.append( ('update', cube, version, source) ) else: print 'unknown operator', oper - - def read_constraints(self): - self.constraints = {} - self.reverse_constraints = {} - for cube in self.cubes: - use = self.config.cube_dependencies(cube) - for name, constraint in use.iteritems(): - self.constraints.setdefault(name,set()) - if constraint: - try: - oper, version = constraint.split() - self.constraints[name].add( (oper, version) ) - except: - self.warnings.append( - 'cube %s depends on %s but constraint badly ' - 'formatted: %s' % (cube, name, constraint)) - self.reverse_constraints.setdefault(name, set()).add(cube) diff -r f4d1d5d9ccbb -r 90f2f20367bc misc/migration/3.6.0_Any.py --- a/misc/migration/3.6.0_Any.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -sync_schema_props_perms('read_permission', syncperms=False) # fix read_permission cardinality diff -r f4d1d5d9ccbb -r 90f2f20367bc misc/migration/3.9.0_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/misc/migration/3.9.0_Any.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,3 @@ +if repo.system_source.dbdriver == 'postgres': + sql('ALTER TABLE appears ADD COLUMN weight float') + sql('UPDATE appears SET weight=1.0 ') diff -r f4d1d5d9ccbb -r 90f2f20367bc misc/migration/3.9.5_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/misc/migration/3.9.5_Any.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,4 @@ +if not rql('CWConstraintType X WHERE X name "RQLUniqueConstraint"', + ask_confirm=False): + rql('INSERT CWConstraintType X: X name "RQLUniqueConstraint"', + ask_confirm=False) diff -r f4d1d5d9ccbb -r 90f2f20367bc misc/migration/bootstrapmigration_repository.py --- 
a/misc/migration/bootstrapmigration_repository.py Tue Jul 27 12:36:03 2010 +0200 +++ b/misc/migration/bootstrapmigration_repository.py Wed Nov 03 16:38:28 2010 +0100 @@ -18,7 +18,6 @@ """allways executed before all others in server migration it should only include low level schema changes - """ from __future__ import with_statement @@ -93,6 +92,10 @@ for action in ('read', 'add', 'delete'): drop_relation_definition('CWRType', '%s_permission' % action, 'CWGroup', commit=False) drop_relation_definition('CWRType', '%s_permission' % action, 'RQLExpression') + sync_schema_props_perms('read_permission', syncperms=False) # fix read_permission cardinality + +if applcubicwebversion < (3, 9, 6) and cubicwebversion >= (3, 9, 6): + add_entity_type('CWUniqueTogetherConstraint') if applcubicwebversion < (3, 4, 0) and cubicwebversion >= (3, 4, 0): diff -r f4d1d5d9ccbb -r 90f2f20367bc misc/scripts/detect_cycle.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/misc/scripts/detect_cycle.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,15 @@ + +try: + rtype, = __args__ +except ValueError: + print 'USAGE: cubicweb-ctl shell detect_cycle.py -- ' + print + +graph = {} +for fromeid, toeid in rql('Any X,Y WHERE X %s Y' % rtype): + graph.setdefault(fromeid, []).append(toeid) + +from logilab.common.graph import get_cycles + +for cycle in get_cycles(graph): + print 'cycle', '->'.join(str(n) for n in cycle) diff -r f4d1d5d9ccbb -r 90f2f20367bc misc/scripts/ldap_change_base_dn.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/misc/scripts/ldap_change_base_dn.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,24 @@ +from base64 import b64decode, b64encode +try: + uri, newdn = __args__ +except ValueError: + print 'USAGE: cubicweb-ctl shell ldap_change_base_dn.py -- ' + print + print 'you should not have updated your sources file yet' + +olddn = repo.config.sources()[uri]['user-base-dn'] + +assert olddn != newdn + +raw_input("Ensure you've stopped the instance, type enter when done.") + +for eid, extid in sql("SELECT eid, extid FROM entities WHERE source='%s'" % uri): + olduserdn = b64decode(extid) + newuserdn = olduserdn.replace(olddn, newdn) + if newuserdn != olduserdn: + print olduserdn, '->', newuserdn + sql("UPDATE entities SET extid='%s' WHERE eid=%s" % (b64encode(newuserdn), eid)) + +commit() + +print 'you can now update the sources file to the new dn and restart the instance' diff -r f4d1d5d9ccbb -r 90f2f20367bc misc/scripts/repair_file_1-9_migration.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/misc/scripts/repair_file_1-9_migration.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,52 @@ +"""execute this script if you've migration to file >= 1.9.0 with cubicweb <= 3.9.2 + +FYI, this migration occurred : +* on our intranet on July 07 2010 +* on our extranet on July 16 2010 +""" +from __future__ import with_statement + +try: + backupinstance, = __args__ +except ValueError: + print 'USAGE: cubicweb-ctl shell repair_file_1-9_migration.py -- ' + print + print 'you should restored the backup on a new instance, accessible through pyro' + +from cubicweb import cwconfig, dbapi +from cubicweb.server.session import hooks_control + +sourcescfg = repo.config.sources() +backupcfg = cwconfig.instance_configuration(backupinstance) +backupcfg.repairing = True +backuprepo, backupcnx = dbapi.in_memory_cnx(backupcfg, sourcescfg['admin']['login'], + password=sourcescfg['admin']['password'], + host='localhost') +backupcu = backupcnx.cursor() + +with hooks_control(session, session.HOOKS_DENY_ALL): + rql('SET X is Y WHERE X is File, Y 
name "File", NOT X is Y') + rql('SET X is_instance_of Y WHERE X is File, Y name "File", NOT X is_instance_of Y') + for rtype, in backupcu.execute('DISTINCT Any RTN WHERE X relation_type RT, RT name RTN,' + 'X from_entity Y, Y name "Image", X is CWRelation, ' + 'EXISTS(XX is CWRelation, XX relation_type RT, ' + 'XX from_entity YY, YY name "File")'): + if rtype in ('is', 'is_instance_of'): + continue + print rtype + for feid, xeid in backupcu.execute('Any F,X WHERE F %s X, F is IN (File,Image)' % rtype): + print 'restoring relation %s between file %s and %s' % (rtype, feid, xeid), + print rql('SET F %s X WHERE F eid %%(f)s, X eid %%(x)s, NOT F %s X' % (rtype, rtype), + {'f': feid, 'x': xeid}) + + for rtype, in backupcu.execute('DISTINCT Any RTN WHERE X relation_type RT, RT name RTN,' + 'X to_entity Y, Y name "Image", X is CWRelation, ' + 'EXISTS(XX is CWRelation, XX relation_type RT, ' + 'XX to_entity YY, YY name "File")'): + print rtype + for feid, xeid in backupcu.execute('Any F,X WHERE X %s F, F is IN (File,Image)' % rtype): + print 'restoring relation %s between %s and file %s' % (rtype, xeid, feid), + print rql('SET X %s F WHERE F eid %%(f)s, X eid %%(x)s, NOT X %s F' % (rtype, rtype), + {'f': feid, 'x': xeid}) + +commit() diff -r f4d1d5d9ccbb -r 90f2f20367bc mixins.py --- a/mixins.py Tue Jul 27 12:36:03 2010 +0200 +++ b/mixins.py Wed Nov 03 16:38:28 2010 +0100 @@ -21,9 +21,10 @@ from itertools import chain from logilab.common.decorators import cached +from logilab.common.deprecation import deprecated, class_deprecated from cubicweb.selectors import implements -from cubicweb.interfaces import IEmailable, ITree +from cubicweb.interfaces import ITree class TreeMixIn(object): @@ -33,6 +34,9 @@ tree_attribute, parent_target and children_target class attribute to benefit from this default implementation """ + __metaclass__ = class_deprecated + __deprecation_warning__ = '[3.9] TreeMixIn is deprecated, use/override ITreeAdapter instead' + tree_attribute = None # XXX misnamed parent_target = 'subject' @@ -67,7 +71,7 @@ _done = set() for child in self.children(): if child.eid in _done: - self.error('loop in %s tree', self.__regid__.lower()) + self.error('loop in %s tree: %s', self.__regid__.lower(), child) continue yield child _done.add(child.eid) @@ -90,7 +94,7 @@ parent = self while parent: if parent.eid in path: - self.error('loop in %s tree', self.__regid__.lower()) + self.error('loop in %s tree: %s', self.__regid__.lower(), parent) break path.append(parent.eid) try: @@ -117,16 +121,6 @@ return chain([self], _uptoroot(self)) return _uptoroot(self) - def notification_references(self, view): - """used to control References field of email send on notification - for this entity. `view` is the notification view. - - Should return a list of eids which can be used to generate message ids - of previously sent email - """ - return self.path()[:-1] - - ## ITree interface ######################################################## def parent(self): """return the parent entity if any, else None (e.g. 
if we are on the @@ -151,7 +145,7 @@ entities=entities) def children_rql(self): - return self.related_rql(self.tree_attribute, self.children_target) + return self.cw_related_rql(self.tree_attribute, self.children_target) def is_leaf(self): return len(self.children()) == 0 @@ -171,8 +165,7 @@ NOTE: The default implementation is based on the primary_email / use_email scheme """ - __implements__ = (IEmailable,) - + @deprecated("[3.9] use entity.cw_adapt_to('IEmailable').get_email()") def get_email(self): if getattr(self, 'primary_email', None): return self.primary_email[0].address @@ -180,28 +173,6 @@ return self.use_email[0].address return None - @classmethod - def allowed_massmail_keys(cls): - """returns a set of allowed email substitution keys - - The default is to return the entity's attribute list but an - entity class might override this method to allow extra keys. - For instance, the Person class might want to return a `companyname` - key. - """ - return set(rschema.type - for rschema, attrtype in cls.e_schema.attribute_definitions() - if attrtype.type not in ('Password', 'Bytes')) - - def as_email_context(self): - """returns the dictionary as used by the sendmail controller to - build email bodies. - - NOTE: the dictionary keys should match the list returned by the - `allowed_massmail_keys` method. - """ - return dict( (attr, getattr(self, attr)) for attr in self.allowed_massmail_keys() ) - """pluggable mixins system: plug classes registered in MI_REL_TRIGGERS on entity classes which have the relation described by the dict's key. @@ -215,7 +186,7 @@ } - +# XXX move to cubicweb.web.views.treeview once we delete usage from this file def _done_init(done, view, row, col): """handle an infinite recursion safety belt""" if done is None: @@ -223,7 +194,7 @@ entity = view.cw_rset.get_entity(row, col) if entity.eid in done: msg = entity._cw._('loop in %(rel)s relation (%(eid)s)') % { - 'rel': entity.tree_attribute, + 'rel': entity.cw_adapt_to('ITree').tree_relation, 'eid': entity.eid } return None, msg @@ -233,16 +204,20 @@ class TreeViewMixIn(object): """a recursive tree view""" + __metaclass__ = class_deprecated + __deprecation_warning__ = '[3.9] TreeViewMixIn is deprecated, use/override BaseTreeView instead' + __regid__ = 'tree' + __select__ = implements(ITree, warn=False) item_vid = 'treeitem' - __select__ = implements(ITree) def call(self, done=None, **kwargs): if done is None: done = set() super(TreeViewMixIn, self).call(done=done, **kwargs) - def cell_call(self, row, col=0, vid=None, done=None, **kwargs): + def cell_call(self, row, col=0, vid=None, done=None, maxlevel=None, **kwargs): + assert maxlevel is None or maxlevel > 0 done, entity = _done_init(done, self, row, col) if done is None: # entity is actually an error message @@ -250,8 +225,14 @@ return self.open_item(entity) entity.view(vid or self.item_vid, w=self.w, **kwargs) + if maxlevel is not None: + maxlevel -= 1 + if maxlevel == 0: + self.close_item(entity) + return relatedrset = entity.children(entities=False) - self.wview(self.__regid__, relatedrset, 'null', done=done, **kwargs) + self.wview(self.__regid__, relatedrset, 'null', done=done, + maxlevel=maxlevel, **kwargs) self.close_item(entity) def open_item(self, entity): @@ -262,6 +243,8 @@ class TreePathMixIn(object): """a recursive path view""" + __metaclass__ = class_deprecated + __deprecation_warning__ = '[3.9] TreePathMixIn is deprecated, use/override TreePathView instead' __regid__ = 'path' item_vid = 'oneline' separator = u' > ' @@ -286,6 +269,8 @@ class 
ProgressMixIn(object): """provide a default implementations for IProgress interface methods""" + __metaclass__ = class_deprecated + __deprecation_warning__ = '[3.9] ProgressMixIn is deprecated, use/override IProgressAdapter instead' @property def cost(self): diff -r f4d1d5d9ccbb -r 90f2f20367bc mttransforms.py --- a/mttransforms.py Tue Jul 27 12:36:03 2010 +0200 +++ b/mttransforms.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""mime type transformation engine for cubicweb, based on mtconverter +"""mime type transformation engine for cubicweb, based on mtconverter""" -""" __docformat__ = "restructuredtext en" from logilab import mtconverter diff -r f4d1d5d9ccbb -r 90f2f20367bc req.py --- a/req.py Tue Jul 27 12:36:03 2010 +0200 +++ b/req.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""Base class for request/session +"""Base class for request/session""" -""" __docformat__ = "restructuredtext en" from warnings import warn @@ -133,7 +132,7 @@ Example (in a shell session): >>> c = create_entity('Company', name=u'Logilab') - >>> create_entity('Person', firstname=u'John', lastname=u'Doe', + >>> create_entity('Person', firstname=u'John', surname=u'Doe', ... works_for=c) """ @@ -175,6 +174,8 @@ """return an absolute URL using params dictionary key/values as URL parameters. Values are automatically URL quoted, and the publishing method to use may be specified or will be guessed. + + raises :exc:`ValueError` if None is found in arguments """ # use *args since we don't want first argument to be "anonymous" to # avoid potential clash with kwargs @@ -202,7 +203,6 @@ return u'%s%s' % (base_url, path) return u'%s%s?%s' % (base_url, path, self.build_url_params(**kwargs)) - def build_url_params(self, **kwargs): """return encoded params to incorporate them in an URL""" args = [] @@ -210,6 +210,8 @@ if not isinstance(values, (list, tuple)): values = (values,) for value in values: + if value is None: + raise ValueError(_('unauthorized value')) args.append(u'%s=%s' % (param, self.url_quote(value))) return '&'.join(args) @@ -279,7 +281,7 @@ user = self.user userinfo['login'] = user.login userinfo['name'] = user.name() - userinfo['email'] = user.get_email() + userinfo['email'] = user.cw_adapt_to('IEmailable').get_email() return userinfo def is_internal_session(self): @@ -373,11 +375,11 @@ raise ValueError(self._('can\'t parse %(value)r (expected %(format)s)') % {'value': value, 'format': format}) - # abstract methods to override according to the web front-end ############# - def base_url(self): """return the root url of the instance""" - raise NotImplementedError + return self.vreg.config['base-url'] + + # abstract methods to override according to the web front-end ############# def describe(self, eid): """return a tuple (type, sourceuri, extid) for the entity with id """ diff -r f4d1d5d9ccbb -r 90f2f20367bc rqlrewrite.py --- a/rqlrewrite.py Tue Jul 27 12:36:03 2010 +0200 +++ b/rqlrewrite.py Wed Nov 03 16:38:28 2010 +0100 @@ -19,8 +19,8 @@ tree. This is used for instance for read security checking in the repository. 
+""" -""" __docformat__ = "restructuredtext en" from rql import nodes as n, stmts, TypeResolverException @@ -45,14 +45,12 @@ allpossibletypes = {} for solution in solutions: for varname, etype in solution.iteritems(): - if not varname in newroot.defined_vars or eschema(etype).final: + # XXX not considering aliases by design, right ? + if varname not in newroot.defined_vars or eschema(etype).final: continue allpossibletypes.setdefault(varname, set()).add(etype) for varname in sorted(allpossibletypes): - try: - var = newroot.defined_vars[varname] - except KeyError: - continue + var = newroot.defined_vars[varname] stinfo = var.stinfo if stinfo.get('uidrel') is not None: continue # eid specified, no need for additional type specification @@ -64,7 +62,7 @@ if newroot is rqlst and typerel is not None: mytyperel = typerel else: - for vref in newroot.defined_vars[varname].references(): + for vref in var.references(): rel = vref.relation() if rel and rel.is_types_restriction(): mytyperel = rel @@ -79,12 +77,6 @@ for cst in mytyperel.get_nodes(n.Constant): if not cst.value in possibletypes: cst.parent.remove(cst) - try: - stinfo['possibletypes'].remove(cst.value) - except KeyError: - # restriction on a type not used by this query, may - # occurs with X is IN(...) - pass else: # we have to add types restriction if stinfo.get('scope') is not None: @@ -94,7 +86,7 @@ # to the root rel = newroot.add_type_restriction(var, possibletypes) stinfo['typerel'] = rel - stinfo['possibletypes'] = possibletypes + stinfo['possibletypes'] = possibletypes def remove_solutions(origsolutions, solutions, defined): diff -r f4d1d5d9ccbb -r 90f2f20367bc rset.py --- a/rset.py Tue Jul 27 12:36:03 2010 +0200 +++ b/rset.py Wed Nov 03 16:38:28 2010 +0100 @@ -77,10 +77,16 @@ rows = self.rows if len(rows) > 10: rows = rows[:10] + ['...'] + if len(rows) > 1: + # add a line break before first entity if more that one. + pattern = '' + else: + pattern = '' + if not self.description: - return '' % (self.rql, len(self.rows), + return pattern % (self.rql, len(self.rows), '\n'.join(str(r) for r in rows)) - return '' % (self.rql, len(self.rows), + return pattern % (self.rql, len(self.rows), '\n'.join('%s (%s)' % (r, d) for r, d in zip(rows, self.description))) @@ -453,7 +459,7 @@ etype = self.description[row][col] entity = self.req.vreg['etypes'].etype_class(etype)(req, rset=self, row=row, col=col) - entity.set_eid(eid) + entity.eid = eid # cache entity req.set_entity_cache(entity) eschema = entity.e_schema @@ -494,7 +500,7 @@ rrset.req = req else: rrset = self._build_entity(row, outerselidx).as_rset() - entity.set_related_cache(attr, role, rrset) + entity.cw_set_relation_cache(attr, role, rrset) return entity @cached @@ -563,7 +569,8 @@ if i == col: continue coletype = self.description[row][i] - # None description possible on column resulting from an outer join + # None description possible on column resulting from an + # outer join if coletype is None or eschema(coletype).final: continue try: @@ -582,11 +589,20 @@ @cached def related_entity(self, row, col): - """try to get the related entity to extract format information if any""" + """given an cell of the result set, try to return a (entity, relation + name) tuple to which this cell is linked. + + This is especially useful when the cell is an attribute of an entity, + to get the entity to which this attribute belongs to. 
+ """ rqlst = self.syntax_tree() + # UNION query, we've first to find a 'pivot' column to use to get the + # actual query from which the row is coming etype, locate_query_col = self._locate_query_params(rqlst, row, col) - # UNION query, find the subquery from which this entity has been found + # now find the query from which this entity has been found. Returned + # select node may be a subquery with different column indexes. select = rqlst.locate_subquery(locate_query_col, etype, self.args)[0] + # then get the index of root query's col in the subquery col = rqlst.subquery_selection_index(select, col) if col is None: # XXX unexpected, should fix subquery_selection_index ? diff -r f4d1d5d9ccbb -r 90f2f20367bc rtags.py --- a/rtags.py Tue Jul 27 12:36:03 2010 +0200 +++ b/rtags.py Wed Nov 03 16:38:28 2010 +0100 @@ -34,8 +34,6 @@ * ``tag_subject_of`` tag a relation in the subject's context * ``tag_object_of`` tag a relation in the object's context * ``tag_attribute`` shortcut for tag_subject_of - - """ __docformat__ = "restructuredtext en" @@ -212,4 +210,27 @@ _allowed_values = frozenset((True, False)) +class NoTargetRelationTagsDict(RelationTagsDict): + + @property + def name(self): + return self.__class__.name + + def tag_subject_of(self, key, tag): + subj, rtype, obj = key + if obj != '*': + self.warning('using explict target type in %s.tag_subject_of() ' + 'has no effect, use (%s, %s, "*") instead of (%s, %s, %s)', + self.name, subj, rtype, subj, rtype, obj) + super(NoTargetRelationTagsDict, self).tag_subject_of((subj, rtype, '*'), tag) + + def tag_object_of(self, key, tag): + subj, rtype, obj = key + if subj != '*': + self.warning('using explict subject type in %s.tag_object_of() ' + 'has no effect, use ("*", %s, %s) instead of (%s, %s, %s)', + self.name, rtype, obj, subj, rtype, obj) + super(NoTargetRelationTagsDict, self).tag_object_of(('*', rtype, obj), tag) + + set_log_methods(RelationTags, logging.getLogger('cubicweb.rtags')) diff -r f4d1d5d9ccbb -r 90f2f20367bc schema.py --- a/schema.py Tue Jul 27 12:36:03 2010 +0200 +++ b/schema.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""classes to define schemas for CubicWeb +"""classes to define schemas for CubicWeb""" -""" __docformat__ = "restructuredtext en" _ = unicode @@ -52,15 +51,19 @@ 'owned_by', 'created_by', 'is', 'is_instance_of', 'identity', 'eid', 'creation_date', 'modification_date', 'has_text', 'cwuri', )) -SYSTEM_RTYPES = set(('require_permission', 'custom_workflow', 'in_state', - 'wf_info_for')) +WORKFLOW_RTYPES = set(('custom_workflow', 'in_state', 'wf_info_for')) +SYSTEM_RTYPES = set(('require_permission',)) | WORKFLOW_RTYPES # set of entity and relation types used to build the schema SCHEMA_TYPES = set(( 'CWEType', 'CWRType', 'CWAttribute', 'CWRelation', - 'CWConstraint', 'CWConstraintType', 'RQLExpression', + 'CWConstraint', 'CWConstraintType', 'CWUniqueTogetherConstraint', + 'RQLExpression', 'relation_type', 'from_entity', 'to_entity', 'constrained_by', 'cstrtype', + 'constraint_of', 'relations', + 'read_permission', 'add_permission', + 'delete_permission', 'update_permission', )) WORKFLOW_TYPES = set(('Transition', 'State', 'TrInfo', 'Workflow', @@ -417,7 +420,7 @@ # avoid deleting the relation type accidentally... 
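Usage of such an rtag can be sketched like this (`my_rtag`, the entity type and the relation name are hypothetical):

.. sourcecode:: python

    # the target type is ignored by NoTargetRelationTagsDict based rtags,
    # so spell it '*' explicitly
    my_rtag.tag_subject_of(('BlogEntry', 'entry_of', '*'), True)

    # an explicit target still works but logs a warning and is
    # normalized to ('BlogEntry', 'entry_of', '*') anyway
    my_rtag.tag_subject_of(('BlogEntry', 'entry_of', 'Blog'), True)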
self.schema['has_text'].del_relation_def(self, self.schema['String']) - def schema_entity(self): + def schema_entity(self): # XXX @property for consistency with meta """return True if this entity type is used to build the schema""" return self.type in SCHEMA_TYPES @@ -441,7 +444,7 @@ def meta(self): return self.type in META_RTYPES - def schema_relation(self): + def schema_relation(self): # XXX @property for consistency with meta """return True if this relation type is used to build the schema""" return self.type in SCHEMA_TYPES @@ -612,6 +615,7 @@ class BaseRQLConstraint(BaseConstraint): """base class for rql constraints """ + distinct_query = None def __init__(self, restriction, mainvars=None): self.restriction = normalize_expression(restriction) @@ -651,8 +655,12 @@ pass # this is a vocabulary constraint, not enforce XXX why? def __str__(self): - return '%s(Any %s WHERE %s)' % (self.__class__.__name__, self.mainvars, - self.restriction) + if self.distinct_query: + selop = 'Any' + else: + selop = 'DISTINCT Any' + return '%s(%s %s WHERE %s)' % (self.__class__.__name__, selop, + self.mainvars, self.restriction) def __repr__(self): return '<%s @%#x>' % (self.__str__(), id(self)) @@ -699,7 +707,7 @@ """ if not self.match_condition(session, eidfrom, eidto): # XXX at this point if both or neither of S and O are in mainvar we - # dunno if the validation error `occured` on eidfrom or eidto (from + # dunno if the validation error `occurred` on eidfrom or eidto (from # user interface point of view) # # possible enhancement: check entity being created, it's probably @@ -744,13 +752,14 @@ class RQLUniqueConstraint(RepoEnforcedRQLConstraintMixIn, BaseRQLConstraint): """the unique rql constraint check that the result of the query isn't - greater than one - """ - distinct_query = True + greater than one. - # XXX turns mainvars into a required argument in __init__, since we've no - # way to guess it correctly (eg if using S,O or U the constraint will - # always be satisfied since we've to use a DISTINCT query) + You *must* specify mainvars when instantiating the constraint since there is + no way to guess it correctly (e.g. if using S,O or U the constraint will + always be satisfied because we've to use a DISTINCT query). + """ + # XXX turns mainvars into a required argument in __init__ + distinct_query = True def match_condition(self, session, eidfrom, eidto): return len(self.exec_query(session, eidfrom, eidto)) <= 1 diff -r f4d1d5d9ccbb -r 90f2f20367bc schemas/base.py --- a/schemas/base.py Tue Jul 27 12:36:03 2010 +0200 +++ b/schemas/base.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
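Since `mainvars` is now required for RQLUniqueConstraint, a declaration looks roughly like this (the restriction and variable names are hypothetical, the explicit `mainvars` argument is the point):

.. sourcecode:: python

    from cubicweb.schema import RQLUniqueConstraint

    # at most one version with a given num per project: the constraint
    # counts distinct X, so mainvars must name that variable explicitly
    cstr = RQLUniqueConstraint('S num N, S version_of P, '
                               'X num N, X version_of P',
                               mainvars='X')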
-"""core CubicWeb schema, but not necessary at bootstrap time +"""core CubicWeb schema, but not necessary at bootstrap time""" -""" __docformat__ = "restructuredtext en" _ = unicode diff -r f4d1d5d9ccbb -r 90f2f20367bc schemas/bootstrap.py --- a/schemas/bootstrap.py Tue Jul 27 12:36:03 2010 +0200 +++ b/schemas/bootstrap.py Wed Nov 03 16:38:28 2010 +0100 @@ -154,6 +154,17 @@ value = String(description=_('depends on the constraint type')) +class CWUniqueTogetherConstraint(EntityType): + """defines a sql-level multicolumn unique index""" + __permissions__ = PUB_SYSTEM_ENTITY_PERMS + constraint_of = SubjectRelation('CWEType', cardinality='1*', composite='object', + inlined=True) + relations = SubjectRelation(('CWAttribute', 'CWRelation'), cardinality='+*', + constraints=[RQLConstraint( + 'O from_entity X, S constraint_of X, O relation_type T, ' + 'T final TRUE OR (T final FALSE AND T inlined TRUE)')]) + + class CWConstraintType(EntityType): """define a schema constraint type""" __permissions__ = PUB_SYSTEM_ENTITY_PERMS diff -r f4d1d5d9ccbb -r 90f2f20367bc schemas/workflow.py --- a/schemas/workflow.py Tue Jul 27 12:36:03 2010 +0200 +++ b/schemas/workflow.py Wed Nov 03 16:38:28 2010 +0100 @@ -139,7 +139,7 @@ subworkflow_state = SubjectRelation( 'State', cardinality='1*', constraints=[RQLConstraint('T subworkflow_exit S, T subworkflow WF, O state_of WF', - msg=_('exit state must a subworkflow state'))], + msg=_('exit state must be a subworkflow state'))], description=_('subworkflow state')) destination_state = SubjectRelation( 'State', cardinality='?*', diff -r f4d1d5d9ccbb -r 90f2f20367bc selectors.py --- a/selectors.py Tue Jul 27 12:36:03 2010 +0200 +++ b/selectors.py Wed Nov 03 16:38:28 2010 +0100 @@ -169,7 +169,7 @@ or below the :func:`objectify_selector` decorator of your selector function so it gets traceable when :class:`traced_selection` is activated (see :ref:`DebuggingSelectors`). -.. autofunction:: cubicweb.selectors.lltrace +.. autofunction:: cubicweb.appobject.lltrace .. note:: Selectors __call__ should *always* return a positive integer, and shall never @@ -183,127 +183,53 @@ Once in a while, one needs to understand why a view (or any application object) is, or is not selected appropriately. Looking at which selectors fired (or did -not) is the way. The :class:`cubicweb.selectors.traced_selection` context +not) is the way. The :class:`cubicweb.appobject.traced_selection` context manager to help with that, *if you're running your instance in debug mode*. -.. autoclass:: cubicweb.selectors.traced_selection +.. autoclass:: cubicweb.appobject.traced_selection - -.. 
|cubicweb| replace:: *CubicWeb* """ __docformat__ = "restructuredtext en" import logging from warnings import warn +from operator import eq from logilab.common.deprecation import class_renamed from logilab.common.compat import all, any from logilab.common.interface import implements as implements_iface -from yams import BASE_TYPES +from yams.schema import BASE_TYPES, role_name +from rql.nodes import Function -from cubicweb import Unauthorized, NoSelectableObject, NotAnEntity, role +from cubicweb import (Unauthorized, NoSelectableObject, NotAnEntity, + CW_EVENT_MANAGER, role) # even if not used, let yes here so it's importable through this module -from cubicweb.appobject import Selector, objectify_selector, yes -from cubicweb.vregistry import class_regid -from cubicweb.cwconfig import CubicWebConfiguration +from cubicweb.uilib import eid_param +from cubicweb.appobject import Selector, objectify_selector, lltrace, yes from cubicweb.schema import split_expression -# helpers for debugging selectors -SELECTOR_LOGGER = logging.getLogger('cubicweb.selectors') -TRACED_OIDS = None - -def _trace_selector(cls, selector, args, ret): - # /!\ lltrace decorates pure function or __call__ method, this - # means argument order may be different - if isinstance(cls, Selector): - selname = str(cls) - vobj = args[0] - else: - selname = selector.__name__ - vobj = cls - if TRACED_OIDS == 'all' or class_regid(vobj) in TRACED_OIDS: - #SELECTOR_LOGGER.warning('selector %s returned %s for %s', selname, ret, cls) - print '%s -> %s for %s(%s)' % (selname, ret, vobj, vobj.__regid__) - -def lltrace(selector): - """use this decorator on your selectors so the becomes traceable with - :class:`traced_selection` - """ - # don't wrap selectors if not in development mode - if CubicWebConfiguration.mode == 'system': # XXX config.debug - return selector - def traced(cls, *args, **kwargs): - ret = selector(cls, *args, **kwargs) - if TRACED_OIDS is not None: - _trace_selector(cls, selector, args, ret) - return ret - traced.__name__ = selector.__name__ - traced.__doc__ = selector.__doc__ - return traced - -class traced_selection(object): - """ - Typical usage is : - - .. sourcecode:: python +from cubicweb.appobject import traced_selection # XXX for bw compat - >>> from cubicweb.selectors import traced_selection - >>> with traced_selection(): - ... # some code in which you want to debug selectors - ... # for all objects - - Don't forget the 'from __future__ import with_statement' at the module top-level - if you're using python prior to 2.6. - - This will yield lines like this in the logs:: - - selector one_line_rset returned 0 for - - You can also give to :class:`traced_selection` the identifiers of objects on - which you want to debug selection ('oid1' and 'oid2' in the example above). - - .. sourcecode:: python - - >>> with traced_selection( ('regid1', 'regid2') ): - ... # some code in which you want to debug selectors - ... # for objects with __regid__ 'regid1' and 'regid2' - - A potentially usefull point to set up such a tracing function is - the `cubicweb.vregistry.Registry.select` method body. 
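The context manager itself is unchanged, only its home module moved; a short usage sketch (the registry and identifiers are examples, `req` and `rset` come from the surrounding context):

.. sourcecode:: python

    from cubicweb.appobject import traced_selection   # new location

    with traced_selection(('primary', 'oneline')):
        # selector scores for the listed __regid__s are printed
        view = req.vreg['views'].select('primary', req, rset=rset)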
- """ - - def __init__(self, traced='all'): - self.traced = traced - - def __enter__(self): - global TRACED_OIDS - TRACED_OIDS = self.traced - - def __exit__(self, exctype, exc, traceback): - global TRACED_OIDS - TRACED_OIDS = None - return traceback is None - - -def score_interface(etypesreg, cls_or_inst, cls, iface): +def score_interface(etypesreg, eclass, iface): """Return XXX if the give object (maybe an instance or class) implements the interface. """ if getattr(iface, '__registry__', None) == 'etypes': # adjust score if the interface is an entity class - parents = etypesreg.parent_classes(cls_or_inst.__regid__) - if iface is cls: + parents, any = etypesreg.parent_classes(eclass.__regid__) + if iface is eclass: return len(parents) + 4 - if iface is parents[-1]: # Any + if iface is any: # Any return 1 - for index, basecls in enumerate(reversed(parents[:-1])): + for index, basecls in enumerate(reversed(parents)): if iface is basecls: return index + 3 return 0 - if implements_iface(cls_or_inst, iface): - # implenting an interface takes precedence other special Any interface + # XXX iface in implements deprecated in 3.9 + if implements_iface(eclass, iface): + # implementing an interface takes precedence other special Any interface return 2 return 0 @@ -321,31 +247,6 @@ return super(PartialSelectorMixIn, self).__call__(cls, *args, **kwargs) -class ImplementsMixIn(object): - """mix-in class for selectors checking implemented interfaces of something - """ - def __init__(self, *expected_ifaces, **kwargs): - super(ImplementsMixIn, self).__init__(**kwargs) - self.expected_ifaces = expected_ifaces - - def __str__(self): - return '%s(%s)' % (self.__class__.__name__, - ','.join(str(s) for s in self.expected_ifaces)) - - def score_interfaces(self, req, cls_or_inst, cls): - score = 0 - etypesreg = req.vreg['etypes'] - for iface in self.expected_ifaces: - if isinstance(iface, basestring): - # entity type - try: - iface = etypesreg.etype_class(iface) - except KeyError: - continue # entity type not in the schema - score += score_interface(etypesreg, cls_or_inst, cls, iface) - return score - - class EClassSelector(Selector): """abstract class for selectors working on *entity class(es)* specified explicitly or found of the result set. 
@@ -375,14 +276,17 @@ self.accept_none = accept_none @lltrace - def __call__(self, cls, req, rset=None, row=None, col=0, **kwargs): + def __call__(self, cls, req, rset=None, row=None, col=0, accept_none=None, + **kwargs): if kwargs.get('entity'): return self.score_class(kwargs['entity'].__class__, req) if not rset: return 0 score = 0 if row is None: - if not self.accept_none: + if accept_none is None: + accept_none = self.accept_none + if not accept_none: if any(rset[i][col] is None for i in xrange(len(rset))): return 0 for etype in rset.column_types(col): @@ -442,7 +346,8 @@ """ @lltrace - def __call__(self, cls, req, rset=None, row=None, col=0, **kwargs): + def __call__(self, cls, req, rset=None, row=None, col=0, accept_none=None, + **kwargs): if not rset and not kwargs.get('entity'): return 0 score = 0 @@ -450,9 +355,11 @@ score = self.score_entity(kwargs['entity']) elif row is None: col = col or 0 + if accept_none is None: + accept_none = self.accept_none for row, rowvalue in enumerate(rset.rows): if rowvalue[col] is None: # outer join - if not self.accept_none: + if not accept_none: return 0 continue escore = self.score(req, rset, row, col) @@ -482,7 +389,7 @@ """Take a list of expected values as initializer argument and store them into the :attr:`expected` set attribute. - You should implements the :meth:`_get_value(cls, req, **kwargs)` method + You should implement the :meth:`_get_value(cls, req, **kwargs)` method which should return the value for the given context. The selector will then return 1 if the value is expected, else 0. """ @@ -528,19 +435,49 @@ * `registry`, a registry name - * `regid`, an object identifier in this registry + * `regids`, object identifiers in this registry, one of them should be + selectable. """ - def __init__(self, registry, regid): + selectable_score = 1 + def __init__(self, registry, *regids): self.registry = registry - self.regid = regid + self.regids = regids + + @lltrace + def __call__(self, cls, req, **kwargs): + for regid in self.regids: + try: + req.vreg[self.registry].select(regid, req, **kwargs) + return self.selectable_score + except NoSelectableObject: + continue + return 0 + + +class adaptable(appobject_selectable): + """Return 1 if another appobject is selectable using the same input context. + + Initializer arguments: + + * `regids`, adapter identifiers (e.g. interface names) to which the context + (usually entities) should be adaptable. One of them should be selectable + when multiple identifiers are given. + """ + def __init__(self, *regids): + super(adaptable, self).__init__('adapters', *regids) def __call__(self, cls, req, **kwargs): - try: - req.vreg[self.registry].select(self.regid, req, **kwargs) - return 1 - except NoSelectableObject: - return 0 - + kwargs.setdefault('accept_none', False) + # being adaptable to an interface should takes precedence other is_instance('Any'), + # but not other explicit is_instance('SomeEntityType'), and: + # * is_instance('Any') score is 1 + # * is_instance('SomeEntityType') score is at least 2 + score = super(adaptable, self).__call__(cls, req, **kwargs) + if score >= 2: + return score - 0.5 + if score == 1: + return score + 0.5 + return score # rset selectors ############################################################## @@ -586,8 +523,8 @@ @objectify_selector @lltrace def one_line_rset(cls, req, rset=None, row=None, **kwargs): - """Return 1 if the result set is of size 1 or if a specific row in the - result set is specified ('row' argument). 
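A sketch of the new `adaptable` selector in use (the view class and its __regid__ are hypothetical, IDownloadable is the adapter identifier assumed here):

.. sourcecode:: python

    from cubicweb.view import EntityView
    from cubicweb.selectors import adaptable

    class DownloadBox(EntityView):
        __regid__ = 'downloadbox'
        # selectable as soon as entities in the context can be adapted
        # to IDownloadable, whatever their actual type
        __select__ = EntityView.__select__ & adaptable('IDownloadable')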
+ """Return 1 if the result set is of size 1, or greater but a specific row in + the result set is specified ('row' argument). """ if rset is not None and (row is not None or rset.rowcount == 1): return 1 @@ -595,25 +532,34 @@ class multi_lines_rset(Selector): - """If `nb`is specified, return 1 if the result set has exactly `nb` row of - result. Else (`nb` is None), return 1 if the result set contains *at least* + """Return 1 if the operator expression matches between `num` elements + in the result set and the `expected` value if defined. + + By default, multi_lines_rset(expected) matches equality expression: + `nb` row(s) in result set equals to expected value + But, you can perform richer comparisons by overriding default operator: + multi_lines_rset(expected, operator.gt) + + If `expected` is None, return 1 if the result set contains *at least* two rows. + If rset is None, return 0. """ - def __init__(self, nb=None): - self.expected = nb + def __init__(self, expected=None, operator=eq): + self.expected = expected + self.operator = operator def match_expected(self, num): if self.expected is None: return num > 1 - return num == self.expected + return self.operator(num, self.expected) @lltrace def __call__(self, cls, req, rset=None, **kwargs): - return rset is not None and self.match_expected(rset.rowcount) + return int(rset is not None and self.match_expected(rset.rowcount)) class multi_columns_rset(multi_lines_rset): - """If `nb`is specified, return 1 if the result set has exactly `nb` column + """If `nb` is specified, return 1 if the result set has exactly `nb` column per row. Else (`nb` is None), return 1 if the result set contains *at least* two columns per row. Return 0 for empty result set. """ @@ -659,12 +605,17 @@ @lltrace def sorted_rset(cls, req, rset=None, **kwargs): """Return 1 for sorted result set (e.g. from an RQL query containing an - :ref:ORDERBY clause. + :ref:ORDERBY clause), with exception that it will return 0 if the rset is + 'ORDERBY FTIRANK(VAR)' (eg sorted by rank value of the has_text index). """ if rset is None: return 0 - rqlst = rset.syntax_tree() - if len(rqlst.children) > 1 or not rqlst.children[0].orderby: + selects = rset.syntax_tree().children + if (len(selects) > 1 or + not selects[0].orderby or + (isinstance(selects[0].orderby[0].term, Function) and + selects[0].orderby[0].term.name == 'FTIRANK') + ): return 0 return 2 @@ -712,7 +663,7 @@ class non_final_entity(EClassSelector): """Return 1 for entity of a non final entity type(s). Remember, "final" entity types are String, Int, etc... This is equivalent to - `implements('Any')` but more optimized. + `is_instance('Any')` but more optimized. See :class:`~cubicweb.selectors.EClassSelector` documentation for entity class lookup / score rules according to the input context. @@ -726,7 +677,7 @@ return 1 # necessarily true if we're there -class implements(ImplementsMixIn, EClassSelector): +class implements(EClassSelector): """Return non-zero score for entity that are of the given type(s) or implements at least one of the given interface(s). If multiple arguments are given, matching one of them is enough. @@ -739,9 +690,95 @@ .. note:: when interface is an entity class, the score will reflect class proximity so the most specific object will be selected. + + .. note:: deprecated in cubicweb >= 3.9, use either + :class:`~cubicweb.selectors.is_instance` or + :class:`~cubicweb.selectors.adaptable`. 
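For instance, following the docstring above (thresholds are made up):

.. sourcecode:: python

    from operator import gt, le
    from cubicweb.selectors import multi_lines_rset

    exactly_three = multi_lines_rset(3)        # default operator is eq
    more_than_three = multi_lines_rset(3, gt)  # rowcount > 3
    at_most_five = multi_lines_rset(5, le)     # rowcount <= 5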
""" + + def __init__(self, *expected_ifaces, **kwargs): + emit_warn = kwargs.pop('warn', True) + super(implements, self).__init__(**kwargs) + self.expected_ifaces = expected_ifaces + if emit_warn: + warn('[3.9] implements selector is deprecated, use either ' + 'is_instance or adaptable', DeprecationWarning, stacklevel=2) + + def __str__(self): + return '%s(%s)' % (self.__class__.__name__, + ','.join(str(s) for s in self.expected_ifaces)) + def score_class(self, eclass, req): - return self.score_interfaces(req, eclass, eclass) + score = 0 + etypesreg = req.vreg['etypes'] + for iface in self.expected_ifaces: + if isinstance(iface, basestring): + # entity type + try: + iface = etypesreg.etype_class(iface) + except KeyError: + continue # entity type not in the schema + score += score_interface(etypesreg, eclass, iface) + return score + +def _reset_is_instance_cache(vreg): + vreg._is_instance_selector_cache = {} + +CW_EVENT_MANAGER.bind('before-registry-reset', _reset_is_instance_cache) + +class is_instance(EClassSelector): + """Return non-zero score for entity that is an instance of the one of given + type(s). If multiple arguments are given, matching one of them is enough. + + Entity types should be given as string, the corresponding class will be + fetched from the registry at selection time. + + See :class:`~cubicweb.selectors.EClassSelector` documentation for entity + class lookup / score rules according to the input context. + + .. note:: the score will reflect class proximity so the most specific object + will be selected. + """ + + def __init__(self, *expected_etypes, **kwargs): + super(is_instance, self).__init__(**kwargs) + self.expected_etypes = expected_etypes + for etype in self.expected_etypes: + assert isinstance(etype, basestring), etype + + def __str__(self): + return '%s(%s)' % (self.__class__.__name__, + ','.join(str(s) for s in self.expected_etypes)) + + def score_class(self, eclass, req): + # cache on vreg to avoid reloading issues + cache = req.vreg._is_instance_selector_cache + try: + expected_eclasses = cache[self] + except KeyError: + # turn list of entity types as string into a list of + # (entity class, parent classes) + etypesreg = req.vreg['etypes'] + expected_eclasses = cache[self] = [] + for etype in self.expected_etypes: + try: + expected_eclasses.append(etypesreg.etype_class(etype)) + except KeyError: + continue # entity type not in the schema + parents, any = req.vreg['etypes'].parent_classes(eclass.__regid__) + score = 0 + for expectedcls in expected_eclasses: + # adjust score according to class proximity + if expectedcls is eclass: + score += len(parents) + 4 + elif expectedcls is any: # Any + score += 1 + else: + for index, basecls in enumerate(reversed(parents)): + if expectedcls is basecls: + score += index + 3 + break + return score class score_entity(EntitySelector): @@ -765,6 +802,41 @@ return 1 self.score_entity = intscore +class attribute_edited(EntitySelector): + """Scores if the specified attribute has been edited + This is useful for selection of forms by the edit controller. 
+ The initial use case is on a form, in conjunction with match_transition, + which will not score at edit time:: + + is_instance('Version') & (match_transition('ready') | + attribute_edited('publication_date')) + """ + def __init__(self, attribute, once_is_enough=False): + super(attribute_edited, self).__init__(once_is_enough) + self._attribute = attribute + + def score_entity(self, entity): + return eid_param(role_name(self._attribute, 'subject'), entity.eid) in entity._cw.form + +class has_mimetype(EntitySelector): + """Return 1 if the entity adapt to IDownloadable and has the given MIME type. + + You can give 'image/' to match any image for instance, or 'image/png' to match + only PNG images. + """ + def __init__(self, mimetype, once_is_enough=False): + super(has_mimetype, self).__init__(once_is_enough) + self.mimetype = mimetype + + def score_entity(self, entity): + idownloadable = entity.cw_adapt_to('IDownloadable') + if idownloadable is None: + return 0 + mt = idownloadable.download_content_type() + if not (mt and mt.startswith(self.mimetype)): + return 0 + return 1 + class relation_possible(EntitySelector): """Return 1 for entity that supports the relation, provided that the @@ -978,12 +1050,12 @@ return self.score_entity(kwargs['entity']) if rset is None: return 0 - user = req.user - action = self.action if row is None: score = 0 need_local_check = [] geteschema = req.vreg.schema.eschema + user = req.user + action = self.action for etype in rset.column_types(0): if etype in BASE_TYPES: return 0 @@ -1000,16 +1072,18 @@ if need_local_check: # check local role for entities of necessary types for i, row in enumerate(rset): - if not rset.description[i][0] in need_local_check: + if not rset.description[i][col] in need_local_check: continue - if not self.score(req, rset, i, col): + # micro-optimisation instead of calling self.score(req, + # rset, i, col): rset may be large + if not rset.get_entity(i, col).cw_has_perm(action): return 0 score += 1 return score return self.score(req, rset, row, col) def score_entity(self, entity): - if entity.has_perm(self.action): + if entity.cw_has_perm(self.action): return 1 return 0 @@ -1233,18 +1307,15 @@ return len(self.expected) -class specified_etype_implements(implements): +class specified_etype_implements(is_instance): """Return non-zero score if the entity type specified by an 'etype' key searched in (by priority) input context kwargs and request form parameters match a known entity type (case insensitivly), and it's associated entity - class is of one of the type(s) given to the initializer or implements at - least one of the given interfaces. If multiple arguments are given, matching - one of them is enough. + class is of one of the type(s) given to the initializer. If multiple + arguments are given, matching one of them is enough. - Entity types should be given as string, the corresponding class will be - fetched from the entity types registry at selection time. - - .. note:: when interface is an entity class, the score will reflect class + .. note:: as with :class:`~cubicweb.selectors.is_instance`, entity types + should be given as string and the score will reflect class proximity so the most specific object will be selected. 
This selector is usually used by views holding entity creation forms (since @@ -1280,19 +1351,13 @@ class match_transition(ExpectedValueSelector): - """Return 1 if: - - * a `transition` argument is found in the input context which - has a `.name` attribute matching one of the expected names given to the - initializer - - * no transition specified. + """Return 1 if `transition` argument is found in the input context + which has a `.name` attribute matching one of the expected names + given to the initializer """ @lltrace def __call__(self, cls, req, transition=None, **kwargs): # XXX check this is a transition that apply to the object? - if transition is None: - return 1 if transition is not None and getattr(transition, 'name', None) in self.expected: return 1 return 0 @@ -1300,25 +1365,30 @@ class is_in_state(score_entity): """return 1 if entity is in one of the states given as argument list - you should use this instead of your own score_entity x: x.state == 'bla' - selector to avoid some gotchas: + you should use this instead of your own :class:`score_entity` selector to + avoid some gotchas: * possible views gives a fake entity with no state - * you must use the latest tr info, not entity.state for repository side + * you must use the latest tr info, not entity.in_state for repository side checking of the current state """ def __init__(self, *states): def score(entity, states=set(states)): + trinfo = entity.cw_adapt_to('IWorkflowable').latest_trinfo() try: - return entity.latest_trinfo().new_state.name in states + return trinfo.new_state.name in states except AttributeError: return None super(is_in_state, self).__init__(score) +@objectify_selector +def debug_mode(cls, req, rset=None, **kwargs): + """Return 1 if running in debug mode""" + return req.vreg.config.debugmode and 1 or 0 ## deprecated stuff ############################################################ -entity_implements = class_renamed('entity_implements', implements) +entity_implements = class_renamed('entity_implements', is_instance) class _but_etype(EntitySelector): """accept if the given entity types are not found in the result set. @@ -1336,7 +1406,7 @@ return 0 return 1 -but_etype = class_renamed('but_etype', _but_etype, 'use ~implements(*etypes) instead') +but_etype = class_renamed('but_etype', _but_etype, 'use ~is_instance(*etypes) instead') # XXX deprecated the one_* variants of selectors below w/ multi_xxx(nb=1)? diff -r f4d1d5d9ccbb -r 90f2f20367bc server/__init__.py --- a/server/__init__.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/__init__.py Wed Nov 03 16:38:28 2010 +0100 @@ -131,11 +131,9 @@ config.creating = True config.consider_user_state = False config.set_language = False - # only enable the system source at initialization time + admin which is not - # an actual source but contains initial manager account information - config.enabled_sources = ('system', 'admin') + # only enable the system source at initialization time + config.enabled_sources = ('system',) repo = Repository(config, vreg=vreg) - assert len(repo.sources) == 1, repo.sources schema = repo.schema sourcescfg = config.sources() _title = '-> creating tables ' diff -r f4d1d5d9ccbb -r 90f2f20367bc server/checkintegrity.py --- a/server/checkintegrity.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/checkintegrity.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,10 +15,14 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""Check integrity of a CubicWeb repository. 
Hum actually only the system database -is checked. +"""Integrity checking tool for instances: +* integrity of a CubicWeb repository. Hum actually only the system database is + checked. + +* consistency of multi-sources instance mapping file """ + from __future__ import with_statement __docformat__ = "restructuredtext en" @@ -28,7 +32,7 @@ from logilab.common.shellutils import ProgressBar -from cubicweb.schema import PURE_VIRTUAL_RTYPES +from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES, PURE_VIRTUAL_RTYPES from cubicweb.server.sqlutils import SQL_PREFIX from cubicweb.server.session import security_enabled @@ -99,8 +103,6 @@ print 'no text index table' dbhelper.init_fti(cursor) repo.system_source.do_fti = True # ensure full-text indexation is activated - if withpb: - pb = ProgressBar(len(etypes) + 1) if etypes is None: print 'Reindexing entities' etypes = set() @@ -123,6 +125,7 @@ dbhelper.fti_table, dbhelper.fti_uid_attr, ','.join("'%s'" % etype for etype in etypes))) if withpb: + pb = ProgressBar(len(etypes) + 1) pb.update() # reindex entities by generating rql queries which set all indexable # attribute to their current value @@ -237,7 +240,12 @@ table, column, column, eid) session.system_sql(sql) continue - cursor = session.system_sql('SELECT eid_from FROM %s_relation;' % rschema) + try: + cursor = session.system_sql('SELECT eid_from FROM %s_relation;' % rschema) + except Exception, ex: + # usually because table doesn't exist + print 'ERROR', ex + continue for row in cursor.fetchall(): eid = row[0] if not has_eid(session, cursor, eid, eids): @@ -326,3 +334,98 @@ session.set_pool() reindex_entities(repo.schema, session, withpb=withpb) cnx.commit() + + +def warning(msg, *args): + if args: + msg = msg % args + print 'WARNING: %s' % msg + +def error(msg, *args): + if args: + msg = msg % args + print 'ERROR: %s' % msg + +def check_mapping(schema, mapping, warning=warning, error=error): + # first check stuff found in mapping file exists in the schema + for attr in ('support_entities', 'support_relations'): + for ertype in mapping[attr].keys(): + try: + mapping[attr][ertype] = erschema = schema[ertype] + except KeyError: + error('reference to unknown type %s in %s', ertype, attr) + del mapping[attr][ertype] + else: + if erschema.final or erschema in META_RTYPES: + error('type %s should not be mapped in %s', ertype, attr) + del mapping[attr][ertype] + for attr in ('dont_cross_relations', 'cross_relations'): + for rtype in list(mapping[attr]): + try: + rschema = schema.rschema(rtype) + except KeyError: + error('reference to unknown relation type %s in %s', rtype, attr) + mapping[attr].remove(rtype) + else: + if rschema.final or rschema in VIRTUAL_RTYPES: + error('relation type %s should not be mapped in %s', + rtype, attr) + mapping[attr].remove(rtype) + # check relation in dont_cross_relations aren't in support_relations + for rschema in mapping['dont_cross_relations']: + if rschema in mapping['support_relations']: + warning('relation %s is in dont_cross_relations and in support_relations', + rschema) + # check relation in cross_relations are in support_relations + for rschema in mapping['cross_relations']: + if rschema not in mapping['support_relations']: + warning('relation %s is in cross_relations but not in support_relations', + rschema) + # check for relation in both cross_relations and dont_cross_relations + for rschema in mapping['cross_relations'] & mapping['dont_cross_relations']: + error('relation %s is in both cross_relations and dont_cross_relations', + rschema) + # now check 
for more handy things + seen = set() + for eschema in mapping['support_entities'].values(): + for rschema, ttypes, role in eschema.relation_definitions(): + if rschema in META_RTYPES: + continue + ttypes = [ttype for ttype in ttypes if ttype in mapping['support_entities']] + if not rschema in mapping['support_relations']: + somethingprinted = False + for ttype in ttypes: + rdef = rschema.role_rdef(eschema, ttype, role) + seen.add(rdef) + if rdef.role_cardinality(role) in '1+': + error('relation %s with %s as %s and target type %s is ' + 'mandatory but not supported', + rschema, eschema, role, ttype) + somethingprinted = True + elif ttype in mapping['support_entities']: + if rdef not in seen: + warning('%s could be supported', rdef) + somethingprinted = True + if rschema not in mapping['dont_cross_relations']: + if role == 'subject' and rschema.inlined: + error('inlined relation %s of %s should be supported', + rschema, eschema) + elif not somethingprinted and rschema not in seen: + print 'you may want to specify something for %s' % rschema + seen.add(rschema) + else: + if not ttypes: + warning('relation %s with %s as %s is supported but no target ' + 'type supported', rschema, role, eschema) + if rschema in mapping['cross_relations'] and rschema.inlined: + error('you should unline relation %s which is supported and ' + 'may be crossed ', rschema) + for rschema in mapping['support_relations'].values(): + if rschema in META_RTYPES: + continue + for subj, obj in rschema.rdefs: + if subj in mapping['support_entities'] and obj in mapping['support_entities']: + break + else: + error('relation %s is supported but none if its definitions ' + 'matches supported entities', rschema) diff -r f4d1d5d9ccbb -r 90f2f20367bc server/hook.py --- a/server/hook.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/hook.py Wed Nov 03 16:38:28 2010 +0100 @@ -57,13 +57,13 @@ from itertools import chain from logilab.common.decorators import classproperty -from logilab.common.deprecation import deprecated +from logilab.common.deprecation import deprecated, class_renamed from logilab.common.logging_ext import set_log_methods from cubicweb import RegistryNotFound from cubicweb.cwvreg import CWRegistry, VRegistry from cubicweb.selectors import (objectify_selector, lltrace, ExpectedValueSelector, - implements) + is_instance) from cubicweb.appobject import AppObject from cubicweb.server.session import security_enabled @@ -246,7 +246,7 @@ if ertype.islower(): rtypes.append(ertype) else: - cls.__select__ = cls.__select__ & implements(ertype) + cls.__select__ = cls.__select__ & is_instance(ertype) if rtypes: cls.__select__ = cls.__select__ & match_rtype(*rtypes) return cls @@ -262,7 +262,7 @@ def __call__(self): if hasattr(self, 'call'): cls = self.__class__ - warn('[3.6] %s.%s: call is deprecated, implements __call__' + warn('[3.6] %s.%s: call is deprecated, implement __call__' % (cls.__module__, cls.__name__), DeprecationWarning) if self.event.endswith('_relation'): self.call(self._cw, self.eidfrom, self.rtype, self.eidto) @@ -278,15 +278,21 @@ set_log_methods(Hook, getLogger('cubicweb.hook')) -# base classes for relation propagation ######################################## +# abtract hooks for relation propagation ####################################### +# See example usage in hooks of the nosylist cube -class PropagateSubjectRelationHook(Hook): +class PropagateRelationHook(Hook): """propagate some `main_rtype` relation on entities linked as object of `subject_relations` or as subject of `object_relations` (the watched 
relations). This hook ensure that when one of the watched relation is added, the `main_rtype` relation is added to the target entity of the relation. + Notice there are no default behaviour defined when a watched relation is + deleted, you'll have to handle this by yourself. + + You usually want to use the :class:`match_rtype_sets` selector on concret + classes. """ events = ('after_add_relation',) @@ -312,56 +318,77 @@ {'x': meid, 'e': seid}) -class PropagateSubjectRelationAddHook(Hook): - """propagate to entities at the end of watched relations when a `main_rtype` - relation is added +class PropagateRelationAddHook(Hook): + """Propagate to entities at the end of watched relations when a `main_rtype` + relation is added. + + `subject_relations` and `object_relations` attributes should be specified on + subclasses and are usually shared references with attributes of the same + name on :class:`PropagateRelationHook`. + + Because of those shared references, you can use `skip_subject_relations` and + `skip_object_relations` attributes when you don't want to propagate to + entities linked through some particular relations. """ events = ('after_add_relation',) - # to set in concrete class + # to set in concrete class (mandatory) subject_relations = None object_relations = None + # to set in concrete class (optionaly) + skip_subject_relations = () + skip_object_relations = () def __call__(self): eschema = self._cw.vreg.schema.eschema(self._cw.describe(self.eidfrom)[0]) execute = self._cw.execute for rel in self.subject_relations: - if rel in eschema.subjrels: + if rel in eschema.subjrels and not rel in self.skip_subject_relations: execute('SET R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' 'X %s R, NOT R %s P' % (self.rtype, rel, self.rtype), {'x': self.eidfrom, 'p': self.eidto}) for rel in self.object_relations: - if rel in eschema.objrels: + if rel in eschema.objrels and not rel in self.skip_object_relations: execute('SET R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' 'R %s X, NOT R %s P' % (self.rtype, rel, self.rtype), {'x': self.eidfrom, 'p': self.eidto}) -class PropagateSubjectRelationDelHook(Hook): - """propagate to entities at the end of watched relations when a `main_rtype` - relation is deleted +class PropagateRelationDelHook(PropagateRelationAddHook): + """Propagate to entities at the end of watched relations when a `main_rtype` + relation is deleted. + + This is the opposite of the :class:`PropagateRelationAddHook`, see its + documentation for how to use this class. 
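A sketch of a concrete propagation hook, in the spirit of the nosylist cube mentioned above (relation names, the __regid__ and the exact match_rtype_sets arguments are illustrative assumptions):

.. sourcecode:: python

    from cubicweb.server import hook

    # watched relations, shared with the companion add/delete hooks
    S_RELS = set(('comments',))    # subject relations
    O_RELS = set(('concerns',))    # object relations

    class PropagateInterestHook(hook.PropagateRelationHook):
        """propagate 'interested_in' along the watched relations"""
        __regid__ = 'myapp.propagate_interest'
        __select__ = (hook.PropagateRelationHook.__select__
                      & hook.match_rtype_sets(S_RELS, O_RELS))
        main_rtype = 'interested_in'
        subject_relations = S_RELS
        object_relations = O_RELS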
""" events = ('after_delete_relation',) - # to set in concrete class - subject_relations = None - object_relations = None - def __call__(self): eschema = self._cw.vreg.schema.eschema(self._cw.describe(self.eidfrom)[0]) execute = self._cw.execute for rel in self.subject_relations: - if rel in eschema.subjrels: + if rel in eschema.subjrels and not rel in self.skip_subject_relations: execute('DELETE R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' 'X %s R' % (self.rtype, rel), {'x': self.eidfrom, 'p': self.eidto}) for rel in self.object_relations: - if rel in eschema.objrels: + if rel in eschema.objrels and not rel in self.skip_object_relations: execute('DELETE R %s P WHERE X eid %%(x)s, P eid %%(p)s, ' 'R %s X' % (self.rtype, rel), {'x': self.eidfrom, 'p': self.eidto}) +PropagateSubjectRelationHook = class_renamed( + 'PropagateSubjectRelationHook', PropagateRelationHook, + '[3.9] PropagateSubjectRelationHook has been renamed to PropagateRelationHook') +PropagateSubjectRelationAddHook = class_renamed( + 'PropagateSubjectRelationAddHook', PropagateRelationAddHook, + '[3.9] PropagateSubjectRelationAddHook has been renamed to PropagateRelationAddHook') +PropagateSubjectRelationDelHook = class_renamed( + 'PropagateSubjectRelationDelHook', PropagateRelationDelHook, + '[3.9] PropagateSubjectRelationDelHook has been renamed to PropagateRelationDelHook') + + # abstract classes for operation ############################################### class Operation(object): diff -r f4d1d5d9ccbb -r 90f2f20367bc server/migractions.py --- a/server/migractions.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/migractions.py Wed Nov 03 16:38:28 2010 +0100 @@ -44,17 +44,20 @@ from logilab.common.deprecation import deprecated from logilab.common.decorators import cached, clear_cache -from logilab.common.testlib import mock_object from yams.constraints import SizeConstraint from yams.schema2sql import eschema2sql, rschema2sql -from cubicweb import AuthenticationError -from cubicweb.schema import (META_RTYPES, VIRTUAL_RTYPES, +from cubicweb import AuthenticationError, ExecutionError +from cubicweb.selectors import is_instance +from cubicweb.schema import (ETYPE_NAME_MAP, META_RTYPES, VIRTUAL_RTYPES, + PURE_VIRTUAL_RTYPES, CubicWebRelationSchema, order_eschemas) +from cubicweb.cwvreg import CW_EVENT_MANAGER from cubicweb.dbapi import get_repository, repo_connect from cubicweb.migration import MigrationHelper, yes from cubicweb.server.session import hooks_control +from cubicweb.server import hook try: from cubicweb.server import SOURCE_TYPES, schemaserial as ss from cubicweb.server.utils import manager_userpasswd, ask_source_config @@ -63,6 +66,24 @@ pass +def mock_object(**params): + return type('Mock', (), params)() + +class ClearGroupMap(hook.Hook): + __regid__ = 'cw.migration.clear_group_mapping' + __select__ = hook.Hook.__select__ & is_instance('CWGroup') + events = ('after_add_entity', 'after_update_entity',) + def __call__(self): + clear_cache(self.mih, 'group_mapping') + self.mih._synchronized.clear() + + @classmethod + def mih_register(cls, repo): + # may be already registered in tests (e.g. 
unittest_migractions at + # least) + if not cls.__regid__ in repo.vreg['after_add_entity_hooks']: + repo.vreg.register(ClearGroupMap) + class ServerMigrationHelper(MigrationHelper): """specific migration helper for server side migration scripts, providind actions related to schema/data migration @@ -82,8 +103,17 @@ self.repo_connect() # no config on shell to a remote instance if config is not None and (cnx or connect): + repo = self.repo self.session.data['rebuild-infered'] = False - self.repo.hm.call_hooks('server_maintenance', repo=self.repo) + # register a hook to clear our group_mapping cache and the + # self._synchronized set when some group is added or updated + ClearGroupMap.mih = self + ClearGroupMap.mih_register(repo) + CW_EVENT_MANAGER.bind('after-registry-reload', + ClearGroupMap.mih_register, repo) + # notify we're starting maintenance (called instead of server_start + # which is called on regular start + repo.hm.call_hooks('server_maintenance', repo=repo) if not schema and not getattr(config, 'quick_start', False): schema = config.load_schema(expand_cubes=True) self.fs_schema = schema @@ -116,27 +146,18 @@ super(ServerMigrationHelper, self).migrate(vcconf, toupgrade, options) def cmd_process_script(self, migrscript, funcname=None, *args, **kwargs): - """execute a migration script - in interactive mode, display the migration script path, ask for - confirmation and execute it if confirmed - """ try: - if migrscript.endswith('.sql'): - if self.execscript_confirm(migrscript): - sqlexec(open(migrscript).read(), self.session.system_sql) - elif migrscript.endswith('.py') or migrscript.endswith('.txt'): - return super(ServerMigrationHelper, self).cmd_process_script( - migrscript, funcname, *args, **kwargs) - else: - print >> sys.stderr - print >> sys.stderr, ('-> ignoring %s, only .py .sql and .txt scripts are considered' % - migrscript) - print >> sys.stderr - self.commit() + return super(ServerMigrationHelper, self).cmd_process_script( + migrscript, funcname, *args, **kwargs) + except ExecutionError, err: + print >> sys.stderr, "-> %s" % err except: self.rollback() raise + # Adjust docstring + cmd_process_script.__doc__ = MigrationHelper.cmd_process_script.__doc__ + # server specific migration methods ######################################## def backup_database(self, backupfile=None, askconfirm=True): @@ -187,7 +208,7 @@ askconfirm=True): # check if not osp.exists(backupfile): - raise Exception("Backup file %s doesn't exist" % backupfile) + raise ExecutionError("Backup file %s doesn't exist" % backupfile) if askconfirm and not self.confirm('Restore %s database from %s ?' % (self.config.appid, backupfile)): return @@ -201,7 +222,7 @@ else: for name in bkup.getnames(): if name[0] in '/.': - raise Exception('Security check failed, path starts with "/" or "."') + raise ExecutionError('Security check failed, path starts with "/" or "."') bkup.close() # XXX seek error if not close+open !?! bkup = tarfile.open(backupfile, 'r|gz') bkup.extractall(path=tmpdir) @@ -280,7 +301,7 @@ if self.session: self.session.set_pool() - def rqlexecall(self, rqliter, ask_confirm=True): + def rqlexecall(self, rqliter, ask_confirm=False): for rql, kwargs in rqliter: self.rqlexec(rql, kwargs, ask_confirm=ask_confirm) @@ -386,9 +407,13 @@ for gname in newgroups: if not confirm or self.confirm('Grant %s permission of %s to %s?' 
% (action, erschema, gname)): - self.rqlexec('SET T %s G WHERE G eid %%(x)s, T eid %s' - % (perm, teid), - {'x': gm[gname]}, ask_confirm=False) + try: + self.rqlexec('SET T %s G WHERE G eid %%(x)s, T eid %s' + % (perm, teid), + {'x': gm[gname]}, ask_confirm=False) + except KeyError: + self.error('can grant %s perm to unexistant group %s', + action, gname) # handle rql expressions newexprs = dict((expr.expression, expr) for expr in erschema.get_rqlexprs(action)) for expreid, expression in self.rqlexec('Any E, EX WHERE T %s E, E expression EX, ' @@ -455,6 +480,7 @@ * description * internationalizable, fulltextindexed, indexed, meta * relations from/to this entity + * __unique_together__ * permissions if `syncperms` """ etype = str(etype) @@ -502,6 +528,44 @@ continue self._synchronize_rdef_schema(subj, rschema, obj, syncprops=syncprops, syncperms=syncperms) + if syncprops: # need to process __unique_together__ after rdefs were processed + repo_unique_together = set([frozenset(ut) + for ut in repoeschema._unique_together]) + unique_together = set([frozenset(ut) + for ut in eschema._unique_together]) + for ut in repo_unique_together - unique_together: + restrictions = ', '.join(['C relations R%(i)d, ' + 'R%(i)d relation_type T%(i)d, ' + 'R%(i)d from_entity X, ' + 'T%(i)d name %%(T%(i)d)s' % {'i': i, + 'col':col} + for (i, col) in enumerate(ut)]) + substs = {'etype': etype} + for i, col in enumerate(ut): + substs['T%d'%i] = col + self.rqlexec('DELETE CWUniqueTogetherConstraint C ' + 'WHERE C constraint_of E, ' + ' E name %%(etype)s,' + ' %s' % restrictions, + substs) + for ut in unique_together - repo_unique_together: + relations = ', '.join(['C relations R%d' % i + for (i, col) in enumerate(ut)]) + restrictions = ', '.join(['R%(i)d relation_type T%(i)d, ' + 'R%(i)d from_entity E, ' + 'T%(i)d name %%(T%(i)d)s' % {'i': i, + 'col':col} + for (i, col) in enumerate(ut)]) + substs = {'etype': etype} + for i, col in enumerate(ut): + substs['T%d'%i] = col + self.rqlexec('INSERT CWUniqueTogetherConstraint C:' + ' C constraint_of E, ' + ' %s ' + 'WHERE ' + ' E name %%(etype)s,' + ' %s' % (relations, restrictions), + substs) def _synchronize_rdef_schema(self, subjtype, rtype, objtype, syncperms=True, syncprops=True): @@ -596,7 +660,8 @@ newcubes_schema = self.config.load_schema(construction_mode='non-strict') # XXX we have to replace fs_schema, used in cmd_add_relation_type # etc. and fsschema of migration script contexts - self.fs_schema = self._create_context()['fsschema'] = newcubes_schema + self.fs_schema = newcubes_schema + self.update_context('fsschema', self.fs_schema) new = set() # execute pre-create files driver = self.repo.system_source.dbdriver @@ -714,13 +779,8 @@ targeted type is known """ instschema = self.repo.schema - assert not etype in instschema - # # XXX (syt) plz explain: if we're adding an entity type, it should - # # not be there... 
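To make the synchronization above concrete, a sketch of how such a constraint is typically declared and then synchronized from a migration script (the entity type, attributes and inlined relation are hypothetical):

.. sourcecode:: python

    # in the application schema
    from yams.buildobjs import EntityType, String, SubjectRelation

    class Person(EntityType):
        firstname = String()
        surname = String()
        works_for = SubjectRelation('Company', cardinality='?*', inlined=True)
        # backed by a CWUniqueTogetherConstraint / sql unique index
        __unique_together__ = [('firstname', 'surname', 'works_for')]

    # then, in a migration script
    sync_schema_props_perms('Person')
    commit()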
- # eschema = instschema[etype] - # if eschema.final: - # instschema.del_entity_type(etype) - # else: + assert not etype in instschema, \ + '%s already defined in the instance schema' % etype eschema = self.fs_schema.eschema(etype) confirm = self.verbosity >= 2 groupmap = self.group_mapping() @@ -734,8 +794,8 @@ try: specialized.eid = instschema[specialized].eid except KeyError: - raise Exception('trying to add entity type but parent type is ' - 'not yet in the database schema') + raise ExecutionError('trying to add entity type but parent type is ' + 'not yet in the database schema') self.rqlexecall(ss.eschemaspecialize2rql(eschema), ask_confirm=confirm) # register entity's attributes for rschema, attrschema in eschema.attribute_definitions(): @@ -849,15 +909,96 @@ if commit: self.commit() - def cmd_rename_entity_type(self, oldname, newname, commit=True): + def cmd_rename_entity_type(self, oldname, newname, attrs=None, commit=True): """rename an existing entity type in the persistent schema `oldname` is a string giving the name of the existing entity type `newname` is a string giving the name of the renamed entity type """ - self.rqlexec('SET ET name %(newname)s WHERE ET is CWEType, ET name %(oldname)s', - {'newname' : unicode(newname), 'oldname' : oldname}, - ask_confirm=False) + schema = self.repo.schema + if newname in schema: + assert oldname in ETYPE_NAME_MAP, \ + '%s should be mapped to %s in ETYPE_NAME_MAP' % (oldname, + newname) + if attrs is None: + attrs = ','.join(SQL_PREFIX + rschema.type + for rschema in schema[newname].subject_relations() + if (rschema.final or rschema.inlined) + and not rschema in PURE_VIRTUAL_RTYPES) + else: + attrs += ('eid', 'creation_date', 'modification_date', 'cwuri') + attrs = ','.join(SQL_PREFIX + attr for attr in attrs) + self.sqlexec('INSERT INTO %s%s(%s) SELECT %s FROM %s%s' % ( + SQL_PREFIX, newname, attrs, attrs, SQL_PREFIX, oldname), + ask_confirm=False) + # old entity type has not been added to the schema, can't gather it + new = schema.eschema(newname) + oldeid = self.rqlexec('CWEType ET WHERE ET name %(on)s', + {'on': oldname}, ask_confirm=False)[0][0] + # backport old type relations to new type + # XXX workflows, other relations? + for r1, rr1 in [('from_entity', 'to_entity'), + ('to_entity', 'from_entity')]: + self.rqlexec('SET X %(r1)s NET WHERE X %(r1)s OET, ' + 'NOT EXISTS(X2 %(r1)s NET, X relation_type XRT, ' + 'X2 relation_type XRT, X %(rr1)s XTE, X2 %(rr1)s XTE), ' + 'OET eid %%(o)s, NET eid %%(n)s' % locals(), + {'o': oldeid, 'n': new.eid}, ask_confirm=False) + # backport is / is_instance_of relation to new type + for rtype in ('is', 'is_instance_of'): + self.sqlexec('UPDATE %s_relation SET eid_to=%s WHERE eid_to=%s' + % (rtype, new.eid, oldeid), ask_confirm=False) + # delete relations using SQL to avoid relations content removal + # triggered by schema synchronization hooks. + session = self.session + for rdeftype in ('CWRelation', 'CWAttribute'): + thispending = set() + for eid, in self.sqlexec('SELECT cw_eid FROM cw_%s ' + 'WHERE cw_from_entity=%%(eid)s OR ' + ' cw_to_entity=%%(eid)s' % rdeftype, + {'eid': oldeid}, ask_confirm=False): + # we should add deleted eids into pending eids else we may + # get some validation error on commit since integrity hooks + # may think some required relation is missing... 
This also ensure + # repository caches are properly cleanup + hook.set_operation(session, 'pendingeids', eid, + hook.CleanupDeletedEidsCacheOp) + # and don't forget to remove record from system tables + self.repo.system_source.delete_info( + session, session.entity_from_eid(eid, rdeftype), + 'system', None) + thispending.add(eid) + self.sqlexec('DELETE FROM cw_%s ' + 'WHERE cw_from_entity=%%(eid)s OR ' + 'cw_to_entity=%%(eid)s' % rdeftype, + {'eid': oldeid}, ask_confirm=False) + # now we have to manually cleanup relations pointing to deleted + # entities + thiseids = ','.join(str(eid) for eid in thispending) + for rschema, ttypes, role in schema[rdeftype].relation_definitions(): + if rschema.type in VIRTUAL_RTYPES: + continue + sqls = [] + if role == 'object': + if rschema.inlined: + for eschema in ttypes: + sqls.append('DELETE FROM cw_%s WHERE cw_%s IN(%%s)' + % (eschema, rschema)) + else: + sqls.append('DELETE FROM %s_relation WHERE eid_to IN(%%s)' + % rschema) + elif not rschema.inlined: + sqls.append('DELETE FROM %s_relation WHERE eid_from IN(%%s)' + % rschema) + for sql in sqls: + self.sqlexec(sql % thiseids, ask_confirm=False) + # remove the old type: use rql to propagate deletion + self.rqlexec('DELETE CWEType ET WHERE ET name %(on)s', {'on': oldname}, + ask_confirm=False) + else: + self.rqlexec('SET ET name %(newname)s WHERE ET is CWEType, ET name %(on)s', + {'newname' : unicode(newname), 'on' : oldname}, + ask_confirm=False) if commit: self.commit() @@ -882,10 +1023,15 @@ self.commit() gmap = self.group_mapping() cmap = self.cstrtype_mapping() + done = set() for rdef in rschema.rdefs.itervalues(): if not (reposchema.has_entity(rdef.subject) and reposchema.has_entity(rdef.object)): continue + # symmetric relations appears twice + if (rdef.subject, rdef.object) in done: + continue + done.add( (rdef.subject, rdef.object) ) self._set_rdef_eid(rdef) ss.execschemarql(execute, rdef, ss.rdef2rql(rdef, cmap, gmap)) @@ -1152,10 +1298,10 @@ if commit: self.commit() - @deprecated('[3.5] use entity.fire_transition("transition") or entity.change_state("state")', - stacklevel=3) + @deprecated('[3.5] use iworkflowable.fire_transition("transition") or ' + 'iworkflowable.change_state("state")', stacklevel=3) def cmd_set_state(self, eid, statename, commit=False): - self._cw.entity_from_eid(eid).change_state(statename) + self._cw.entity_from_eid(eid).cw_adapt_to('IWorkflowable').change_state(statename) if commit: self.commit() @@ -1215,6 +1361,13 @@ self.commit() return entity + def cmd_update_etype_fti_weight(self, etype, weight): + if self.repo.system_source.dbdriver == 'postgres': + self.sqlexec('UPDATE appears SET weight=%(weight)s ' + 'FROM entities as X ' + 'WHERE X.eid=appears.uid AND X.type=%(type)s', + {'type': etype, 'weight': weight}, ask_confirm=False) + def cmd_reindex_entities(self, etypes=None): """force reindexaction of entities of the given types or of all indexable entity types @@ -1238,7 +1391,7 @@ cu = self.session.system_sql(sql, args) except: ex = sys.exc_info()[1] - if self.confirm('Error: %s\nabort?' % ex): + if self.confirm('Error: %s\nabort?' % ex, pdb=True): raise return try: @@ -1248,7 +1401,7 @@ return def rqlexec(self, rql, kwargs=None, cachekey=None, build_descr=True, - ask_confirm=True): + ask_confirm=False): """rql action""" if cachekey is not None: warn('[3.8] cachekey is deprecated, you can safely remove this argument', @@ -1266,7 +1419,7 @@ try: res = execute(rql, kwargs, build_descr=build_descr) except Exception, ex: - if self.confirm('Error: %s\nabort?' 
% ex): + if self.confirm('Error: %s\nabort?' % ex, pdb=True): raise return res @@ -1344,9 +1497,7 @@ def __iter__(self): return self - def next(self): - if self._rsetit is not None: - return self._rsetit.next() + def _get_rset(self): rql, kwargs = self.rql, self.kwargs if kwargs: msg = '%s (%s)' % (rql, kwargs) @@ -1356,11 +1507,23 @@ if not self._h.confirm('Execute rql: %s ?' % msg): raise StopIteration try: - rset = self._h._cw.execute(rql, kwargs) + return self._h._cw.execute(rql, kwargs) except Exception, ex: if self._h.confirm('Error: %s\nabort?' % ex): raise else: raise StopIteration + + def next(self): + if self._rsetit is not None: + return self._rsetit.next() + rset = self._get_rset() self._rsetit = iter(rset) return self._rsetit.next() + + def entities(self): + try: + rset = self._get_rset() + except StopIteration: + return [] + return rset.entities() diff -r f4d1d5d9ccbb -r 90f2f20367bc server/msplanner.py --- a/server/msplanner.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/msplanner.py Wed Nov 03 16:38:28 2010 +0100 @@ -96,13 +96,13 @@ from rql.stmts import Union, Select from rql.nodes import (VariableRef, Comparison, Relation, Constant, Variable, - Not, Exists) + Not, Exists, SortTerm, Function) from cubicweb import server from cubicweb.utils import make_uid +from cubicweb.rqlrewrite import add_types_restriction from cubicweb.server.utils import cleanup_solutions -from cubicweb.server.ssplanner import (SSPlanner, OneFetchStep, - add_types_restriction) +from cubicweb.server.ssplanner import SSPlanner, OneFetchStep from cubicweb.server.mssteps import * Variable._ms_table_key = lambda x: x.name @@ -110,6 +110,11 @@ # str() Constant.value to ensure generated table name won't be unicode Constant._ms_table_key = lambda x: str(x.value) +Variable._ms_may_be_processed = lambda x, terms, linkedterms: any( + t for t in terms if t in linkedterms.get(x, ())) +Relation._ms_may_be_processed = lambda x, terms, linkedterms: all( + getattr(hs, 'variable', hs) in terms for hs in x.get_variable_parts()) + def ms_scope(term): rel = None scope = term.scope @@ -411,7 +416,8 @@ for const in vconsts: self._set_source_for_term(source, const) elif not self._sourcesterms: - self._set_source_for_term(source, const) + for const in vconsts: + self._set_source_for_term(source, const) elif source in self._sourcesterms: source_scopes = frozenset(ms_scope(t) for t in self._sourcesterms[source]) for const in vconsts: @@ -419,9 +425,9 @@ self._set_source_for_term(source, const) # if system source is used, add every rewritten constant # to its supported terms even when associated entity - # doesn't actually come from it so we get a changes - # that allequals will return True as expected when - # computing needsplit + # doesn't actually come from it so we get a changes that + # allequals will return True as expected when computing + # needsplit # check const is used in a relation restriction if const.relation() and self.system_source in sourcesterms: self._set_source_for_term(self.system_source, const) @@ -432,14 +438,16 @@ # process non final relations only # note: don't try to get schema for 'is' relation (not available # during bootstrap) - if not rel.is_types_restriction() and not rschema(rel.r_type).final: + if not (rel.is_types_restriction() or rschema(rel.r_type).final): # nothing to do if relation is not supported by multiple sources # or if some source has it listed in its cross_relations # attribute # # XXX code below don't deal if some source allow relation # crossing but not another one - relsources = 
repo.rel_type_sources(rel.r_type) + relsources = [s for s in repo.rel_type_sources(rel.r_type) + if s is self.system_source + or s in self._sourcesterms] if len(relsources) < 2: # filter out sources being there because they have this # relation in their dont_cross_relations attribute @@ -478,6 +486,7 @@ # not supported by the source, so we can stop here continue self._sourcesterms.setdefault(ssource, {})[rel] = set(self._solindices) + solindices = None for term in crossvars: if len(termssources[term]) == 1 and iter(termssources[term]).next()[0].uri == 'system': for ov in crossvars: @@ -485,8 +494,14 @@ ssset = frozenset((ssource,)) self._remove_sources(ov, termssources[ov] - ssset) break + if solindices is None: + solindices = set(sol for s, sol in termssources[term] + if s is source) + else: + solindices &= set(sol for s, sol in termssources[term] + if s is source) else: - self._sourcesterms.setdefault(source, {})[rel] = set(self._solindices) + self._sourcesterms.setdefault(source, {})[rel] = solindices def _remove_invalid_sources(self, termssources): """removes invalid sources from `sourcesterms` member according to @@ -799,10 +814,13 @@ rhsvar = rhs.variable except AttributeError: rhsvar = rhs - if lhsvar in terms and not rhsvar in terms: - needsel.add(lhsvar.name) - elif rhsvar in terms and not lhsvar in terms: - needsel.add(rhsvar.name) + try: + if lhsvar in terms and not rhsvar in terms: + needsel.add(lhsvar.name) + elif rhsvar in terms and not lhsvar in terms: + needsel.add(rhsvar.name) + except AttributeError: + continue # not an attribute, no selection needed if final and source.uri != 'system': # check rewritten constants for vconsts in select.stinfo['rewritten'].itervalues(): @@ -937,13 +955,14 @@ exclude[vars[1]] = vars[0] except IndexError: pass - accept_term = lambda x: (not any(s for s in sources if not x in sourcesterms.get(s, ())) - and any(t for t in terms if t in linkedterms.get(x, ())) + accept_term = lambda x: (not any(s for s in sources + if not x in sourcesterms.get(s, ())) + and x._ms_may_be_processed(terms, linkedterms) and not exclude.get(x) in terms) if isinstance(term, Relation) and term in cross_rels: cross_terms = cross_rels.pop(term) base_accept_term = accept_term - accept_term = lambda x: (base_accept_term(x) or x in cross_terms) + accept_term = lambda x: (accept_term(x) or x in cross_terms) for refed in cross_terms: if not refed in candidates: terms.append(refed) @@ -954,7 +973,11 @@ modified = False for term in candidates[:]: if isinstance(term, Constant): - if sorted(set(x[0] for x in self._term_sources(term))) != sources: + termsources = set(x[0] for x in self._term_sources(term)) + # ensure system source is there for constant + if self.system_source in sources: + termsources.add(self.system_source) + if sorted(termsources) != sources: continue terms.append(term) candidates.remove(term) @@ -1076,14 +1099,14 @@ the rqlst should not be tagged at this point """ - if server.DEBUG & server.DBG_MS: - print '-'*80 - print 'PLANNING', rqlst # preprocess deals with security insertion and returns a new syntax tree # which have to be executed to fulfill the query: according # to permissions for variable's type, different rql queries may have to # be executed plan.preprocess(rqlst) + if server.DEBUG & server.DBG_MS: + print '-'*80 + print 'PLANNING', rqlst ppis = [PartPlanInformation(plan, select, self.rqlhelper) for select in rqlst.children] steps = self._union_plan(plan, ppis) @@ -1213,11 +1236,16 @@ sources, terms, scope, solindices, needsel, final) if final: 
solsinputmaps = ppi.merge_input_maps(solindices) + if len(solsinputmaps) > 1: + refrqlst = minrqlst for solindices, inputmap in solsinputmaps: if inputmap is None: inputmap = subinputmap else: inputmap.update(subinputmap) + if len(solsinputmaps) > 1: + minrqlst = refrqlst.copy() + sources = sources[:] if inputmap and len(sources) > 1: sources.remove(ppi.system_source) steps.append(ppi.build_final_part(minrqlst, solindices, None, @@ -1330,6 +1358,12 @@ orderby.append) if orderby: newroot.set_orderby(orderby) + elif rqlst.orderby: + for sortterm in rqlst.orderby: + if any(f for f in sortterm.iget_nodes(Function) if f.name == 'FTIRANK'): + newnode, oldnode = sortterm.accept(self, newroot, terms) + if newnode is not None: + newroot.add_sort_term(newnode) self.process_selection(newroot, terms, rqlst) elif not newroot.where: # no restrictions have been copied, just select terms and add @@ -1423,8 +1457,8 @@ if not node.is_types_restriction(): if node in self.skip and self.solindices.issubset(self.skip[node]): if not self.schema.rschema(node.r_type).final: - # can't really skip the relation if one variable is selected and only - # referenced by this relation + # can't really skip the relation if one variable is selected + # and only referenced by this relation for vref in node.iget_nodes(VariableRef): stinfo = vref.variable.stinfo if stinfo['selected'] and len(stinfo['relations']) == 1: @@ -1435,13 +1469,14 @@ return None, node if not self._relation_supported(node): raise UnsupportedBranch() - # don't copy type restriction unless this is the only relation for the - # rhs variable, else they'll be reinserted later as needed (else we may - # copy a type restriction while the variable is not actually used) - elif not any(self._relation_supported(rel) - for rel in node.children[0].variable.stinfo['relations']): - rel, node = self.visit_default(node, newroot, terms) - return rel, node + # don't copy type restriction unless this is the only supported relation + # for the lhs variable, else they'll be reinserted later as needed (in + # other cases we may copy a type restriction while the variable is not + # actually used) + elif not (node.neged(strict=True) or + any(self._relation_supported(rel) + for rel in node.children[0].variable.stinfo['relations'])): + return self.visit_default(node, newroot, terms) else: raise UnsupportedBranch() rschema = self.schema.rschema(node.r_type) @@ -1530,12 +1565,38 @@ copy.operator = '=' return copy, node + def visit_function(self, node, newroot, terms): + if node.name == 'FTIRANK': + # FTIRANK is somewhat special... 
Rank function should be included in + # the same query has the has_text relation, potentially added to + # selection for latter usage + if not self.hasaggrstep and self.final and node not in self.skip: + return self.visit_default(node, newroot, terms) + elif any(s for s in self.sources if s.uri != 'system'): + return None, node + # p = node.parent + # while p is not None and not isinstance(p, SortTerm): + # p = p.parent + # if isinstance(p, SortTerm): + if not self.hasaggrstep and self.final and node in self.skip: + return Constant(self.skip[node], 'Int'), node + # XXX only if not yet selected + newroot.append_selected(node.copy(newroot)) + self.skip[node] = len(newroot.selection) + return None, node + return self.visit_default(node, newroot, terms) + def visit_default(self, node, newroot, terms): subparts, node = self._visit_children(node, newroot, terms) return copy_node(newroot, node, subparts), node - visit_mathexpression = visit_constant = visit_function = visit_default - visit_sort = visit_sortterm = visit_default + visit_mathexpression = visit_constant = visit_default + + def visit_sortterm(self, node, newroot, terms): + subparts, node = self._visit_children(node, newroot, terms) + if not subparts: + return None, node + return copy_node(newroot, node, subparts), node def _visit_children(self, node, newroot, terms): subparts = [] @@ -1574,6 +1635,8 @@ for vref in supportedvars: if not vref in newroot.get_selected_variables(): newroot.append_selected(VariableRef(newroot.get_variable(vref.name))) + elif term in self.terms: + newroot.append_selected(term.copy(newroot)) def add_necessary_selection(self, newroot, terms): selected = tuple(newroot.get_selected_variables()) diff -r f4d1d5d9ccbb -r 90f2f20367bc server/mssteps.py --- a/server/mssteps.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/mssteps.py Wed Nov 03 16:38:28 2010 +0100 @@ -140,13 +140,6 @@ def mytest_repr(self): """return a representation of this step suitable for test""" - sel = self.select.selection - restr = self.select.where - self.select.selection = self.selection - self.select.where = None - rql = self.select.as_string(kwargs=self.plan.args) - self.select.selection = sel - self.select.where = restr try: # rely on a monkey patch (cf unittest_querier) table = self.plan.tablesinorder[self.table] @@ -155,12 +148,19 @@ # not monkey patched table = self.table outputtable = self.outputtable - return (self.__class__.__name__, rql, self.limit, self.offset, table, - outputtable) + sql = self.get_sql().replace(self.table, table) + return (self.__class__.__name__, sql, outputtable) def execute(self): """execute this step""" self.execute_children() + sql = self.get_sql() + if self.outputtable: + self.plan.create_temp_table(self.outputtable) + sql = 'INSERT INTO %s %s' % (self.outputtable, sql) + return self.plan.sqlexec(sql, self.plan.args) + + def get_sql(self): self.inputmap = inputmap = self.children[-1].outputmap # get the select clause clause = [] @@ -223,17 +223,15 @@ sql.append('LIMIT %s' % self.limit) if self.offset: sql.append('OFFSET %s' % self.offset) - #print 'DATA', plan.sqlexec('SELECT * FROM %s' % self.table, None) - sql = ' '.join(sql) - if self.outputtable: - self.plan.create_temp_table(self.outputtable) - sql = 'INSERT INTO %s %s' % (self.outputtable, sql) - return self.plan.sqlexec(sql, self.plan.args) + return ' '.join(sql) def visit_function(self, function): """generate SQL name for a function""" - return '%s(%s)' % (function.name, - ','.join(c.accept(self) for c in function.children)) + try: + return 
self.children[0].outputmap[str(function)] + except KeyError: + return '%s(%s)' % (function.name, + ','.join(c.accept(self) for c in function.children)) def visit_variableref(self, variableref): """get the sql name for a variable reference""" diff -r f4d1d5d9ccbb -r 90f2f20367bc server/querier.py --- a/server/querier.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/querier.py Wed Nov 03 16:38:28 2010 +0100 @@ -29,10 +29,11 @@ from logilab.common.compat import any from rql import RQLSyntaxError from rql.stmts import Union, Select -from rql.nodes import Relation, VariableRef, Constant, SubQuery, Exists, Not +from rql.nodes import (Relation, VariableRef, Constant, SubQuery, Function, + Exists, Not) -from cubicweb import Unauthorized, QueryError, UnknownEid, typed_eid -from cubicweb import server +from cubicweb import ValidationError, Unauthorized, QueryError, UnknownEid +from cubicweb import server, typed_eid from cubicweb.rset import ResultSet from cubicweb.server.utils import cleanup_solutions @@ -50,7 +51,8 @@ key = term.as_string() value = '%s.C%s' % (table, i) if varmap.get(key, value) != value: - raise Exception('variable name conflict on %s' % key) + raise Exception('variable name conflict on %s: got %s / %s' + % (key, value, varmap)) varmap[key] = value # permission utilities ######################################################## @@ -294,7 +296,26 @@ for term in origselection: newselect.append_selected(term.copy(newselect)) if select.orderby: - newselect.set_orderby([s.copy(newselect) for s in select.orderby]) + sortterms = [] + for sortterm in select.orderby: + sortterms.append(sortterm.copy(newselect)) + for fnode in sortterm.get_nodes(Function): + if fnode.name == 'FTIRANK': + # we've to fetch the has_text relation as well + var = fnode.children[0].variable + rel = iter(var.stinfo['ftirels']).next() + assert not rel.ored(), 'unsupported' + newselect.add_restriction(rel.copy(newselect)) + # remove relation from the orig select and + # cleanup variable stinfo + rel.parent.remove(rel) + var.stinfo['ftirels'].remove(rel) + var.stinfo['relations'].remove(rel) + # XXX not properly re-annotated after security insertion? + newvar = newselect.get_variable(var.name) + newvar.stinfo.setdefault('ftirels', set()).add(rel) + newvar.stinfo.setdefault('relations', set()).add(rel) + newselect.set_orderby(sortterms) _expand_selection(select.orderby, selected, aliases, select, newselect) select.orderby = () # XXX dereference? if select.groupby: @@ -339,6 +360,7 @@ select.set_possible_types(localchecks[()]) add_types_restriction(self.schema, select) add_noinvariant(noinvariant, restricted, select, nbtrees) + self.rqlhelper.annotate(union) def _check_permissions(self, rqlst): """return a dict defining "local checks", e.g. 
RQLExpression defined in @@ -400,7 +422,9 @@ # raise Unautorized exception if the user can't access to any solution if not newsolutions: raise Unauthorized('\n'.join(msgs)) - rqlst.set_possible_types(newsolutions) + if msgs: + # (else solutions have not been modified) + rqlst.set_possible_types(newsolutions) return localchecks, restricted_vars def finalize(self, select, solutions, insertedvars): @@ -411,6 +435,7 @@ for sol in solutions: sol[newvarname] = nvartype select.clean_solutions(solutions) + add_types_restriction(self.schema, select) self.rqlhelper.annotate(rqlst) self.preprocess(rqlst, security=False) return rqlst @@ -571,6 +596,8 @@ # rql parsing / analysing helper self.solutions = repo.vreg.solutions rqlhelper = repo.vreg.rqlhelper + # set backend on the rql helper, will be used for function checking + rqlhelper.backend = repo.config.sources()['system']['db-driver'] self._parse = rqlhelper.parse self._annotate = rqlhelper.annotate # rql planner @@ -674,15 +701,9 @@ # execute the plan try: results = plan.execute() - except Unauthorized: - # XXX this could be done in security's after_add_relation hooks - # since it's actually realy only needed there (other relations - # security is done *before* actual changes, and add/update entity - # security is done after changes but in an operation, and exception - # generated in operation's events properly generate a rollback on - # the session). Even though, this is done here for a better - # consistency: getting an Unauthorized exception means the - # transaction has been rollbacked + except (Unauthorized, ValidationError): + # getting an Unauthorized/ValidationError exception means the + # transaction must been rollbacked # # notes: # * we should not reset the pool here, since we don't want the @@ -690,7 +711,7 @@ # * don't rollback if we're in the commit process, will be handled # by the session if session.commit_state is None: - session.rollback(reset_pool=False) + session.commit_state = 'uncommitable' raise # build a description for the results if necessary descr = () diff -r f4d1d5d9ccbb -r 90f2f20367bc server/repository.py --- a/server/repository.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/repository.py Wed Nov 03 16:38:28 2010 +0100 @@ -50,12 +50,12 @@ UnknownEid, AuthenticationError, ExecutionError, ETypeNotSupportedBySources, MultiSourcesError, BadConnectionId, Unauthorized, ValidationError, - RepositoryError, typed_eid, onevent) + RepositoryError, UniqueTogetherError, typed_eid, onevent) from cubicweb import cwvreg, schema, server from cubicweb.server import utils, hook, pool, querier, sources from cubicweb.server.session import Session, InternalSession, InternalManager, \ security_enabled - +_ = unicode def del_existing_rel_if_needed(session, eidfrom, rtype, eidto): """delete existing relation when adding a new one if card is 1 or ? @@ -81,14 +81,14 @@ # not expected for this). So: don't do it, we pretend to ensure repository # consistency. # - # XXX we don't want read permissions to be applied but we want delete - # permission to be checked - rschema = session.repo.schema.rschema(rtype) - if card[0] in '1?': - if not rschema.inlined: # inlined relations will be implicitly deleted - with security_enabled(session, read=False): - session.execute('DELETE X %s Y WHERE X eid %%(x)s, ' - 'NOT Y eid %%(y)s' % rtype, + # notes: + # * inlined relations will be implicitly deleted for the subject entity + # * we don't want read permissions to be applied but we want delete + # permission to be checked + if card[0] in '1?' 
and not session.repo.schema.rschema(rtype).inlined: + with security_enabled(session, read=False): + session.execute('DELETE X %s Y WHERE X eid %%(x)s, ' + 'NOT Y eid %%(y)s' % rtype, {'x': eidfrom, 'y': eidto}) if card[1] in '1?': with security_enabled(session, read=False): @@ -104,10 +104,10 @@ XXX protect pyro access """ - def __init__(self, config, vreg=None, debug=False): + def __init__(self, config, vreg=None): self.config = config if vreg is None: - vreg = cwvreg.CubicWebVRegistry(config, debug) + vreg = cwvreg.CubicWebVRegistry(config) self.vreg = vreg self.pyro_registered = False self.info('starting repository from %s', self.config.apphome) @@ -135,7 +135,8 @@ continue source = self.get_source(uri, source_config) self.sources_by_uri[uri] = source - self.sources.append(source) + if config.source_enabled(uri): + self.sources.append(source) self.system_source = self.sources_by_uri['system'] # ensure system source is the first one self.sources.remove(self.system_source) @@ -154,13 +155,6 @@ if not isinstance(session.user, InternalManager): session.user.__class__ = usercls - def _bootstrap_hook_registry(self): - """called during bootstrap since we need the metadata hooks""" - hooksdirectory = join(CW_SOFTWARE_ROOT, 'hooks') - self.vreg.init_registration([hooksdirectory]) - self.vreg.load_file(join(hooksdirectory, 'metadata.py'), - 'cubicweb.hooks.metadata') - def open_connections_pools(self): config = self.config self._available_pools = Queue.Queue() @@ -186,7 +180,9 @@ for modname in ('__init__', 'authobjs', 'wfobjs'): self.vreg.load_file(join(etdirectory, '%s.py' % modname), 'cubicweb.entities.%s' % modname) - self._bootstrap_hook_registry() + hooksdirectory = join(CW_SOFTWARE_ROOT, 'hooks') + self.vreg.load_file(join(hooksdirectory, 'metadata.py'), + 'cubicweb.hooks.metadata') elif config.read_instance_schema: # normal start: load the instance schema from the database self.fill_schema() @@ -205,8 +201,8 @@ for source in self.sources: source.init() else: - # call init_creating so for instance native source can configurate - # tsearch according to postgres version + # call init_creating so that for instance native source can + # configurate tsearch according to postgres version for source in self.sources: source.init_creating() # close initialization pool and reopen fresh ones for proper @@ -234,13 +230,14 @@ if resetvreg: if self.config._cubes is None: self.config.init_cubes(self.get_cubes()) - # full reload of all appobjects - self.vreg.reset() + # trigger full reload of all appobjects self.vreg.set_schema(schema) else: self.vreg._set_schema(schema) self.querier.set_schema(schema) - for source in self.sources: + # don't use self.sources, we may want to give schema even to disabled + # sources + for source in self.sources_by_uri.values(): source.set_schema(schema) self.schema = schema @@ -392,7 +389,7 @@ raise AuthenticationError('authentication failed with all sources') cwuser = self._build_user(session, eid) if self.config.consider_user_state and \ - not cwuser.state in cwuser.AUTHENTICABLE_STATES: + not cwuser.cw_adapt_to('IWorkflowable').state in cwuser.AUTHENTICABLE_STATES: raise AuthenticationError('user is not in authenticable state') return cwuser @@ -413,6 +410,11 @@ # public (dbapi) interface ################################################ def stats(self): # XXX restrict to managers session? + """Return a dictionary containing some statistics about the repository + resources usage. + + This is a public method, not requiring a session id. 
+ """ results = {} querier = self.querier source = self.system_source @@ -435,8 +437,9 @@ return results def get_schema(self): - """return the instance schema. This is a public method, not - requiring a session id + """Return the instance schema. + + This is a public method, not requiring a session id. """ try: # necessary to support pickling used by pyro @@ -446,8 +449,9 @@ self.schema.__hashmode__ = None def get_cubes(self): - """return the list of cubes used by this instance. This is a - public method, not requiring a session id. + """Return the list of cubes used by this instance. + + This is a public method, not requiring a session id. """ versions = self.get_versions(not (self.config.creating or self.config.repairing @@ -457,11 +461,31 @@ cubes.remove('cubicweb') return cubes + def get_option_value(self, option, foreid=None): + """Return the value for `option` in the configuration. If `foreid` is + specified, the actual repository to which this entity belongs is + derefenced and the option value retrieved from it. + + This is a public method, not requiring a session id. + """ + # XXX we may want to check we don't give sensible information + if foreid is None: + return self.config[option] + _, sourceuri, extid = self.type_and_source_from_eid(foreid) + if sourceuri == 'system': + return self.config[option] + pool = self._get_pool() + try: + return pool.connection(sourceuri).get_option_value(option, extid) + finally: + self._free_pool(pool) + @cached def get_versions(self, checkversions=False): - """return the a dictionary containing cubes used by this instance - as key with their version as value, including cubicweb version. This is a - public method, not requiring a session id. + """Return the a dictionary containing cubes used by this instance + as key with their version as value, including cubicweb version. + + This is a public method, not requiring a session id. """ from logilab.common.changelog import Version vcconf = {} @@ -491,6 +515,11 @@ @cached def source_defs(self): + """Return the a dictionary containing source uris as value and a + dictionary describing each source as value. + + This is a public method, not requiring a session id. + """ sources = self.config.sources().copy() # remove manager information sources.pop('admin', None) @@ -502,7 +531,10 @@ return sources def properties(self): - """return a result set containing system wide properties""" + """Return a result set containing system wide properties. + + This is a public method, not requiring a session id. + """ session = self.internal_session() try: # don't use session.execute, we don't want rset.req set @@ -573,7 +605,7 @@ session.close() session = Session(user, self, cnxprops) user._cw = user.cw_rset.req = session - user.clear_related_cache() + user.cw_clear_relation_cache() self._sessions[session.id] = session self.info('opened session %s for user %s', session.id, login) self.hm.call_hooks('session_open', session) @@ -932,7 +964,7 @@ self._extid_cache[cachekey] = eid self._type_source_cache[eid] = (etype, source.uri, extid) entity = source.before_entity_insertion(session, extid, etype, eid) - entity.edited_attributes = set(entity) + entity.edited_attributes = set(entity.cw_attr_cache) if source.should_call_hooks: self.hm.call_hooks('before_add_entity', session, entity=entity) # XXX call add_info with complete=False ? 
@@ -1042,37 +1074,39 @@ the entity instance """ # init edited_attributes before calling before_add_entity hooks - entity._is_saved = False # entity has an eid but is not yet saved - entity.edited_attributes = set(entity) - entity_ = entity.pre_add_hook() - # XXX kill that transmutation feature ! - if not entity_ is entity: - entity.__class__ = entity_.__class__ - entity.__dict__.update(entity_.__dict__) + entity._cw_is_saved = False # entity has an eid but is not yet saved + entity.edited_attributes = set(entity.cw_attr_cache) # XXX cw_edited_attributes eschema = entity.e_schema source = self.locate_etype_source(entity.__regid__) # allocate an eid to the entity before calling hooks - entity.set_eid(self.system_source.create_eid(session)) + entity.eid = self.system_source.create_eid(session) # set caches asap extid = self.init_entity_caches(session, entity, source) if server.DEBUG & server.DBG_REPO: - print 'ADD entity', entity.__regid__, entity.eid, dict(entity) + print 'ADD entity', self, entity.__regid__, entity.eid, entity.cw_attr_cache relations = [] if source.should_call_hooks: self.hm.call_hooks('before_add_entity', session, entity=entity) # XXX use entity.keys here since edited_attributes is not updated for # inline relations XXX not true, right? (see edited_attributes # affectation above) - for attr in entity.iterkeys(): + for attr in entity.cw_attr_cache.iterkeys(): rschema = eschema.subjrels[attr] if not rschema.final: # inlined relation relations.append((attr, entity[attr])) - entity.set_defaults() + entity._cw_set_defaults() if session.is_hook_category_activated('integrity'): - entity.check(creation=True) - source.add_entity(session, entity) + entity._cw_check(creation=True) + try: + source.add_entity(session, entity) + except UniqueTogetherError, exc: + etype, rtypes = exc.args + problems = {} + for col in rtypes: + problems[col] = _('violates unique_together constraints (%s)') % (','.join(rtypes)) + raise ValidationError(entity.eid, problems) self.add_info(session, entity, source, extid, complete=False) - entity._is_saved = True # entity has an eid and is saved + entity._cw_is_saved = True # entity has an eid and is saved # prefill entity relation caches for rschema in eschema.subject_relations(): rtype = str(rschema) @@ -1081,15 +1115,17 @@ if rschema.final: entity.setdefault(rtype, None) else: - entity.set_related_cache(rtype, 'subject', session.empty_rset()) + entity.cw_set_relation_cache(rtype, 'subject', + session.empty_rset()) for rschema in eschema.object_relations(): rtype = str(rschema) if rtype in schema.VIRTUAL_RTYPES: continue - entity.set_related_cache(rtype, 'object', session.empty_rset()) - # set inline relation cache before call to after_add_entity + entity.cw_set_relation_cache(rtype, 'object', session.empty_rset()) + # set inlined relation cache before call to after_add_entity for attr, value in relations: session.update_rel_cache_add(entity.eid, attr, value) + del_existing_rel_if_needed(session, entity.eid, attr, value) # trigger after_add_entity after after_add_relation if source.should_call_hooks: self.hm.call_hooks('after_add_entity', session, entity=entity) @@ -1107,7 +1143,7 @@ """ if server.DEBUG & server.DBG_REPO: print 'UPDATE entity', entity.__regid__, entity.eid, \ - dict(entity), edited_attributes + entity.cw_attr_cache, edited_attributes hm = self.hm eschema = entity.e_schema session.set_entity_cache(entity) @@ -1139,21 +1175,29 @@ relations.append((attr, entity[attr], previous_value)) if source.should_call_hooks: # call hooks for inlined 
relations - for attr, value, _ in relations: + for attr, value, _t in relations: hm.call_hooks('before_add_relation', session, eidfrom=entity.eid, rtype=attr, eidto=value) if not only_inline_rels: hm.call_hooks('before_update_entity', session, entity=entity) if session.is_hook_category_activated('integrity'): - entity.check() - source.update_entity(session, entity) + entity._cw_check() + try: + source.update_entity(session, entity) + except UniqueTogetherError, exc: + etype, rtypes = exc.args + problems = {} + for col in rtypes: + problems[col] = _('violates unique_together constraints (%s)') % (','.join(rtypes)) + raise ValidationError(entity.eid, problems) + self.system_source.update_info(session, entity, need_fti_update) if source.should_call_hooks: if not only_inline_rels: hm.call_hooks('after_update_entity', session, entity=entity) for attr, value, prevvalue in relations: # if the relation is already cached, update existant cache - relcache = entity.relation_cached(attr, 'subject') + relcache = entity.cw_relation_cached(attr, 'subject') if prevvalue is not None: hm.call_hooks('after_delete_relation', session, eidfrom=entity.eid, rtype=attr, eidto=prevvalue) @@ -1163,8 +1207,8 @@ if relcache is not None: session.update_rel_cache_add(entity.eid, attr, value) else: - entity.set_related_cache(attr, 'subject', - session.eid_rset(value)) + entity.cw_set_relation_cache(attr, 'subject', + session.eid_rset(value)) hm.call_hooks('after_add_relation', session, eidfrom=entity.eid, rtype=attr, eidto=value) finally: @@ -1226,15 +1270,17 @@ def pyro_register(self, host=''): """register the repository as a pyro object""" - import tempfile - from logilab.common.pyro_ext import register_object, config - config.PYRO_STORAGE = tempfile.gettempdir() # XXX until lgc > 0.45.1 is out - appid = self.config['pyro-instance-id'] or self.config.appid - daemon = register_object(self, appid, self.config['pyro-ns-group'], - self.config['pyro-host'], - self.config['pyro-ns-host']) - msg = 'repository registered as a pyro object using group %s and id %s' - self.info(msg, self.config['pyro-ns-group'], appid) + from logilab.common import pyro_ext as pyro + config = self.config + appid = '%s.%s' % pyro.ns_group_and_id( + config['pyro-instance-id'] or config.appid, + config['pyro-ns-group']) + # ensure config['pyro-instance-id'] is a full qualified pyro name + config['pyro-instance-id'] = appid + daemon = pyro.register_object(self, appid, + daemonhost=config['pyro-host'], + nshost=config['pyro-ns-host']) + self.info('repository registered as a pyro object %s', appid) self.pyro_registered = True return daemon @@ -1242,15 +1288,15 @@ @cached def rel_type_sources(self, rtype): - return [source for source in self.sources - if source.support_relation(rtype) - or rtype in source.dont_cross_relations] + return tuple([source for source in self.sources + if source.support_relation(rtype) + or rtype in source.dont_cross_relations]) @cached def can_cross_relation(self, rtype): - return [source for source in self.sources - if source.support_relation(rtype) - and rtype in source.cross_relations] + return tuple([source for source in self.sources + if source.support_relation(rtype) + and rtype in source.cross_relations]) @cached def is_multi_sources_relation(self, rtype): diff -r f4d1d5d9ccbb -r 90f2f20367bc server/rqlannotation.py --- a/server/rqlannotation.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/rqlannotation.py Wed Nov 03 16:38:28 2010 +0100 @@ -17,8 +17,8 @@ # with CubicWeb. If not, see . 
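
Both glob_add_entity and glob_update_entity above now trap the UniqueTogetherError raised by the source and surface it as a ValidationError keyed by the offending relation names. A condensed sketch of that translation, assuming the exception's args hold (etype, rtypes) as in the hunks above; the helper name is invented:

    from cubicweb import ValidationError   # import confirmed by repository.py above

    _ = unicode  # no-op translation marker, mirroring the module-level alias above

    def unique_together_to_validation_error(entity, exc):
        # exc: a UniqueTogetherError whose args are assumed to be (etype, rtypes)
        etype, rtypes = exc.args
        msg = _('violates unique_together constraints (%s)') % ','.join(rtypes)
        raise ValidationError(entity.eid, dict((col, msg) for col in rtypes))
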
"""Functions to add additional annotations on a rql syntax tree to ease later code generation. +""" -""" __docformat__ = "restructuredtext en" from logilab.common.compat import any diff -r f4d1d5d9ccbb -r 90f2f20367bc server/schemaserial.py --- a/server/schemaserial.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/schemaserial.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""functions for schema / permissions (de)serialization using RQL +"""functions for schema / permissions (de)serialization using RQL""" -""" __docformat__ = "restructuredtext en" import os @@ -25,9 +24,11 @@ from logilab.common.shellutils import ProgressBar -from yams import schema as schemamod, buildobjs as ybo +from yams import BadSchemaDefinition, schema as schemamod, buildobjs as ybo -from cubicweb.schema import CONSTRAINTS, ETYPE_NAME_MAP, VIRTUAL_RTYPES +from cubicweb import CW_SOFTWARE_ROOT, typed_eid +from cubicweb.schema import (CONSTRAINTS, ETYPE_NAME_MAP, + VIRTUAL_RTYPES, PURE_VIRTUAL_RTYPES) from cubicweb.server import sqlutils def group_mapping(cursor, interactive=True): @@ -57,10 +58,18 @@ if not value: continue try: - res[group] = int(value) + eid = typed_eid(value) except ValueError: print 'eid should be an integer' continue + for eid_ in res.values(): + if eid == eid_: + break + else: + print 'eid is not a group eid' + continue + res[name] = eid + break return res def cstrtype_mapping(cursor): @@ -78,7 +87,7 @@ """ repo = session.repo dbhelper = repo.system_source.dbhelper - # 3.6 migration + # XXX bw compat (3.6 migration) sqlcu = session.pool['system'] sqlcu.execute("SELECT * FROM cw_CWRType WHERE cw_name='symetric'") if sqlcu.fetchall(): @@ -86,8 +95,10 @@ dbhelper.TYPE_MAPPING['Boolean'], True) sqlcu.execute(sql) sqlcu.execute("UPDATE cw_CWRType SET cw_name='symmetric' WHERE cw_name='symetric'") - sidx = {} - permsdict = deserialize_ertype_permissions(session) + session.commit(False) + ertidx = {} + copiedeids = set() + permsidx = deserialize_ertype_permissions(session) schema.reading_from_database = True for eid, etype, desc in session.execute( 'Any X, N, D WHERE X is CWEType, X name N, X description D', @@ -97,20 +108,32 @@ # just set the eid eschema = schema.eschema(etype) eschema.eid = eid - sidx[eid] = eschema + ertidx[eid] = etype continue if etype in ETYPE_NAME_MAP: + needcopy = False netype = ETYPE_NAME_MAP[etype] # can't use write rql queries at this point, use raw sql - session.system_sql('UPDATE %(p)sCWEType SET %(p)sname=%%(n)s WHERE %(p)seid=%%(x)s' - % {'p': sqlutils.SQL_PREFIX}, - {'x': eid, 'n': netype}) - session.system_sql('UPDATE entities SET type=%(n)s WHERE type=%(x)s', - {'x': etype, 'n': netype}) + sqlexec = session.system_sql + if sqlexec('SELECT 1 FROM %(p)sCWEType WHERE %(p)sname=%%(n)s' + % {'p': sqlutils.SQL_PREFIX}, {'n': netype}).fetchone(): + # the new type already exists, we should copy (eg make existing + # instances of the old type instances of the new type) + assert etype.lower() != netype.lower() + needcopy = True + else: + # the new type doesn't exist, we should rename + sqlexec('UPDATE %(p)sCWEType SET %(p)sname=%%(n)s WHERE %(p)seid=%%(x)s' + % {'p': sqlutils.SQL_PREFIX}, {'x': eid, 'n': netype}) + if etype.lower() != netype.lower(): + sqlexec('ALTER TABLE %s%s RENAME TO %s%s' % ( + sqlutils.SQL_PREFIX, etype, sqlutils.SQL_PREFIX, netype)) + sqlexec('UPDATE entities SET type=%(n)s WHERE type=%(x)s', + {'x': etype, 'n': netype}) 
session.commit(False) try: - session.system_sql('UPDATE deleted_entities SET type=%(n)s WHERE type=%(x)s', - {'x': etype, 'n': netype}) + sqlexec('UPDATE deleted_entities SET type=%(n)s WHERE type=%(x)s', + {'x': etype, 'n': netype}) except: pass tocleanup = [eid] @@ -118,56 +141,102 @@ if etype == eidetype) repo.clear_caches(tocleanup) session.commit(False) + if needcopy: + ertidx[eid] = netype + copiedeids.add(eid) + # copy / CWEType entity removal expected to be done through + # rename_entity_type in a migration script + continue etype = netype - etype = ybo.EntityType(name=etype, description=desc, eid=eid) - eschema = schema.add_entity_type(etype) - sidx[eid] = eschema - set_perms(eschema, permsdict) + ertidx[eid] = etype + eschema = schema.add_entity_type( + ybo.EntityType(name=etype, description=desc, eid=eid)) + set_perms(eschema, permsidx) for etype, stype in session.execute( 'Any XN, ETN WHERE X is CWEType, X name XN, X specializes ET, ET name ETN', build_descr=False): + etype = ETYPE_NAME_MAP.get(etype, etype) + stype = ETYPE_NAME_MAP.get(stype, stype) schema.eschema(etype)._specialized_type = stype schema.eschema(stype)._specialized_by.append(etype) for eid, rtype, desc, sym, il, ftc in session.execute( 'Any X,N,D,S,I,FTC WHERE X is CWRType, X name N, X description D, ' 'X symmetric S, X inlined I, X fulltext_container FTC', build_descr=False): - rtype = ybo.RelationType(name=rtype, description=desc, - symmetric=bool(sym), inlined=bool(il), - fulltext_container=ftc, eid=eid) - rschema = schema.add_relation_type(rtype) - sidx[eid] = rschema - cstrsdict = deserialize_rdef_constraints(session) + ertidx[eid] = rtype + rschema = schema.add_relation_type( + ybo.RelationType(name=rtype, description=desc, + symmetric=bool(sym), inlined=bool(il), + fulltext_container=ftc, eid=eid)) + cstrsidx = deserialize_rdef_constraints(session) + pendingrdefs = [] + # closure to factorize common code of attribute/relation rdef addition + def _add_rdef(rdefeid, seid, reid, oeid, **kwargs): + rdef = ybo.RelationDefinition(ertidx[seid], ertidx[reid], ertidx[oeid], + constraints=cstrsidx.get(rdefeid, ()), + eid=rdefeid, **kwargs) + if seid in copiedeids or oeid in copiedeids: + # delay addition of this rdef. We'll insert them later if needed. We + # have to do this because: + # + # * on etype renaming, we want relation of the old entity type being + # redirected to the new type during migration + # + # * in the case of a copy, we've to take care that rdef already + # existing in the schema are not overwritten by a redirected one, + # since we want correct eid on them (redirected rdef will be + # removed in rename_entity_type) + pendingrdefs.append(rdef) + else: + # add_relation_def return a RelationDefinitionSchema if it has been + # actually added (can be None on duplicated relation definitions, + # e.g. 
if the relation type is marked as beeing symmetric) + rdefs = schema.add_relation_def(rdef) + if rdefs is not None: + ertidx[rdefeid] = rdefs + set_perms(rdefs, permsidx) + for values in session.execute( 'Any X,SE,RT,OE,CARD,ORD,DESC,IDX,FTIDX,I18N,DFLT WHERE X is CWAttribute,' 'X relation_type RT, X cardinality CARD, X ordernum ORD, X indexed IDX,' 'X description DESC, X internationalizable I18N, X defaultval DFLT,' 'X fulltextindexed FTIDX, X from_entity SE, X to_entity OE', build_descr=False): - rdefeid, seid, reid, teid, card, ord, desc, idx, ftidx, i18n, default = values - rdef = ybo.RelationDefinition(sidx[seid].type, sidx[reid].type, sidx[teid].type, - cardinality=card, - constraints=cstrsdict.get(rdefeid, ()), - order=ord, description=desc, - indexed=idx, fulltextindexed=ftidx, - internationalizable=i18n, - default=default, eid=rdefeid) - rdefs = schema.add_relation_def(rdef) - # rdefs can be None on duplicated relation definitions (e.g. symmetrics) - if rdefs is not None: - set_perms(rdefs, permsdict) + rdefeid, seid, reid, oeid, card, ord, desc, idx, ftidx, i18n, default = values + _add_rdef(rdefeid, seid, reid, oeid, + cardinality=card, description=desc, order=ord, + indexed=idx, fulltextindexed=ftidx, internationalizable=i18n, + default=default) for values in session.execute( 'Any X,SE,RT,OE,CARD,ORD,DESC,C WHERE X is CWRelation, X relation_type RT,' 'X cardinality CARD, X ordernum ORD, X description DESC, ' 'X from_entity SE, X to_entity OE, X composite C', build_descr=False): - rdefeid, seid, reid, teid, card, ord, desc, c = values - rdef = ybo.RelationDefinition(sidx[seid].type, sidx[reid].type, sidx[teid].type, - constraints=cstrsdict.get(rdefeid, ()), - cardinality=card, order=ord, description=desc, - composite=c, eid=rdefeid) - rdefs = schema.add_relation_def(rdef) - # rdefs can be None on duplicated relation definitions (e.g. 
symmetrics) + rdefeid, seid, reid, oeid, card, ord, desc, comp = values + _add_rdef(rdefeid, seid, reid, oeid, + cardinality=card, description=desc, order=ord, + composite=comp) + for rdef in pendingrdefs: + try: + rdefs = schema.add_relation_def(rdef) + except BadSchemaDefinition: + continue if rdefs is not None: - set_perms(rdefs, permsdict) + set_perms(rdefs, permsidx) + unique_togethers = {} + try: + rset = session.execute( + 'Any X,E,R WHERE ' + 'X is CWUniqueTogetherConstraint, ' + 'X constraint_of E, X relations R', build_descr=False) + except Exception: + session.rollback() # first migration introducing CWUniqueTogetherConstraint cw 3.9.6 + else: + for values in rset: + uniquecstreid, eeid, releid = values + eschema = schema.schema_by_eid(eeid) + relations = unique_togethers.setdefault(uniquecstreid, (eschema, [])) + relations[1].append(ertidx[releid].rtype.type) + for eschema, unique_together in unique_togethers.itervalues(): + eschema._unique_together.append(tuple(sorted(unique_together))) schema.infer_specialization_rules() session.commit() schema.reading_from_database = False @@ -204,7 +273,7 @@ res.setdefault(rdefeid, []).append(cstr) return res -def set_perms(erschema, permsdict): +def set_perms(erschema, permsidx): """set permissions on the given erschema according to the permission definition dictionary as built by deserialize_ertype_permissions for a given erschema's eid @@ -212,7 +281,7 @@ # reset erschema permissions here to avoid getting yams default anyway erschema.permissions = dict((action, ()) for action in erschema.ACTIONS) try: - thispermsdict = permsdict[erschema.eid] + thispermsdict = permsidx[erschema.eid] except KeyError: return for action, somethings in thispermsdict.iteritems(): @@ -281,6 +350,10 @@ rdef2rql(rdef, cstrtypemap, groupmap)) if pb is not None: pb.update() + # serialize unique_together constraints + for eschema in eschemas: + for unique_together in eschema._unique_together: + execschemarql(execute, eschema, [uniquetogether2rql(eschema, unique_together)]) for rql, kwargs in specialize2rql(schema): execute(rql, kwargs, build_descr=False) if pb is not None: @@ -338,6 +411,31 @@ values = {'x': eschema.eid, 'et': specialized_type.eid} yield 'SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', values +def uniquetogether2rql(eschema, unique_together): + relations = [] + restrictions = [] + substs = {} + for i, name in enumerate(unique_together): + rschema = eschema.rdef(name) + var = 'R%d' % i + rtype = 'T%d' % i + substs[rtype] = rschema.rtype.type + relations.append('C relations %s' % var) + restrictions.append('%(var)s from_entity X, ' + '%(var)s relation_type %(rtype)s, ' + '%(rtype)s name %%(%(rtype)s)s' \ + % {'var': var, + 'rtype':rtype}) + relations = ', '.join(relations) + restrictions = ', '.join(restrictions) + rql = ('INSERT CWUniqueTogetherConstraint C: ' + ' C constraint_of X, %s ' + 'WHERE ' + ' X eid %%(x)s, %s' ) + + return rql % (relations, restrictions), substs + + def _ervalues(erschema): try: type_ = unicode(erschema.type) diff -r f4d1d5d9ccbb -r 90f2f20367bc server/server.py --- a/server/server.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/server.py Wed Nov 03 16:38:28 2010 +0100 @@ -74,10 +74,10 @@ class RepositoryServer(object): - def __init__(self, config, debug=False): + def __init__(self, config): """make the repository available as a PyRO object""" self.config = config - self.repo = Repository(config, debug=debug) + self.repo = Repository(config) self.ns = None self.quiting = None # event queue diff -r f4d1d5d9ccbb -r 
90f2f20367bc server/serverconfig.py --- a/server/serverconfig.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/serverconfig.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,16 +15,15 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""server.serverconfig definition +"""server.serverconfig definition""" -""" __docformat__ = "restructuredtext en" from os.path import join, exists from logilab.common.configuration import REQUIRED, Method, Configuration, \ ini_format_section -from logilab.common.decorators import wproperty, cached, clear_cache +from logilab.common.decorators import wproperty, cached from cubicweb.toolsutils import read_config, restrict_perms_to_user from cubicweb.cwconfig import CubicWebConfiguration, merge_options @@ -46,15 +45,27 @@ ) class SourceConfiguration(Configuration): - def __init__(self, appid, options): - self.appid = appid # has to be done before super call + def __init__(self, appconfig, options): + self.appconfig = appconfig # has to be done before super call super(SourceConfiguration, self).__init__(options=options) # make Method('default_instance_id') usable in db option defs (in native.py) def default_instance_id(self): - return self.appid + return self.appconfig.appid -def generate_sources_file(appid, sourcesfile, sourcescfg, keys=None): + def input_option(self, option, optdict, inputlevel): + if self['db-driver'] == 'sqlite': + if option in ('db-user', 'db-password'): + return + if option == 'db-name': + optdict = optdict.copy() + optdict['help'] = 'path to the sqlite database' + optdict['default'] = join(self.appconfig.appdatahome, + self.appconfig.appid + '.sqlite') + super(SourceConfiguration, self).input_option(option, optdict, inputlevel) + + +def generate_sources_file(appconfig, sourcesfile, sourcescfg, keys=None): """serialize repository'sources configuration into a INI like file the `keys` parameter may be used to sort sections @@ -74,7 +85,7 @@ options = USER_OPTIONS else: options = SOURCE_TYPES[sconfig['adapter']].options - _sconfig = SourceConfiguration(appid, options=options) + _sconfig = SourceConfiguration(appconfig, options=options) for attr, val in sconfig.items(): if attr == 'uri': continue @@ -157,7 +168,7 @@ ('multi-sources-etypes', {'type' : 'csv', 'default': (), 'help': 'defines which entity types from this repository are used \ -by some other instances. You should set this properly so those instances to \ +by some other instances. 
You should set this properly for these instances to \ detect updates / deletions.', 'group': 'main', 'level': 3, }), @@ -228,11 +239,7 @@ # list of enables sources when sources restriction is necessary # (eg repository initialization at least) - _enabled_sources = None - @wproperty - def enabled_sources(self, sourceuris=None): - self._enabled_sources = sourceuris - clear_cache(self, 'sources') + enabled_sources = None def bootstrap_cubes(self): from logilab.common.textutils import splitstrip @@ -267,18 +274,17 @@ """return a dictionnaries containing sources definitions indexed by sources'uri """ - allsources = self.read_sources_file() - if self._enabled_sources is None: - return allsources - return dict((uri, config) for uri, config in allsources.items() - if uri in self._enabled_sources or uri == 'admin') + return self.read_sources_file() + + def source_enabled(self, uri): + return not self.enabled_sources or uri in self.enabled_sources def write_sources_file(self, sourcescfg): sourcesfile = self.sources_file() if exists(sourcesfile): import shutil shutil.copy(sourcesfile, sourcesfile + '.bak') - generate_sources_file(self.appid, sourcesfile, sourcescfg, + generate_sources_file(self, sourcesfile, sourcescfg, ['admin', 'system']) restrict_perms_to_user(sourcesfile) @@ -326,8 +332,7 @@ for uri in sources: assert uri in known_sources, uri enabled_sources = sources - self._enabled_sources = enabled_sources - clear_cache(self, 'sources') + self.enabled_sources = enabled_sources def migration_handler(self, schema=None, interactive=True, cnx=None, repo=None, connect=True, verbosity=None): diff -r f4d1d5d9ccbb -r 90f2f20367bc server/serverctl.py --- a/server/serverctl.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/serverctl.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
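
The serverconfig.py hunks above replace the cached, filtered sources() with a plain enabled_sources attribute and a source_enabled(uri) predicate, while the repository (earlier in this patch) still instantiates every configured source so set_schema() reaches disabled ones. A small self-contained sketch of the predicate's semantics; the class and the 'myldap' uri are made up for the example:

    class FakeServerConfig(object):
        # stand-in for ServerConfiguration, only to exercise the predicate
        enabled_sources = None               # None/empty means "no restriction"

        def source_enabled(self, uri):
            return not self.enabled_sources or uri in self.enabled_sources

    cfg = FakeServerConfig()
    assert cfg.source_enabled('system') and cfg.source_enabled('myldap')
    cfg.enabled_sources = ('system',)        # e.g. set through set_sources_mode()
    assert cfg.source_enabled('system')
    assert not cfg.source_enabled('myldap')
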
-"""cubicweb-ctl commands and command handlers specific to the server.serverconfig +"""cubicweb-ctl commands and command handlers specific to the repository""" -""" __docformat__ = 'restructuredtext en' # *ctl module should limit the number of import to be imported as quickly as @@ -27,11 +26,11 @@ import os from logilab.common.configuration import Configuration -from logilab.common.clcommands import register_commands, cmd_run, pop_arg from logilab.common.shellutils import ASK from cubicweb import AuthenticationError, ExecutionError, ConfigurationError from cubicweb.toolsutils import Command, CommandHandler, underline_title +from cubicweb.cwctl import CWCTL from cubicweb.server import SOURCE_TYPES from cubicweb.server.serverconfig import (USER_OPTIONS, ServerConfiguration, SourceConfiguration) @@ -43,37 +42,44 @@ given server.serverconfig """ from getpass import getpass - from logilab.database import get_connection + from logilab.database import get_connection, get_db_helper dbhost = source.get('db-host') if dbname is None: dbname = source['db-name'] driver = source['db-driver'] - print '-> connecting to %s database' % driver, - if dbhost: - print '%s@%s' % (dbname, dbhost), - else: - print dbname, - if not verbose or (not special_privs and source.get('db-user')): - user = source['db-user'] - print 'as', user - if source.get('db-password'): - password = source['db-password'] + dbhelper = get_db_helper(driver) + if verbose: + print '-> connecting to %s database' % driver, + if dbhost: + print '%s@%s' % (dbname, dbhost), else: - password = getpass('password: ') + print dbname, + if dbhelper.users_support: + if not verbose or (not special_privs and source.get('db-user')): + user = source['db-user'] + if verbose: + print 'as', user + if source.get('db-password'): + password = source['db-password'] + else: + password = getpass('password: ') + else: + print + if special_privs: + print 'WARNING' + print ('the user will need the following special access rights ' + 'on the database:') + print special_privs + print + default_user = source.get('db-user', os.environ.get('USER', '')) + user = raw_input('Connect as user ? [%r]: ' % default_user) + user = user or default_user + if user == source.get('db-user') and source.get('db-password'): + password = source['db-password'] + else: + password = getpass('password: ') else: - print - if special_privs: - print 'WARNING' - print 'the user will need the following special access rights on the database:' - print special_privs - print - default_user = source.get('db-user', os.environ.get('USER', '')) - user = raw_input('Connect as user ? 
[%r]: ' % default_user) - user = user or default_user - if user == source.get('db-user') and source.get('db-password'): - password = source['db-password'] - else: - password = getpass('password: ') + user = password = None extra_args = source.get('db-extra-arguments') extra = extra_args and {'extra_args': extra_args} or {} cnx = get_connection(driver, dbhost, dbname, user, password=password, @@ -152,8 +158,8 @@ cfgname = 'repository' def bootstrap(self, cubes, inputlevel=0): - """create an instance by copying files from the given cube and by - asking information necessary to build required configuration files + """create an instance by copying files from the given cube and by asking + information necessary to build required configuration files """ from cubicweb.server.utils import ask_source_config config = self.config @@ -168,7 +174,7 @@ sourcesfile = config.sources_file() # XXX hack to make Method('default_instance_id') usable in db option # defs (in native.py) - sconfig = SourceConfiguration(config.appid, + sconfig = SourceConfiguration(config, options=SOURCE_TYPES['native'].options) sconfig.adapter = 'native' sconfig.input_config(inputlevel=inputlevel) @@ -211,7 +217,7 @@ def postcreate(self): if ASK.confirm('Run db-create to create the system database ?'): verbosity = (self.config.mode == 'installed') and 'y' or 'n' - cmd_run('db-create', self.config.appid, '--verbose=%s' % verbosity) + CWCTL.run(['db-create', self.config.appid, '--verbose=%s' % verbosity]) else: print ('-> nevermind, you can do it later with ' '"cubicweb-ctl db-create %s".' % self.config.appid) @@ -228,6 +234,9 @@ dbname = source['db-name'] helper = get_db_helper(source['db-driver']) if ASK.confirm('Delete database %s ?' % dbname): + if source['db-driver'] == 'sqlite': + os.unlink(source['db-name']) + return user = source['db-user'] or None cnx = _db_sys_cnx(source, 'DROP DATABASE', user=user) cursor = cnx.cursor() @@ -249,11 +258,12 @@ cmdname = 'start' cfgname = 'repository' - def start_server(self, ctlconf, debug): + def start_server(self, config): command = ['cubicweb-ctl start-repository '] - if debug: + if config.debugmode: command.append('--debug') - command.append(self.config.appid) + command.append('--loglevel %s' % config['log-threshold'].lower()) + command.append(config.appid) os.system(' '.join(command)) @@ -262,8 +272,7 @@ cfgname = 'repository' def poststop(self): - """if pyro is enabled, ensure the repository is correctly - unregistered + """if pyro is enabled, ensure the repository is correctly unregistered """ if self.config.pyro_enabled(): from cubicweb.server.repository import pyro_unregister @@ -272,6 +281,14 @@ # repository specific commands ################################################ +def createdb(helper, source, dbcnx, cursor, **kwargs): + if dbcnx.logged_user != source['db-user']: + helper.create_database(cursor, source['db-name'], source['db-user'], + source['db-encoding'], **kwargs) + else: + helper.create_database(cursor, source['db-name'], + dbencoding=source['db-encoding'], **kwargs) + class CreateInstanceDBCommand(Command): """Create the system database of an instance (run after 'create'). 
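
The 'start' command handler above now derives its child command line from config.debugmode and the 'log-threshold' option instead of a separate debug flag. A standalone sketch of that assembly, with the appid and threshold value invented for the example:

    def build_start_command(appid, debugmode, log_threshold):
        # mirrors start_server() above: --debug only in debug mode, the log
        # level always forwarded in lower case
        command = ['cubicweb-ctl start-repository ']
        if debugmode:
            command.append('--debug')
        command.append('--loglevel %s' % log_threshold.lower())
        command.append(appid)
        return ' '.join(command)

    print(build_start_command('myinstance', True, 'WARNING'))
    # -> cubicweb-ctl start-repository  --debug --loglevel warning myinstance
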
@@ -285,7 +302,7 @@ """ name = 'db-create' arguments = '' - + min_args = max_args = 1 options = ( ('create-db', {'short': 'c', 'type': 'yn', 'metavar': '', @@ -309,19 +326,18 @@ from logilab.database import get_db_helper verbose = self.get('verbose') automatic = self.get('automatic') - appid = pop_arg(args, msg='No instance specified !') + appid = args.pop() config = ServerConfiguration.config_for(appid) source = config.sources()['system'] dbname = source['db-name'] driver = source['db-driver'] - create_db = self.config.create_db helper = get_db_helper(driver) if driver == 'sqlite': if os.path.exists(dbname) and ( automatic or ASK.confirm('Database %s already exists. Drop it?' % dbname)): os.unlink(dbname) - elif create_db: + elif self.config.create_db: print '\n'+underline_title('Creating the system database') # connect on the dbms system base to create our base dbcnx = _db_sys_cnx(source, 'CREATE DATABASE and / or USER', verbose=verbose) @@ -338,12 +354,7 @@ cursor.execute('DROP DATABASE %s' % dbname) else: return - if dbcnx.logged_user != source['db-user']: - helper.create_database(cursor, dbname, source['db-user'], - source['db-encoding']) - else: - helper.create_database(cursor, dbname, - dbencoding=source['db-encoding']) + createdb(helper, source, dbcnx, cursor) dbcnx.commit() print '-> database %s created.' % dbname except: @@ -363,7 +374,7 @@ print '-> database for instance %s created and necessary extensions installed.' % appid print if automatic or ASK.confirm('Run db-init to initialize the system database ?'): - cmd_run('db-init', config.appid) + CWCTL.run(['db-init', config.appid]) else: print ('-> nevermind, you can do it later with ' '"cubicweb-ctl db-init %s".' % config.appid) @@ -381,7 +392,7 @@ """ name = 'db-init' arguments = '' - + min_args = max_args = 1 options = ( ('drop', {'short': 'd', 'action': 'store_true', @@ -394,7 +405,7 @@ print '\n'+underline_title('Initializing the system database') from cubicweb.server import init_repository from logilab.database import get_connection - appid = pop_arg(args, msg='No instance specified !') + appid = args[0] config = ServerConfiguration.config_for(appid) try: system = config.sources()['system'] @@ -423,7 +434,7 @@ """ name = 'db-grant-user' arguments = ' ' - + min_args = max_args = 2 options = ( ('set-owner', {'short': 'o', 'type' : 'yn', 'metavar' : '', @@ -434,8 +445,7 @@ def run(self, args): """run the command with its specific arguments""" from cubicweb.server.sqlutils import sqlexec, sqlgrants - appid = pop_arg(args, 1, msg='No instance specified !') - user = pop_arg(args, msg='No user specified !') + appid, user = args config = ServerConfiguration.config_for(appid) source = config.sources()['system'] set_owner = self.config.set_owner @@ -449,7 +459,7 @@ cnx.rollback() import traceback traceback.print_exc() - print '-> an error occured:', ex + print '-> an error occurred:', ex else: cnx.commit() print '-> rights granted to %s on instance %s.' 
% (appid, user) @@ -467,7 +477,7 @@ def run(self, args): """run the command with its specific arguments""" from cubicweb.server.utils import crypt_password, manager_userpasswd - appid = pop_arg(args, 1, msg='No instance specified !') + appid = args[0] config = ServerConfiguration.config_for(appid) sourcescfg = config.read_sources_file() try: @@ -491,7 +501,7 @@ passwdmsg='new password for %s' % adminlogin) try: cursor.execute("UPDATE cw_CWUser SET cw_upassword=%(p)s WHERE cw_login=%(l)s", - {'p': crypt_password(passwd), 'l': adminlogin}) + {'p': buffer(crypt_password(passwd)), 'l': adminlogin}) sconfig = Configuration(options=USER_OPTIONS) sconfig['login'] = adminlogin sconfig['password'] = passwd @@ -501,7 +511,7 @@ cnx.rollback() import traceback traceback.print_exc() - print '-> an error occured:', ex + print '-> an error occurred:', ex else: cnx.commit() print '-> password reset, sources file regenerated.' @@ -518,27 +528,33 @@ """ name = 'start-repository' arguments = '' - + min_args = max_args = 1 options = ( ('debug', {'short': 'D', 'action' : 'store_true', 'help': 'start server in debug mode.'}), + ('loglevel', + {'short': 'l', 'type' : 'choice', 'metavar': '', + 'default': None, 'choices': ('debug', 'info', 'warning', 'error'), + 'help': 'debug if -D is set, error otherwise', + }), ) def run(self, args): from logilab.common.daemon import daemonize + from cubicweb.cwctl import init_cmdline_log_threshold from cubicweb.server.server import RepositoryServer - appid = pop_arg(args, msg='No instance specified !') - config = ServerConfiguration.config_for(appid) - if sys.platform == 'win32': - if not self.config.debug: - from logging import getLogger - logger = getLogger('cubicweb.ctl') - logger.info('Forcing debug mode on win32 platform') - self.config.debug = True - debug = self.config.debug + appid = args[0] + debug = self['debug'] + if sys.platform == 'win32' and not debug: + from logging import getLogger + logger = getLogger('cubicweb.ctl') + logger.info('Forcing debug mode on win32 platform') + debug = True + config = ServerConfiguration.config_for(appid, debugmode=debug) + init_cmdline_log_threshold(config, self['loglevel']) # create the server - server = RepositoryServer(config, debug) + server = RepositoryServer(config) # ensure the directory where the pid-file should be set exists (for # instance /var/run/cubicweb may be deleted on computer restart) pidfile = config['pid-file'] @@ -581,7 +597,7 @@ rmcmd = 'ssh -t %s "rm -f /tmp/%s"' % (host, filename) print rmcmd if os.system(rmcmd) and not ASK.confirm( - 'An error occured while deleting remote dump at /tmp/%s. ' + 'An error occurred while deleting remote dump at /tmp/%s. ' 'Continue anyway?' 
% filename): raise ExecutionError('Error while deleting remote dump at /tmp/%s' % filename) @@ -659,7 +675,7 @@ """ name = 'db-dump' arguments = '' - + min_args = max_args = 1 options = ( ('output', {'short': 'o', 'type' : 'string', 'metavar' : '', @@ -674,7 +690,7 @@ ) def run(self, args): - appid = pop_arg(args, 1, msg='No instance specified !') + appid = args[0] if ':' in appid: host, appid = appid.split(':') _remote_dump(host, appid, self.config.output, self.config.sudo) @@ -690,6 +706,7 @@ """ name = 'db-restore' arguments = ' ' + min_args = max_args = 2 options = ( ('no-drop', @@ -707,8 +724,7 @@ ) def run(self, args): - appid = pop_arg(args, 1, msg='No instance specified !') - backupfile = pop_arg(args, msg='No backup file or timestamp specified !') + appid, backupfile = args _local_restore(appid, backupfile, drop=not self.config.no_drop, systemonly=not self.config.restore_all) @@ -726,7 +742,7 @@ """ name = 'db-copy' arguments = ' ' - + min_args = max_args = 2 options = ( ('no-drop', {'short': 'n', 'action' : 'store_true', @@ -748,8 +764,7 @@ def run(self, args): import tempfile - srcappid = pop_arg(args, 1, msg='No source instance specified !') - destappid = pop_arg(args, msg='No destination instance specified !') + srcappid, destappid = args fd, output = tempfile.mkstemp() os.close(fd) if ':' in srcappid: @@ -772,7 +787,7 @@ """ name = 'db-check' arguments = '' - + min_args = max_args = 1 options = ( ('checks', {'short': 'c', 'type' : 'csv', 'metavar' : '', @@ -803,7 +818,7 @@ def run(self, args): from cubicweb.server.checkintegrity import check - appid = pop_arg(args, 1, msg='No instance specified !') + appid = args[0] config = ServerConfiguration.config_for(appid) config.repairing = self.config.force repo, cnx = repo_cnx(config) @@ -819,12 +834,11 @@ """ name = 'db-rebuild-fti' arguments = '' - - options = () + min_args = max_args = 1 def run(self, args): from cubicweb.server.checkintegrity import reindex_entities - appid = pop_arg(args, 1, msg='No instance specified !') + appid = args[0] config = ServerConfiguration.config_for(appid) repo, cnx = repo_cnx(config) session = repo._get_session(cnx.sessionid, setpool=True) @@ -843,23 +857,48 @@ """ name = 'schema-sync' arguments = '' + min_args = max_args = 1 def run(self, args): - appid = pop_arg(args, msg='No instance specified !') + appid = args[0] config = ServerConfiguration.config_for(appid) mih = config.migration_handler() mih.cmd_synchronize_schema() -register_commands( (CreateInstanceDBCommand, - InitInstanceCommand, - GrantUserOnInstanceCommand, - ResetAdminPasswordCommand, - StartRepositoryCommand, - DBDumpCommand, - DBRestoreCommand, - DBCopyCommand, - CheckRepositoryCommand, - RebuildFTICommand, - SynchronizeInstanceSchemaCommand, - ) ) +class CheckMappingCommand(Command): + """Check content of the mapping file of an external source. + + The mapping is checked against the instance's schema, searching for + inconsistencies or things you may have forgotten. It's highly recommended to + run it when you set up a multi-sources instance. + + + the identifier of the instance. + + + the mapping file to check.
+ """ + name = 'check-mapping' + arguments = ' ' + min_args = max_args = 2 + + def run(self, args): + from cubicweb.server.checkintegrity import check_mapping + from cubicweb.server.sources.pyrorql import load_mapping_file + appid, mappingfile = args + config = ServerConfiguration.config_for(appid) + config.quick_start = True + mih = config.migration_handler(connect=False, verbosity=1) + repo = mih.repo_connect() # necessary to get cubes + check_mapping(config.load_schema(), load_mapping_file(mappingfile)) + +for cmdclass in (CreateInstanceDBCommand, InitInstanceCommand, + GrantUserOnInstanceCommand, ResetAdminPasswordCommand, + StartRepositoryCommand, + DBDumpCommand, DBRestoreCommand, DBCopyCommand, + CheckRepositoryCommand, RebuildFTICommand, + SynchronizeInstanceSchemaCommand, + CheckMappingCommand, + ): + CWCTL.register(cmdclass) diff -r f4d1d5d9ccbb -r 90f2f20367bc server/session.py --- a/server/session.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/session.py Wed Nov 03 16:38:28 2010 +0100 @@ -31,7 +31,7 @@ from rql.nodes import ETYPE_PYOBJ_MAP, etype_from_pyobj from yams import BASE_TYPES -from cubicweb import Binary, UnknownEid, schema +from cubicweb import Binary, UnknownEid, QueryError, schema from cubicweb.req import RequestSessionBase from cubicweb.dbapi import ConnectionProperties from cubicweb.utils import make_uid, RepeatList @@ -250,7 +250,7 @@ entity = self.entity_cache(eid) except KeyError: return - rcache = entity.relation_cached(rtype, role) + rcache = entity.cw_relation_cached(rtype, role) if rcache is not None: rset, entities = rcache rset = rset.copy() @@ -266,14 +266,15 @@ targetentity.cw_col = 0 rset.rowcount += 1 entities.append(targetentity) - entity._related_cache['%s_%s' % (rtype, role)] = (rset, tuple(entities)) + entity._cw_related_cache['%s_%s' % (rtype, role)] = ( + rset, tuple(entities)) def _update_entity_rel_cache_del(self, eid, rtype, role, targeteid): try: entity = self.entity_cache(eid) except KeyError: return - rcache = entity.relation_cached(rtype, role) + rcache = entity.cw_relation_cached(rtype, role) if rcache is not None: rset, entities = rcache for idx, row in enumerate(rset.rows): @@ -292,7 +293,8 @@ del rset.description[idx] del entities[idx] rset.rowcount -= 1 - entity._related_cache['%s_%s' % (rtype, role)] = (rset, tuple(entities)) + entity._cw_related_cache['%s_%s' % (rtype, role)] = ( + rset, tuple(entities)) # resource accessors ###################################################### @@ -312,16 +314,15 @@ def set_language(self, language): """i18n configuration for translation""" - vreg = self.vreg language = language or self.user.property_value('ui.language') try: - gettext, pgettext = vreg.config.translations[language] + gettext, pgettext = self.vreg.config.translations[language] self._ = self.__ = gettext self.pgettext = pgettext except KeyError: - language = vreg.property_value('ui.language') + language = self.vreg.property_value('ui.language') try: - gettext, pgettext = vreg.config.translations[language] + gettext, pgettext = self.vreg.config.translations[language] self._ = self.__ = gettext self.pgettext = pgettext except KeyError: @@ -661,16 +662,6 @@ else: del self.transaction_data['ecache'][eid] - def base_url(self): - url = self.repo.config['base-url'] - if not url: - try: - url = self.repo.config.default_base_url() - except AttributeError: # default_base_url() might not be available - self.warning('missing base-url definition in server config') - url = u'' - return url - def from_controller(self): """return the id (string) of 
the controller issuing the request (no sense here, always return 'view') @@ -735,7 +726,10 @@ self._touch() self.debug('commit session %s done (no db activity)', self.id) return - if self.commit_state: + cstate = self.commit_state + if cstate == 'uncommitable': + raise QueryError('transaction must be rollbacked') + if cstate is not None: return # on rollback, an operation should have the following state # information: @@ -756,7 +750,6 @@ self.pending_operations[:] = processed self.debug('%s session %s done', trstate, self.id) except: - self.exception('error while %sing', trstate) # if error on [pre]commit: # # * set .failed = True on the operation causing the failure @@ -768,8 +761,12 @@ # instead of having to implements rollback, revertprecommit # and revertcommit, that will be enough in mont case. operation.failed = True - for operation in processed: - operation.handle_event('revert%s_event' % trstate) + for operation in reversed(processed): + try: + operation.handle_event('revert%s_event' % trstate) + except: + self.critical('error while reverting %sing', trstate, + exc_info=True) # XXX use slice notation since self.pending_operations is a # read-only property. self.pending_operations[:] = processed + self.pending_operations @@ -785,7 +782,7 @@ except: self.critical('error while %sing', trstate, exc_info=sys.exc_info()) - self.info('%s session %s done', trstate, self.id) + self.debug('%s session %s done', trstate, self.id) return self.transaction_uuid(set=False) finally: self._touch() @@ -1027,7 +1024,7 @@ def __init__(self, repo, cnxprops=None): super(InternalSession, self).__init__(InternalManager(), repo, cnxprops, _id='internal') - self.user.req = self # XXX remove when "vreg = user.req.vreg" hack in entity.py is gone + self.user._cw = self # XXX remove when "vreg = user._cw.vreg" hack in entity.py is gone self.cnxtype = 'inmemory' self.disable_hook_categories('integrity') diff -r f4d1d5d9ccbb -r 90f2f20367bc server/sources/__init__.py --- a/server/sources/__init__.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/sources/__init__.py Wed Nov 03 16:38:28 2010 +0100 @@ -182,7 +182,7 @@ wsupport = self.support_relations[rtype] except KeyError: rschema = self.schema.rschema(rtype) - if not rschema.final or rschema == 'has_text': + if not rschema.final or rschema.type == 'has_text': return False for etype in rschema.subjects(): try: @@ -306,7 +306,7 @@ pass def authenticate(self, session, login, **kwargs): - """if the source support CWUser entity type, it should implements + """if the source support CWUser entity type, it should implement this method which should return CWUser eid for the given login/password if this account is defined in this source and valid login / password is given. Else raise `AuthenticationError` @@ -342,7 +342,7 @@ entity. 
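On the caller side, the new 'uncommitable' check in Session.commit() above boils down to the following pattern (a sketch only; session stands for an already open repository session):

    from cubicweb import QueryError
    try:
        session.commit()
    except QueryError:
        # the transaction was marked 'uncommitable' by an earlier failure;
        # rolling it back is the only way out
        session.rollback()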
""" entity = self.repo.vreg['etypes'].etype_class(etype)(session) - entity.set_eid(eid) + entity.eid = eid return entity def after_entity_insertion(self, session, lid, entity): diff -r f4d1d5d9ccbb -r 90f2f20367bc server/sources/ldapuser.py --- a/server/sources/ldapuser.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/sources/ldapuser.py Wed Nov 03 16:38:28 2010 +0100 @@ -232,6 +232,8 @@ if res: ldapemailaddr = res[0].get(ldap_emailattr) if ldapemailaddr: + if isinstance(ldapemailaddr, list): + ldapemailaddr = ldapemailaddr[0] # XXX consider only the first email in the list rset = execute('Any X,A WHERE ' 'X address A, U use_email X, U eid %(u)s', {'u': eid}) @@ -522,7 +524,7 @@ eid, base) entity = session.entity_from_eid(eid, 'CWUser') self.repo.delete_info(session, entity, self.uri, base) - self.reset_cache() + self.reset_caches() return [] # except ldap.REFERRAL, e: # cnx = self.handle_referral(e) @@ -589,6 +591,8 @@ emailaddr = self._cache[dn][self.user_rev_attrs['email']] except KeyError: return + if isinstance(emailaddr, list): + emailaddr = emailaddr[0] # XXX consider only the first email in the list rset = session.execute('EmailAddress X WHERE X address %(addr)s', {'addr': emailaddr}) if rset: diff -r f4d1d5d9ccbb -r 90f2f20367bc server/sources/native.py --- a/server/sources/native.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/sources/native.py Wed Nov 03 16:38:28 2010 +0100 @@ -34,6 +34,7 @@ from base64 import b64decode, b64encode from contextlib import contextmanager from os.path import abspath +import re from logilab.common.compat import any from logilab.common.cache import Cache @@ -42,7 +43,9 @@ from logilab.common.shellutils import getlogin from logilab.database import get_db_helper -from cubicweb import UnknownEid, AuthenticationError, ValidationError, Binary +from yams import schema2sql as y2sql + +from cubicweb import UnknownEid, AuthenticationError, ValidationError, Binary, UniqueTogetherError from cubicweb import transaction as tx, server, neg_role from cubicweb.schema import VIRTUAL_RTYPES from cubicweb.cwconfig import CubicWebNoAppConfiguration @@ -127,6 +130,25 @@ restr = '(%s)' % ' OR '.join(clauses) return '%s WHERE %s' % (select, restr) +def rdef_table_column(rdef): + """return table and column used to store the given relation definition in + the database + """ + return (SQL_PREFIX + str(rdef.subject), + SQL_PREFIX + str(rdef.rtype)) + +def rdef_physical_info(dbhelper, rdef): + """return backend type and a boolean flag if NULL values should be allowed + for a given relation definition + """ + if rdef.object.final: + ttype = rdef.object + else: + ttype = 'Int' # eid type + coltype = y2sql.type_from_constraints(dbhelper, ttype, + rdef.constraints, creating=False) + allownull = rdef.cardinality[0] != '1' + return coltype, allownull class UndoException(Exception): """something went wrong during undoing""" @@ -193,7 +215,7 @@ 'default': 'postgres', # XXX use choice type 'help': 'database driver (postgres, mysql, sqlite, sqlserver2005)', - 'group': 'native-source', 'level': 1, + 'group': 'native-source', 'level': 0, }), ('db-host', {'type' : 'string', @@ -488,7 +510,7 @@ def manual_insert(self, results, table, session): """insert given result into a temporary table on the system source""" if server.DEBUG & server.DBG_RQL: - print ' manual insertion of', results, 'into', table + print ' manual insertion of', len(results), 'results into', table if not results: return query_args = ['%%(%s)s' % i for i in xrange(len(results[0]))] @@ -649,6 +671,21 @@ 
self.critical('transaction has been rollbacked') except: pass + if ex.__class__.__name__ == 'IntegrityError': + # need string comparison because of various backends + for arg in ex.args: + mo = re.search('unique_cw_[^ ]+_idx', arg) + if mo is not None: + index_name = mo.group(0) + elements = index_name.rstrip('_idx').split('_cw_')[1:] + etype = elements[0] + rtypes = elements[1:] + raise UniqueTogetherError(etype, rtypes) + mo = re.search('columns (.*) are not unique', arg) + if mo is not None: # sqlite in use + rtypes = [c.strip().lstrip('cw_') for c in mo.group(1).split(',')] + etype = '???' + raise UniqueTogetherError(etype, rtypes) raise return cursor @@ -678,6 +715,47 @@ # short cut to method requiring advanced db helper usage ################## + def update_rdef_column(self, session, rdef): + """update physical column for a relation definition (final or inlined) + """ + table, column = rdef_table_column(rdef) + coltype, allownull = rdef_physical_info(self.dbhelper, rdef) + if not self.dbhelper.alter_column_support: + self.error("backend can't alter %s.%s to %s%s", table, column, coltype, + not allownull and 'NOT NULL' or '') + return + self.dbhelper.change_col_type(LogCursor(session.pool[self.uri]), + table, column, coltype, allownull) + self.info('altered %s.%s: now %s%s', table, column, coltype, + not allownull and 'NOT NULL' or '') + + def update_rdef_null_allowed(self, session, rdef): + """update NULL / NOT NULL of physical column for a relation definition + (final or inlined) + """ + if not self.dbhelper.alter_column_support: + # not supported (and NOT NULL not set by yams in that case, so no + # worry) + return + table, column = rdef_table_column(rdef) + coltype, allownull = rdef_physical_info(self.dbhelper, rdef) + self.dbhelper.set_null_allowed(LogCursor(session.pool[self.uri]), + table, column, coltype, allownull) + + def update_rdef_indexed(self, session, rdef): + table, column = rdef_table_column(rdef) + if rdef.indexed: + self.create_index(session, table, column) + else: + self.drop_index(session, table, column) + + def update_rdef_unique(self, session, rdef): + table, column = rdef_table_column(rdef) + if rdef.constraint_by_type('UniqueConstraint'): + self.create_index(session, table, column, unique=True) + else: + self.drop_index(session, table, column, unique=True) + def create_index(self, session, table, column, unique=False): cursor = LogCursor(session.pool[self.uri]) self.dbhelper.create_index(cursor, table, column, unique) @@ -686,14 +764,6 @@ cursor = LogCursor(session.pool[self.uri]) self.dbhelper.drop_index(cursor, table, column, unique) - def change_col_type(self, session, table, column, coltype, null_allowed): - cursor = LogCursor(session.pool[self.uri]) - self.dbhelper.change_col_type(cursor, table, column, coltype, null_allowed) - - def set_null_allowed(self, session, table, column, coltype, null_allowed): - cursor = LogCursor(session.pool[self.uri]) - self.dbhelper.set_null_allowed(cursor, table, column, coltype, null_allowed) - # system source interface ################################################# def eid_type_source(self, session, eid): @@ -801,7 +871,6 @@ cnx.commit() return eid - def add_info(self, session, entity, source, extid, complete): """add type and source info for an eid into the system table""" # begin by inserting eid/type/source/extid into the entities table @@ -1079,10 +1148,10 @@ entity[rtype] = unicode(value, session.encoding, 'replace') else: entity[rtype] = value - entity.set_eid(eid) + entity.eid = eid 
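For reference, the IntegrityError handling above relies on the naming convention of unique-together indexes; a minimal illustration of that parsing, using a hypothetical index name built from the Personne nom/prenom example schema:

    index_name = 'unique_cw_Personne_cw_nom_cw_prenom_idx'
    elements = index_name.rstrip('_idx').split('_cw_')[1:]  # ['Personne', 'nom', 'prenom']
    etype, rtypes = elements[0], elements[1:]
    assert (etype, rtypes) == ('Personne', ['nom', 'prenom'])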
session.repo.init_entity_caches(session, entity, self) entity.edited_attributes = set(entity) - entity.check() + entity._cw_check() self.repo.hm.call_hooks('before_add_entity', session, entity=entity) # restore the entity action.changes['cw_eid'] = eid @@ -1149,7 +1218,7 @@ return [session._( "Can't undo creation of entity %(eid)s of type %(etype)s, type " "no more supported" % {'eid': eid, 'etype': etype})] - entity.set_eid(eid) + entity.eid = eid # for proper eid/type cache update hook.set_operation(session, 'pendingeids', eid, CleanupDeletedEidsCacheOp) @@ -1237,7 +1306,8 @@ try: # use cursor_index_object, not cursor_reindex_object since # unindexing done in the FTIndexEntityOp - self.dbhelper.cursor_index_object(entity.eid, entity, + self.dbhelper.cursor_index_object(entity.eid, + entity.cw_adapt_to('IFTIndexable'), session.pool['system']) except Exception: # let KeyboardInterrupt / SystemExit propagate self.exception('error while reindexing %s', entity) @@ -1262,7 +1332,8 @@ # processed return done.add(eid) - for container in session.entity_from_eid(eid).fti_containers(): + iftindexable = session.entity_from_eid(eid).cw_adapt_to('IFTIndexable') + for container in iftindexable.fti_containers(): source.fti_unindex_entity(session, container.eid) source.fti_index_entity(session, container) diff -r f4d1d5d9ccbb -r 90f2f20367bc server/sources/pyrorql.py --- a/server/sources/pyrorql.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/sources/pyrorql.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""Source to query another RQL repository using pyro +"""Source to query another RQL repository using pyro""" -""" __docformat__ = "restructuredtext en" import threading @@ -44,6 +43,34 @@ select, col = union.locate_subquery(col, etype, args) return getattr(select.selection[col], 'uidtype', None) +def load_mapping_file(mappingfile): + mapping = {} + execfile(mappingfile, mapping) + for junk in ('__builtins__', '__doc__'): + mapping.pop(junk, None) + mapping.setdefault('support_relations', {}) + mapping.setdefault('dont_cross_relations', set()) + mapping.setdefault('cross_relations', set()) + + # do some basic checks of the mapping content + assert 'support_entities' in mapping, \ + 'mapping file should at least define support_entities' + assert isinstance(mapping['support_entities'], dict) + assert isinstance(mapping['support_relations'], dict) + assert isinstance(mapping['dont_cross_relations'], set) + assert isinstance(mapping['cross_relations'], set) + unknown = set(mapping) - set( ('support_entities', 'support_relations', + 'dont_cross_relations', 'cross_relations') ) + assert not unknown, 'unknown mapping attribute(s): %s' % unknown + # relations that are necessarily not crossed + mapping['dont_cross_relations'] |= set(('owned_by', 'created_by')) + for rtype in ('is', 'is_instance_of'): + assert rtype not in mapping['dont_cross_relations'], \ + '%s relation should not be in dont_cross_relations' % rtype + assert rtype not in mapping['support_relations'], \ + '%s relation should not be in support_relations' % rtype + return mapping + class ReplaceByInOperator(Exception): def __init__(self, eids): @@ -59,8 +86,6 @@ # migration connect_for_migration = False - support_entities = None - options = ( # XXX pyro-ns host/port ('pyro-ns-id', @@ -127,12 +152,11 @@ mappingfile = source_config['mapping-file'] if not mappingfile[0] == '/': mappingfile = join(repo.config.apphome, mappingfile) - 
mapping = {} - execfile(mappingfile, mapping) + mapping = load_mapping_file(mappingfile) self.support_entities = mapping['support_entities'] - self.support_relations = mapping.get('support_relations', {}) - self.dont_cross_relations = mapping.get('dont_cross_relations', ()) - self.cross_relations = mapping.get('cross_relations', ()) + self.support_relations = mapping['support_relations'] + self.dont_cross_relations = mapping['dont_cross_relations'] + self.cross_relations = mapping['cross_relations'] baseurl = source_config.get('base-url') if baseurl and not baseurl.endswith('/'): source_config['base-url'] += '/' @@ -173,7 +197,8 @@ """method called by the repository once ready to handle request""" interval = int(self.config.get('synchronization-interval', 5*60)) self.repo.looping_task(interval, self.synchronize) - self.repo.looping_task(self._query_cache.ttl.seconds/10, self._query_cache.clear_expired) + self.repo.looping_task(self._query_cache.ttl.seconds/10, + self._query_cache.clear_expired) def synchronize(self, mtime=None): """synchronize content known by this repository with content in the diff -r f4d1d5d9ccbb -r 90f2f20367bc server/sources/rql2sql.py --- a/server/sources/rql2sql.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/sources/rql2sql.py Wed Nov 03 16:38:28 2010 +0100 @@ -101,15 +101,12 @@ subquery. This function check this and rewrite the rql syntax tree if necessary (in place). Return a boolean telling if the tree has been modified """ - torewrite = set() modified = False for varname in tuple(unstable): var = select.defined_vars[varname] if not var.stinfo.get('optrelations'): continue - modified = True unstable.remove(varname) - torewrite.add(var) newselect = Select() newselect.need_distinct = False myunion = Union() @@ -139,10 +136,17 @@ var.stinfo['rhsrelations'].add(newrel) if rel.optional in ('right', 'both'): var.add_optional_relation(newrel) + if not select.where and not modified: + # oops, generated the same thing as the original select.... 
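As a reference point, a minimal mapping file accepted by load_mapping_file() could look like the sketch below; support_entities is the only mandatory key, the other keys default to empty values, and the entity/relation names used here are purely illustrative:

    # dictionaries of supported entity / relation types (boolean flag per name)
    support_entities = {'CWUser': True, 'Card': True}
    support_relations = {'use_email': True}
    # relations explicitly crossable (or not) between the external and local sources
    dont_cross_relations = set(('in_group',))
    cross_relations = set(('see_also',))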
+ # restore original query, else we'll indefinitly loop + for var, rel in towrap_rels: + select.add_restriction(rel) + continue + modified = True # extract subquery solutions mysolutions = [sol.copy() for sol in solutions] cleanup_solutions(newselect, mysolutions) - newselect.set_possible_types(solutions) + newselect.set_possible_types(mysolutions) # full sub-query aliases = [VariableRef(select.get_variable(avar.name, i)) for i, avar in enumerate(newselect.selection)] @@ -611,12 +615,14 @@ sql += '\nHAVING %s' % having # sort if sorts: - sql += '\nORDER BY %s' % ','.join(self._sortterm_sql(sortterm, - fselectidx) - for sortterm in sorts) - if fneedwrap: - selection = ['T1.C%s' % i for i in xrange(len(origselection))] - sql = 'SELECT %s FROM (%s) AS T1' % (','.join(selection), sql) + sqlsortterms = [self._sortterm_sql(sortterm, fselectidx) + for sortterm in sorts] + sqlsortterms = [x for x in sqlsortterms if x is not None] + if sqlsortterms: + sql += '\nORDER BY %s' % ','.join(sqlsortterms) + if sorts and fneedwrap: + selection = ['T1.C%s' % i for i in xrange(len(origselection))] + sql = 'SELECT %s FROM (%s) AS T1' % (','.join(selection), sql) state.finalize_source_cbs() finally: select.selection = origselection @@ -696,12 +702,14 @@ def _sortterm_sql(self, sortterm, selectidx): term = sortterm.term try: - sqlterm = str(selectidx.index(str(term)) + 1) + sqlterm = selectidx.index(str(term)) + 1 except ValueError: # Constant node or non selected term - sqlterm = str(term.accept(self)) + sqlterm = term.accept(self) + if sqlterm is None: + return None if sortterm.asc: - return sqlterm + return str(sqlterm) else: return '%s DESC' % sqlterm @@ -753,6 +761,8 @@ restrictions.append(restriction) restriction = ' AND '.join(restrictions) if not restriction: + if tables: + return 'SELECT 1 FROM %s' % ', '.join(tables) return '' if not tables: # XXX could leave surrounding EXISTS() in this case no? @@ -814,26 +824,35 @@ def _visit_inlined_relation(self, relation): lhsvar, _, rhsvar, rhsconst = relation_info(relation) - # we are sure here to have a lhsvar - assert lhsvar is not None - if isinstance(relation.parent, Not) \ - and len(lhsvar.stinfo['relations']) > 1 \ - and (rhsvar is not None and rhsvar._q_invariant): + # we are sure lhsvar is not None + lhssql = self._inlined_var_sql(lhsvar, relation.r_type) + if rhsvar is None: + moresql = None + else: + moresql = self._extra_join_sql(relation, lhssql, rhsvar) + if isinstance(relation.parent, Not): self._state.done.add(relation.parent) - return '%s IS NULL' % self._inlined_var_sql(lhsvar, relation.r_type) - lhssql = self._inlined_var_sql(lhsvar, relation.r_type) - if rhsconst is not None: - return '%s=%s' % (lhssql, rhsconst.accept(self)) - if isinstance(rhsvar, Variable) and not rhsvar.name in self._varmap: + if rhsvar is not None and rhsvar._q_invariant: + sql = '%s IS NULL' % lhssql + else: + # column != 1234 may not get back rows where column is NULL... 
+ sql = '(%s IS NULL OR %s!=%s)' % ( + lhssql, lhssql, (rhsvar or rhsconst).accept(self)) + elif rhsconst is not None: + sql = '%s=%s' % (lhssql, rhsconst.accept(self)) + elif isinstance(rhsvar, Variable) and rhsvar._q_invariant and \ + not rhsvar.name in self._varmap: # if the rhs variable is only linked to this relation, this mean we # only want the relation to exists, eg NOT NULL in case of inlined # relation - if rhsvar._q_invariant: - sql = self._extra_join_sql(relation, lhssql, rhsvar) - if sql: - return sql - return '%s IS NOT NULL' % lhssql - return '%s=%s' % (lhssql, rhsvar.accept(self)) + if moresql is not None: + return moresql + return '%s IS NOT NULL' % lhssql + else: + sql = '%s=%s' % (lhssql, rhsvar.accept(self)) + if moresql is None: + return sql + return '%s AND %s' % (sql, moresql) def _process_relation_term(self, relation, rid, termvar, termconst, relfield): if termconst or not termvar._q_invariant: @@ -845,7 +864,7 @@ termsql = termvar.accept(self) yield '%s.%s=%s' % (rid, relfield, termsql) extrajoin = self._extra_join_sql(relation, '%s.%s' % (rid, relfield), termvar) - if extrajoin: + if extrajoin is not None: yield extrajoin def _visit_relation(self, relation, rschema): @@ -1060,7 +1079,8 @@ not_ = True else: not_ = False - return self.dbhelper.fti_restriction_sql(alias, const.eval(self._args), + query = const.eval(self._args) + return self.dbhelper.fti_restriction_sql(alias, query, jointo, not_) + restriction def visit_comparison(self, cmp): @@ -1104,6 +1124,15 @@ def visit_function(self, func): """generate SQL name for a function""" + if func.name == 'FTIRANK': + try: + rel = iter(func.children[0].variable.stinfo['ftirels']).next() + except KeyError: + raise BadRQLQuery("can't use FTIRANK on variable not used in an" + " 'has_text' relation (eg full-text search)") + const = rel.get_parts()[1].children[0] + return self.dbhelper.fti_rank_order(self._fti_table(rel), + const.eval(self._args)) args = [c.accept(self) for c in func.children] if func in self._state.source_cb_funcs: # function executed as a callback on the source @@ -1114,11 +1143,11 @@ def visit_constant(self, constant): """generate SQL name for a constant""" - value = constant.value if constant.type is None: return 'NULL' + value = constant.value if constant.type == 'Int' and isinstance(constant.parent, SortTerm): - return constant.value + return value if constant.type in ('Date', 'Datetime'): rel = constant.relation() if rel is not None: @@ -1127,13 +1156,15 @@ if constant.type == 'Boolean': value = self.dbhelper.boolean_value(value) if constant.type == 'Substitute': - _id = constant.value - if isinstance(_id, unicode): - _id = _id.encode() + try: + # we may found constant from simplified var in varmap + return self._mapped_term(constant, '%%(%s)s' % value)[0] + except KeyError: + _id = value + if isinstance(_id, unicode): + _id = _id.encode() else: _id = str(id(constant)).replace('-', '', 1) - if isinstance(value, unicode): - value = value.encode(self.dbencoding) self._query_attrs[_id] = value return '%%(%s)s' % _id @@ -1222,7 +1253,7 @@ # no principal defined, relation is necessarily the principal and # so nothing to return here pass - return '' + return None def _temp_table_scope(self, select, table): scope = 9999 @@ -1237,12 +1268,19 @@ break return scope + def _mapped_term(self, term, key): + """return sql and table alias to the `term`, mapped as `key` or raise + KeyError when the key is not found in the varmap + """ + sql = self._varmap[key] + tablealias = sql.split('.', 1)[0] + scope = 
self._temp_table_scope(term.stmt, tablealias) + self.add_table(tablealias, scope=scope) + return sql, tablealias + def _var_info(self, var): try: - sql = self._varmap[var.name] - tablealias = sql.split('.', 1)[0] - scope = self._temp_table_scope(var.stmt, tablealias) - self.add_table(tablealias, scope=scope) + return self._mapped_term(var, var.name) except KeyError: scope = self._state.scopes[var.scope] etype = self._state.solution[var.name] diff -r f4d1d5d9ccbb -r 90f2f20367bc server/sources/storages.py --- a/server/sources/storages.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/sources/storages.py Wed Nov 03 16:38:28 2010 +0100 @@ -18,6 +18,7 @@ """custom storages for the system source""" from os import unlink, path as osp +from contextlib import contextmanager from yams.schema import role_name @@ -93,6 +94,17 @@ return path return None +@contextmanager +def fsimport(session): + present = 'fs_importing' in session.transaction_data + old_value = session.transaction_data.get('fs_importing') + session.transaction_data['fs_importing'] = True + yield + if present: + session.transaction_data['fs_importing'] = old_value + else: + del session.transaction_data['fs_importing'] + class BytesFileSystemStorage(Storage): """store Bytes attribute value on the file system""" @@ -174,7 +186,7 @@ # PIL processing that use filename extension to detect content-type, as # well as providing more understandable file names on the fs. basename = [str(entity.eid), attr] - name = entity.attr_metadata(attr, 'name') + name = entity.cw_attr_metadata(attr, 'name') if name is not None: basename.append(name.encode(self.fsencoding)) fspath = uniquify_path(self.default_directory, '_'.join(basename)) diff -r f4d1d5d9ccbb -r 90f2f20367bc server/sqlutils.py --- a/server/sqlutils.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/sqlutils.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""SQL utilities functions and classes. +"""SQL utilities functions and classes.""" -""" __docformat__ = "restructuredtext en" import os @@ -263,8 +262,7 @@ eschema = entity.e_schema for attr in entity.edited_attributes: value = entity[attr] - rschema = eschema.subjrels[attr] - if rschema.final: + if value is not None and eschema.subjrels[attr].final: atype = str(entity.e_schema.destination(attr)) if atype == 'Boolean': value = self.dbhelper.boolean_value(value) diff -r f4d1d5d9ccbb -r 90f2f20367bc server/ssplanner.py --- a/server/ssplanner.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/ssplanner.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,15 +15,12 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
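A small usage sketch of the fsimport() context manager defined above (session stands for an open repository session and the flag is assumed unset beforehand):

    from cubicweb.server.sources.storages import fsimport
    with fsimport(session):
        # within the block the 'fs_importing' flag is set...
        assert session.transaction_data['fs_importing'] is True
        # ... import entities whose Bytes attributes already live on the file system
    # on exit the previous value of the flag (or its absence) is restored
    assert 'fs_importing' not in session.transaction_data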
-"""plan execution of rql queries on a single source +"""plan execution of rql queries on a single source""" -""" from __future__ import with_statement __docformat__ = "restructuredtext en" -from copy import copy - from rql.stmts import Union, Select from rql.nodes import Constant, Relation @@ -479,7 +476,7 @@ result = [[]] for row in result: # get a new entity definition for this row - edef = copy(base_edef) + edef = base_edef.cw_copy() # complete this entity def using row values index = 0 for rtype, rorder, value in self.rdefs: @@ -487,7 +484,7 @@ value = row[index] index += 1 if rorder == InsertRelationsStep.FINAL: - edef.rql_set_value(rtype, value) + edef._cw_rql_set_value(rtype, value) elif rorder == InsertRelationsStep.RELATION: self.plan.add_relation_def( (edef, rtype, value) ) edef.querier_pending_relations[(rtype, 'subject')] = value @@ -584,7 +581,7 @@ edef = edefs[eid] except KeyError: edefs[eid] = edef = session.entity_from_eid(eid) - edef.rql_set_value(str(rschema), rhsval) + edef._cw_rql_set_value(str(rschema), rhsval) else: repo.glob_add_relation(session, lhsval, str(rschema), rhsval) result[i] = newrow diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/data/migratedapp/schema.py --- a/server/test/data/migratedapp/schema.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/test/data/migratedapp/schema.py Wed Nov 03 16:38:28 2010 +0100 @@ -69,7 +69,7 @@ mydate = Date(default='TODAY') shortpara = String(maxsize=64) ecrit_par = SubjectRelation('Personne', constraints=[RQLConstraint('S concerne A, O concerne A')]) - attachment = SubjectRelation(('File', 'Image')) + attachment = SubjectRelation('File') class Text(Para): __specializes_schema__ = True @@ -101,6 +101,7 @@ class Personne(EntityType): + __unique_together__ = [('nom', 'prenom', 'datenaiss')] nom = String(fulltextindexed=True, required=True, maxsize=64) prenom = String(fulltextindexed=True, maxsize=64) civility = String(maxsize=1, default='M', fulltextindexed=True) @@ -126,7 +127,6 @@ 'delete': ('managers', 'owners'), 'add': ('managers', 'users',) } - nom = String(maxsize=64, fulltextindexed=True) web = String(maxsize=128) tel = Int() @@ -138,6 +138,9 @@ cp = String(maxsize=12) ville= String(maxsize=32) +class same_as(RelationDefinition): + subject = ('Societe',) + object = 'ExternalUri' class evaluee(RelationDefinition): subject = ('Personne', 'CWUser', 'Societe') diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/data/schema.py --- a/server/test/data/schema.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/test/data/schema.py Wed Nov 03 16:38:28 2010 +0100 @@ -19,7 +19,8 @@ from yams.buildobjs import (EntityType, RelationType, RelationDefinition, SubjectRelation, RichString, String, Int, Boolean, Datetime) from yams.constraints import SizeConstraint -from cubicweb.schema import (WorkflowableEntityType, RQLConstraint, +from cubicweb.schema import (WorkflowableEntityType, + RQLConstraint, RQLUniqueConstraint, ERQLExpression, RRQLExpression) class Affaire(WorkflowableEntityType): @@ -92,11 +93,15 @@ }) migrated_from = SubjectRelation('Note') - attachment = SubjectRelation(('File', 'Image')) - inline1 = SubjectRelation('Affaire', inlined=True, cardinality='?*') + attachment = SubjectRelation('File') + inline1 = SubjectRelation('Affaire', inlined=True, cardinality='?*', + constraints=[RQLUniqueConstraint('S type T, S inline1 A1, A1 todo_by C, ' + 'Y type T, Y inline1 A2, A2 todo_by C', + 'S,Y')]) todo_by = SubjectRelation('CWUser') class Personne(EntityType): + __unique_together__ = [('nom', 'prenom', 'inline2')] nom = 
String(fulltextindexed=True, required=True, maxsize=64) prenom = String(fulltextindexed=True, maxsize=64) sexe = String(maxsize=1, default='M', fulltextindexed=True) diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/data/site_cubicweb.py --- a/server/test/data/site_cubicweb.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/test/data/site_cubicweb.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,6 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -""" - -""" from logilab.database import FunctionDescr from logilab.database.sqlite import register_sqlite_pyfunc @@ -25,7 +22,7 @@ try: class DUMB_SORT(FunctionDescr): - supported_backends = ('sqlite',) + pass register_function(DUMB_SORT) def dumb_sort(something): diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/data/sources_fti --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/data/sources_fti Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,14 @@ +[system] + +db-driver = postgres +db-host = localhost +db-port = +adapter = native +db-name = cw_fti_test +db-encoding = UTF-8 +db-user = syt +db-password = syt + +[admin] +login = admin +password = gingkow diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/data/sources_ldap1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/data/sources_ldap1 Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,35 @@ +[system] +adapter=native +# database driver (postgres or sqlite) +db-driver=sqlite +# database host +db-host= +# database name +db-name=tmpdb +# database user +db-user=admin +# database password +db-password=gingkow +# database encoding +db-encoding=utf8 + +[admin] +login = admin +password = gingkow + +[ldapuser] +adapter=ldapuser +# ldap host +host=ldap1 +# base DN to lookup for usres +user-base-dn=ou=People,dc=logilab,dc=fr +# user search scope +user-scope=ONELEVEL +# classes of user +user-classes=top,posixAccount +# attribute used as login on authentication +user-login-attr=uid +# name of a group in which ldap users will be by default +user-default-group=users +# map from ldap user attributes to cubicweb attributes +user-attrs-map=gecos:email,uid:login diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/data/sources_ldap2 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/data/sources_ldap2 Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,35 @@ +[system] +adapter=native +# database driver (postgres or sqlite) +db-driver=sqlite +# database host +db-host= +# database name +db-name=tmpdb +# database user +db-user=admin +# database password +db-password=gingkow +# database encoding +db-encoding=utf8 + +[admin] +login = admin +password = gingkow + +[ldapuser] +adapter=ldapuser +# ldap host +host=ldap1 +# base DN to lookup for usres +user-base-dn=ou=People,dc=logilab,dc=net +# user search scope +user-scope=ONELEVEL +# classes of user +user-classes=top,OpenLDAPperson +# attribute used as login on authentication +user-login-attr=uid +# name of a group in which ldap users will be by default +user-default-group=users +# map from ldap user attributes to cubicweb attributes +user-attrs-map=mail:email,uid:login diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/data/sourcesldap --- a/server/test/data/sourcesldap Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,35 +0,0 @@ -[system] -adapter=native -# database driver (postgres or sqlite) -db-driver=sqlite -# database host -db-host= -# database name -db-name=tmpdb -# database user -db-user=admin -# database password -db-password=gingkow -# database encoding 
-db-encoding=utf8 - -[admin] -login = admin -password = gingkow - -[ldapuser] -adapter=ldapuser -# ldap host -host=ldap1 -# base DN to lookup for usres -user-base-dn=ou=People,dc=logilab,dc=fr -# user search scope -user-scope=ONELEVEL -# classes of user -user-classes=top,posixAccount -# attribute used as login on authentication -user-login-attr=uid -# name of a group in which ldap users will be by default -user-default-group=users -# map from ldap user attributes to cubicweb attributes -user-attrs-map=gecos:email,uid:login diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/unittest_fti.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/unittest_fti.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,63 @@ +from __future__ import with_statement + +import socket + +from cubicweb.devtools import ApptestConfiguration +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.selectors import is_instance +from cubicweb.entities.adapters import IFTIndexableAdapter + +class PostgresFTITC(CubicWebTC): + config = ApptestConfiguration('data', sourcefile='sources_fti') + + def setUp(self): + if not socket.gethostname().endswith('.logilab.fr'): + self.skipTest('XXX require logilab configuration') + super(PostgresFTITC, self).setUp() + + def test_occurence_count(self): + req = self.request() + c1 = req.create_entity('Card', title=u'c1', + content=u'cubicweb cubicweb cubicweb') + c2 = req.create_entity('Card', title=u'c3', + content=u'cubicweb') + c3 = req.create_entity('Card', title=u'c2', + content=u'cubicweb cubicweb') + self.commit() + self.assertEqual(req.execute('Card X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"').rows, + [[c1.eid], [c3.eid], [c2.eid]]) + + + def test_attr_weight(self): + class CardIFTIndexableAdapter(IFTIndexableAdapter): + __select__ = is_instance('Card') + attr_weight = {'title': 'A'} + with self.temporary_appobjects(CardIFTIndexableAdapter): + req = self.request() + c1 = req.create_entity('Card', title=u'c1', + content=u'cubicweb cubicweb cubicweb') + c2 = req.create_entity('Card', title=u'c2', + content=u'cubicweb cubicweb') + c3 = req.create_entity('Card', title=u'cubicweb', + content=u'autre chose') + self.commit() + self.assertEqual(req.execute('Card X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"').rows, + [[c3.eid], [c1.eid], [c2.eid]]) + + + def test_entity_weight(self): + class PersonneIFTIndexableAdapter(IFTIndexableAdapter): + __select__ = is_instance('Personne') + entity_weight = 2.0 + with self.temporary_appobjects(PersonneIFTIndexableAdapter): + req = self.request() + c1 = req.create_entity('Personne', nom=u'c1', prenom=u'cubicweb') + c2 = req.create_entity('Comment', content=u'cubicweb cubicweb', comments=c1) + c3 = req.create_entity('Comment', content=u'cubicweb cubicweb cubicweb', comments=c1) + self.commit() + self.assertEqual(req.execute('Any X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"').rows, + [[c1.eid], [c3.eid], [c2.eid]]) + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/unittest_hook.py --- a/server/test/unittest_hook.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/test/unittest_hook.py Wed Nov 03 16:38:28 2010 +0100 @@ -23,7 +23,6 @@ from cubicweb.devtools import TestServerConfiguration from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.selectors import implements from cubicweb.server import hook from cubicweb.hooks import integrity, syncschema @@ -48,7 +47,7 @@ l1 = hook.LateOperation(session) l2 = 
hook.LateOperation(session) l3 = hook.Operation(session) - self.assertEquals(session.pending_operations, [l3, l1, l2]) + self.assertEqual(session.pending_operations, [l3, l1, l2]) @clean_session_ops def test_single_last_operation(self): @@ -57,21 +56,21 @@ l1 = hook.LateOperation(session) l2 = hook.LateOperation(session) l3 = hook.Operation(session) - self.assertEquals(session.pending_operations, [l3, l1, l2, l0]) + self.assertEqual(session.pending_operations, [l3, l1, l2, l0]) l4 = hook.SingleLastOperation(session) - self.assertEquals(session.pending_operations, [l3, l1, l2, l4]) + self.assertEqual(session.pending_operations, [l3, l1, l2, l4]) @clean_session_ops def test_global_operation_order(self): session = self.session op1 = integrity._DelayedDeleteOp(session) - op2 = syncschema.MemSchemaRDefDel(session) + op2 = syncschema.RDefDelOp(session) # equivalent operation generated by op2 but replace it here by op3 so we # can check the result... op3 = syncschema.MemSchemaNotifyChanges(session) op4 = integrity._DelayedDeleteOp(session) op5 = integrity._CheckORelationOp(session) - self.assertEquals(session.pending_operations, [op1, op2, op4, op5, op3]) + self.assertEqual(session.pending_operations, [op1, op2, op4, op5, op3]) class HookCalled(Exception): pass @@ -103,19 +102,19 @@ class _Hook(hook.Hook): events = ('before_add_entiti',) ex = self.assertRaises(Exception, self.o.register, _Hook) - self.assertEquals(str(ex), 'bad event before_add_entiti on %s._Hook' % __name__) + self.assertEqual(str(ex), 'bad event before_add_entiti on %s._Hook' % __name__) def test_register_bad_hook2(self): class _Hook(hook.Hook): events = None ex = self.assertRaises(Exception, self.o.register, _Hook) - self.assertEquals(str(ex), 'bad .events attribute None on %s._Hook' % __name__) + self.assertEqual(str(ex), 'bad .events attribute None on %s._Hook' % __name__) def test_register_bad_hook3(self): class _Hook(hook.Hook): events = 'before_add_entity' ex = self.assertRaises(Exception, self.o.register, _Hook) - self.assertEquals(str(ex), 'bad event b on %s._Hook' % __name__) + self.assertEqual(str(ex), 'bad event b on %s._Hook' % __name__) def test_call_hook(self): self.o.register(AddAnyHook) @@ -139,17 +138,17 @@ def test_startup_shutdown(self): import hooks # cubicweb/server/test/data/hooks.py - self.assertEquals(hooks.CALLED_EVENTS['server_startup'], True) + self.assertEqual(hooks.CALLED_EVENTS['server_startup'], True) # don't actually call repository.shutdown ! 
self.repo.hm.call_hooks('server_shutdown', repo=self.repo) - self.assertEquals(hooks.CALLED_EVENTS['server_shutdown'], True) + self.assertEqual(hooks.CALLED_EVENTS['server_shutdown'], True) def test_session_open_close(self): import hooks # cubicweb/server/test/data/hooks.py cnx = self.login('anon') - self.assertEquals(hooks.CALLED_EVENTS['session_open'], 'anon') + self.assertEqual(hooks.CALLED_EVENTS['session_open'], 'anon') cnx.close() - self.assertEquals(hooks.CALLED_EVENTS['session_close'], 'anon') + self.assertEqual(hooks.CALLED_EVENTS['session_close'], 'anon') # class RelationHookTC(TestCase): @@ -163,30 +162,30 @@ # """make sure before_xxx_relation hooks are called directly""" # self.o.register(self._before_relation_hook, # 'before_add_relation', 'concerne') -# self.assertEquals(self.called, []) +# self.assertEqual(self.called, []) # self.o.call_hooks('before_add_relation', 'concerne', 'USER', # 1, 'concerne', 2) -# self.assertEquals(self.called, [(1, 'concerne', 2)]) +# self.assertEqual(self.called, [(1, 'concerne', 2)]) # def test_after_add_relation(self): # """make sure after_xxx_relation hooks are deferred""" # self.o.register(self._after_relation_hook, # 'after_add_relation', 'concerne') -# self.assertEquals(self.called, []) +# self.assertEqual(self.called, []) # self.o.call_hooks('after_add_relation', 'concerne', 'USER', # 1, 'concerne', 2) # self.o.call_hooks('after_add_relation', 'concerne', 'USER', # 3, 'concerne', 4) -# self.assertEquals(self.called, [(1, 'concerne', 2), (3, 'concerne', 4)]) +# self.assertEqual(self.called, [(1, 'concerne', 2), (3, 'concerne', 4)]) # def test_before_delete_relation(self): # """make sure before_xxx_relation hooks are called directly""" # self.o.register(self._before_relation_hook, # 'before_delete_relation', 'concerne') -# self.assertEquals(self.called, []) +# self.assertEqual(self.called, []) # self.o.call_hooks('before_delete_relation', 'concerne', 'USER', # 1, 'concerne', 2) -# self.assertEquals(self.called, [(1, 'concerne', 2)]) +# self.assertEqual(self.called, [(1, 'concerne', 2)]) # def test_after_delete_relation(self): # """make sure after_xxx_relation hooks are deferred""" @@ -196,7 +195,7 @@ # 1, 'concerne', 2) # self.o.call_hooks('after_delete_relation', 'concerne', 'USER', # 3, 'concerne', 4) -# self.assertEquals(self.called, [(1, 'concerne', 2), (3, 'concerne', 4)]) +# self.assertEqual(self.called, [(1, 'concerne', 2), (3, 'concerne', 4)]) # def _before_relation_hook(self, pool, subject, r_type, object): diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/unittest_ldapuser.py --- a/server/test/unittest_ldapuser.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/test/unittest_ldapuser.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""cubicweb.server.sources.ldapusers unit and functional tests - -""" +"""cubicweb.server.sources.ldapusers unit and functional tests""" import socket @@ -30,10 +28,14 @@ if '17.1' in socket.gethostbyname('ldap1'): SYT = 'syt' + SYT_EMAIL = 'Sylvain Thenault' ADIM = 'adim' + SOURCESFILE = 'data/sources_ldap1' else: SYT = 'sthenault' + SYT_EMAIL = 'sylvain.thenault@logilab.fr' ADIM = 'adimascio' + SOURCESFILE = 'data/sources_ldap2' def nopwd_authenticate(self, session, login, password): @@ -59,7 +61,7 @@ class LDAPUserSourceTC(CubicWebTC): config = TestServerConfiguration('data') - config.sources_file = lambda : 'data/sourcesldap' + config.sources_file = lambda: SOURCESFILE def patch_authenticate(self): self._orig_authenticate = LDAPUserSource.authenticate @@ -92,19 +94,19 @@ def test_base(self): # check a known one e = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT}).get_entity(0, 0) - self.assertEquals(e.login, SYT) + self.assertEqual(e.login, SYT) e.complete() - self.assertEquals(e.creation_date, None) - self.assertEquals(e.modification_date, None) - self.assertEquals(e.firstname, None) - self.assertEquals(e.surname, None) - self.assertEquals(e.in_group[0].name, 'users') - self.assertEquals(e.owned_by[0].login, SYT) - self.assertEquals(e.created_by, ()) - self.assertEquals(e.primary_email[0].address, 'Sylvain Thenault') + self.assertEqual(e.creation_date, None) + self.assertEqual(e.modification_date, None) + self.assertEqual(e.firstname, None) + self.assertEqual(e.surname, None) + self.assertEqual(e.in_group[0].name, 'users') + self.assertEqual(e.owned_by[0].login, SYT) + self.assertEqual(e.created_by, ()) + self.assertEqual(e.primary_email[0].address, SYT_EMAIL) # email content should be indexed on the user rset = self.sexecute('CWUser X WHERE X has_text "thenault"') - self.assertEquals(rset.rows, [[e.eid]]) + self.assertEqual(rset.rows, [[e.eid]]) def test_not(self): eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0] @@ -117,16 +119,16 @@ aeid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': ADIM})[0][0] rset = self.sexecute('CWUser X, Y WHERE X login %(syt)s, Y login %(adim)s', {'syt': SYT, 'adim': ADIM}) - self.assertEquals(rset.rows, [[seid, aeid]]) + self.assertEqual(rset.rows, [[seid, aeid]]) rset = self.sexecute('Any X,Y,L WHERE X login L, X login %(syt)s, Y login %(adim)s', {'syt': SYT, 'adim': ADIM}) - self.assertEquals(rset.rows, [[seid, aeid, SYT]]) + self.assertEqual(rset.rows, [[seid, aeid, SYT]]) def test_in(self): seid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0] aeid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': ADIM})[0][0] rset = self.sexecute('Any X,L ORDERBY L WHERE X login IN("%s", "%s"), X login L' % (SYT, ADIM)) - self.assertEquals(rset.rows, [[aeid, ADIM], [seid, SYT]]) + self.assertEqual(rset.rows, [[aeid, ADIM], [seid, SYT]]) def test_relations(self): eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0] @@ -144,28 +146,28 @@ def test_upper(self): eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0] rset = self.sexecute('Any UPPER(L) WHERE X eid %s, X login L' % eid) - self.assertEquals(rset[0][0], SYT.upper()) + self.assertEqual(rset[0][0], SYT.upper()) def test_unknown_attr(self): eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT})[0][0] rset = self.sexecute('Any L,C,M WHERE X eid %s, X login L, ' 'X creation_date C, X modification_date M' % eid) - self.assertEquals(rset[0][0], SYT) - 
self.assertEquals(rset[0][1], None) - self.assertEquals(rset[0][2], None) + self.assertEqual(rset[0][0], SYT) + self.assertEqual(rset[0][1], None) + self.assertEqual(rset[0][2], None) def test_sort(self): logins = [l for l, in self.sexecute('Any L ORDERBY L WHERE X login L')] - self.assertEquals(logins, sorted(logins)) + self.assertEqual(logins, sorted(logins)) def test_lower_sort(self): logins = [l for l, in self.sexecute('Any L ORDERBY lower(L) WHERE X login L')] - self.assertEquals(logins, sorted(logins)) + self.assertEqual(logins, sorted(logins)) def test_or(self): rset = self.sexecute('DISTINCT Any X WHERE X login %(login)s OR (X in_group G, G name "managers")', {'login': SYT}) - self.assertEquals(len(rset), 2, rset.rows) # syt + admin + self.assertEqual(len(rset), 2, rset.rows) # syt + admin def test_nonregr_set_owned_by(self): # test that when a user coming from ldap is triggering a transition @@ -173,29 +175,30 @@ self.sexecute('SET X in_group G WHERE X login %(syt)s, G name "managers"', {'syt': SYT}) self.commit() syt = self.sexecute('CWUser X WHERE X login %(login)s', {'login': SYT}).get_entity(0, 0) - self.assertEquals([g.name for g in syt.in_group], ['managers', 'users']) + self.assertEqual([g.name for g in syt.in_group], ['managers', 'users']) self.patch_authenticate() cnx = self.login(SYT, password='dummypassword') cu = cnx.cursor() adim = cu.execute('CWUser X WHERE X login %(login)s', {'login': ADIM}).get_entity(0, 0) - adim.fire_transition('deactivate') + iworkflowable = adim.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') try: cnx.commit() adim.clear_all_caches() - self.assertEquals(adim.in_state[0].name, 'deactivated') - trinfo = adim.latest_trinfo() - self.assertEquals(trinfo.owned_by[0].login, SYT) + self.assertEqual(adim.in_state[0].name, 'deactivated') + trinfo = iworkflowable.latest_trinfo() + self.assertEqual(trinfo.owned_by[0].login, SYT) # select from_state to skip the user's creation TrInfo rset = self.sexecute('Any U ORDERBY D DESC WHERE WF wf_info_for X,' 'WF creation_date D, WF from_state FS,' 'WF owned_by U?, X eid %(x)s', {'x': adim.eid}) - self.assertEquals(rset.rows, [[syt.eid]]) + self.assertEqual(rset.rows, [[syt.eid]]) finally: # restore db state self.restore_connection() adim = self.sexecute('CWUser X WHERE X login %(login)s', {'login': ADIM}).get_entity(0, 0) - adim.fire_transition('activate') + adim.cw_adapt_to('IWorkflowable').fire_transition('activate') self.sexecute('DELETE X in_group G WHERE X login %(syt)s, G name "managers"', {'syt': SYT}) def test_same_column_names(self): @@ -212,14 +215,14 @@ self.sexecute('SET U in_group G WHERE G name ~= "bougloup%", U login "admin"') self.sexecute('SET U in_group G WHERE G name = "bougloup1", U login %(syt)s', {'syt': SYT}) rset = self.sexecute('Any L,SN ORDERBY L WHERE X in_state S, S name SN, X login L, EXISTS(X in_group G, G name ~= "bougloup%")') - self.assertEquals(rset.rows, [['admin', 'activated'], [SYT, 'activated']]) + self.assertEqual(rset.rows, [['admin', 'activated'], [SYT, 'activated']]) def test_exists2(self): self.create_user('comme') self.create_user('cochon') self.sexecute('SET X copain Y WHERE X login "comme", Y login "cochon"') rset = self.sexecute('Any GN ORDERBY GN WHERE X in_group G, G name GN, (G name "managers" OR EXISTS(X copain T, T login in ("comme", "cochon")))') - self.assertEquals(rset.rows, [['managers'], ['users']]) + self.assertEqual(rset.rows, [['managers'], ['users']]) def test_exists3(self): self.create_user('comme') @@ -229,7 +232,7 @@ 
self.sexecute('SET X copain Y WHERE X login %(syt)s, Y login "cochon"', {'syt': SYT}) self.failUnless(self.sexecute('Any X, Y WHERE X copain Y, X login %(syt)s, Y login "cochon"', {'syt': SYT})) rset = self.sexecute('Any GN,L WHERE X in_group G, X login L, G name GN, G name "managers" OR EXISTS(X copain T, T login in ("comme", "cochon"))') - self.assertEquals(sorted(rset.rows), [['managers', 'admin'], ['users', 'comme'], ['users', SYT]]) + self.assertEqual(sorted(rset.rows), [['managers', 'admin'], ['users', 'comme'], ['users', SYT]]) def test_exists4(self): self.create_user('comme') @@ -251,7 +254,7 @@ all = self.sexecute('Any GN, L WHERE X in_group G, X login L, G name GN') all.rows.remove(['users', 'comme']) all.rows.remove(['users', SYT]) - self.assertEquals(sorted(rset.rows), sorted(all.rows)) + self.assertEqual(sorted(rset.rows), sorted(all.rows)) def test_exists5(self): self.create_user('comme') @@ -264,25 +267,25 @@ rset= self.sexecute('Any L WHERE X login L, ' 'EXISTS(X copain T, T login in ("comme", "cochon")) AND ' 'NOT EXISTS(X copain T2, T2 login "billy")') - self.assertEquals(sorted(rset.rows), [['cochon'], [SYT]]) + self.assertEqual(sorted(rset.rows), [['cochon'], [SYT]]) rset= self.sexecute('Any GN,L WHERE X in_group G, X login L, G name GN, ' 'EXISTS(X copain T, T login in ("comme", "cochon")) AND ' 'NOT EXISTS(X copain T2, T2 login "billy")') - self.assertEquals(sorted(rset.rows), [['guests', 'cochon'], + self.assertEqual(sorted(rset.rows), [['guests', 'cochon'], ['users', 'cochon'], ['users', SYT]]) def test_cd_restriction(self): rset = self.sexecute('CWUser X WHERE X creation_date > "2009-02-01"') # admin/anon but no ldap user since it doesn't support creation_date - self.assertEquals(sorted(e.login for e in rset.entities()), + self.assertEqual(sorted(e.login for e in rset.entities()), ['admin', 'anon']) def test_union(self): afeids = self.sexecute('State X') ueids = self.sexecute('CWUser X') rset = self.sexecute('(Any X WHERE X is State) UNION (Any X WHERE X is CWUser)') - self.assertEquals(sorted(r[0] for r in rset.rows), + self.assertEqual(sorted(r[0] for r in rset.rows), sorted(r[0] for r in afeids + ueids)) def _init_security_test(self): @@ -293,23 +296,23 @@ def test_security1(self): cu = self._init_security_test() rset = cu.execute('CWUser X WHERE X login %(login)s', {'login': SYT}) - self.assertEquals(rset.rows, []) + self.assertEqual(rset.rows, []) rset = cu.execute('Any X WHERE X login "iaminguestsgrouponly"') - self.assertEquals(len(rset.rows), 1) + self.assertEqual(len(rset.rows), 1) def test_security2(self): cu = self._init_security_test() rset = cu.execute('Any X WHERE X has_text %(syt)s', {'syt': SYT}) - self.assertEquals(rset.rows, []) + self.assertEqual(rset.rows, []) rset = cu.execute('Any X WHERE X has_text "iaminguestsgrouponly"') - self.assertEquals(len(rset.rows), 1) + self.assertEqual(len(rset.rows), 1) def test_security3(self): cu = self._init_security_test() rset = cu.execute('Any F WHERE X has_text %(syt)s, X firstname F', {'syt': SYT}) - self.assertEquals(rset.rows, []) + self.assertEqual(rset.rows, []) rset = cu.execute('Any F WHERE X has_text "iaminguestsgrouponly", X firstname F') - self.assertEquals(rset.rows, [[None]]) + self.assertEqual(rset.rows, [[None]]) def test_nonregr1(self): self.sexecute('Any X,AA ORDERBY AA DESC WHERE E eid %(x)s, E owned_by X, ' @@ -350,34 +353,34 @@ def test_count(self): trfunc = GlobTrFunc('count', 0) res = trfunc.apply([[1], [2], [3], [4]]) - self.assertEquals(res, [[4]]) + self.assertEqual(res, [[4]]) 
trfunc = GlobTrFunc('count', 1) res = trfunc.apply([[1, 2], [2, 4], [3, 6], [1, 5]]) - self.assertEquals(res, [[1, 2], [2, 1], [3, 1]]) + self.assertEqual(res, [[1, 2], [2, 1], [3, 1]]) def test_sum(self): trfunc = GlobTrFunc('sum', 0) res = trfunc.apply([[1], [2], [3], [4]]) - self.assertEquals(res, [[10]]) + self.assertEqual(res, [[10]]) trfunc = GlobTrFunc('sum', 1) res = trfunc.apply([[1, 2], [2, 4], [3, 6], [1, 5]]) - self.assertEquals(res, [[1, 7], [2, 4], [3, 6]]) + self.assertEqual(res, [[1, 7], [2, 4], [3, 6]]) def test_min(self): trfunc = GlobTrFunc('min', 0) res = trfunc.apply([[1], [2], [3], [4]]) - self.assertEquals(res, [[1]]) + self.assertEqual(res, [[1]]) trfunc = GlobTrFunc('min', 1) res = trfunc.apply([[1, 2], [2, 4], [3, 6], [1, 5]]) - self.assertEquals(res, [[1, 2], [2, 4], [3, 6]]) + self.assertEqual(res, [[1, 2], [2, 4], [3, 6]]) def test_max(self): trfunc = GlobTrFunc('max', 0) res = trfunc.apply([[1], [2], [3], [4]]) - self.assertEquals(res, [[4]]) + self.assertEqual(res, [[4]]) trfunc = GlobTrFunc('max', 1) res = trfunc.apply([[1, 2], [2, 4], [3, 6], [1, 5]]) - self.assertEquals(res, [[1, 5], [2, 4], [3, 6]]) + self.assertEqual(res, [[1, 5], [2, 4], [3, 6]]) # XXX LDAPUserSourceTC._init_repo() @@ -397,6 +400,8 @@ self.pool = repo._get_pool() session = mock_object(pool=self.pool) self.o = RQL2LDAPFilter(ldapsource, session) + self.ldapclasses = ''.join('(objectClass=%s)' % ldapcls + for ldapcls in ldapsource.user_classes) def tearDown(self): repo._free_pool(self.pool) @@ -404,14 +409,14 @@ def test_base(self): rqlst = self._prepare('CWUser X WHERE X login "toto"').children[0] - self.assertEquals(self.o.generate(rqlst, 'X')[1], - '(&(objectClass=top)(objectClass=posixAccount)(uid=toto))') + self.assertEqual(self.o.generate(rqlst, 'X')[1], + '(&%s(uid=toto))' % self.ldapclasses) def test_kwargs(self): rqlst = self._prepare('CWUser X WHERE X login %(x)s').children[0] self.o._args = {'x': "toto"} - self.assertEquals(self.o.generate(rqlst, 'X')[1], - '(&(objectClass=top)(objectClass=posixAccount)(uid=toto))') + self.assertEqual(self.o.generate(rqlst, 'X')[1], + '(&%s(uid=toto))' % self.ldapclasses) def test_get_attr(self): rqlst = self._prepare('Any X WHERE E firstname X, E eid 12').children[0] diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/unittest_migractions.py --- a/server/test/unittest_migractions.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/test/unittest_migractions.py Wed Nov 03 16:38:28 2010 +0100 @@ -80,11 +80,11 @@ 'RDEF relation_type RT, RDEF ordernum O, RT name RTN')) self.mh.cmd_add_attribute('Note', 'whatever') self.failUnless('whatever' in self.schema) - self.assertEquals(self.schema['whatever'].subjects(), ('Note',)) - self.assertEquals(self.schema['whatever'].objects(), ('Int',)) - self.assertEquals(self.schema['Note'].default('whatever'), 2) + self.assertEqual(self.schema['whatever'].subjects(), ('Note',)) + self.assertEqual(self.schema['whatever'].objects(), ('Int',)) + self.assertEqual(self.schema['Note'].default('whatever'), 2) note = self.execute('Note X').get_entity(0, 0) - self.assertEquals(note.whatever, 2) + self.assertEqual(note.whatever, 2) orderdict2 = dict(self.mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, ' 'RDEF relation_type RT, RDEF ordernum O, RT name RTN')) whateverorder = migrschema['whatever'].rdef('Note', 'Int').order @@ -92,8 +92,8 @@ if v >= whateverorder: orderdict[k] = v+1 orderdict['whatever'] = whateverorder - self.assertDictEquals(orderdict, orderdict2) - #self.assertEquals([r.type for r in 
self.schema['Note'].ordered_relations()], + self.assertDictEqual(orderdict, orderdict2) + #self.assertEqual([r.type for r in self.schema['Note'].ordered_relations()], # ['modification_date', 'creation_date', 'owned_by', # 'eid', 'ecrit_par', 'inline1', 'date', 'type', # 'whatever', 'date', 'in_basket']) @@ -106,12 +106,12 @@ self.failIf('shortpara' in self.schema) self.mh.cmd_add_attribute('Note', 'shortpara') self.failUnless('shortpara' in self.schema) - self.assertEquals(self.schema['shortpara'].subjects(), ('Note', )) - self.assertEquals(self.schema['shortpara'].objects(), ('String', )) + self.assertEqual(self.schema['shortpara'].subjects(), ('Note', )) + self.assertEqual(self.schema['shortpara'].objects(), ('String', )) # test created column is actually a varchar(64) notesql = self.mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' and name='%sNote'" % SQL_PREFIX)[0][0] fields = dict(x.strip().split()[:2] for x in notesql.split('(', 1)[1].rsplit(')', 1)[0].split(',')) - self.assertEquals(fields['%sshortpara' % SQL_PREFIX], 'varchar(64)') + self.assertEqual(fields['%sshortpara' % SQL_PREFIX], 'varchar(64)') self.mh.rollback() def test_add_datetime_with_default_value_attribute(self): @@ -119,15 +119,15 @@ self.failIf('shortpara' in self.schema) self.mh.cmd_add_attribute('Note', 'mydate') self.failUnless('mydate' in self.schema) - self.assertEquals(self.schema['mydate'].subjects(), ('Note', )) - self.assertEquals(self.schema['mydate'].objects(), ('Date', )) + self.assertEqual(self.schema['mydate'].subjects(), ('Note', )) + self.assertEqual(self.schema['mydate'].objects(), ('Date', )) testdate = date(2005, 12, 13) eid1 = self.mh.rqlexec('INSERT Note N')[0][0] eid2 = self.mh.rqlexec('INSERT Note N: N mydate %(mydate)s', {'mydate' : testdate})[0][0] d1 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid1})[0][0] d2 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid2})[0][0] - self.assertEquals(d1, date.today()) - self.assertEquals(d2, testdate) + self.assertEqual(d1, date.today()) + self.assertEqual(d2, testdate) self.mh.rollback() def test_rename_attribute(self): @@ -149,10 +149,10 @@ for etype in ('Personne', 'Email'): s1 = self.mh.rqlexec('Any N WHERE WF workflow_of ET, ET name "%s", WF name N' % etype)[0][0] - self.assertEquals(s1, "foo") + self.assertEqual(s1, "foo") s1 = self.mh.rqlexec('Any N WHERE ET default_workflow WF, ET name "%s", WF name N' % etype)[0][0] - self.assertEquals(s1, "foo") + self.assertEqual(s1, "foo") def test_add_entity_type(self): self.failIf('Folder2' in self.schema) @@ -163,18 +163,18 @@ self.failUnless('filed_under2' in self.schema) self.failUnless(self.execute('CWRType X WHERE X name "filed_under2"')) self.schema.rebuild_infered_relations() - self.assertEquals(sorted(str(rs) for rs in self.schema['Folder2'].subject_relations()), + self.assertEqual(sorted(str(rs) for rs in self.schema['Folder2'].subject_relations()), ['created_by', 'creation_date', 'cwuri', 'description', 'description_format', 'eid', 'filed_under2', 'has_text', 'identity', 'in_basket', 'is', 'is_instance_of', 'modification_date', 'name', 'owned_by']) - self.assertEquals([str(rs) for rs in self.schema['Folder2'].object_relations()], + self.assertEqual([str(rs) for rs in self.schema['Folder2'].object_relations()], ['filed_under2', 'identity']) - self.assertEquals(sorted(str(e) for e in self.schema['filed_under2'].subjects()), + self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()), sorted(str(e) for e in self.schema.entities() if 
not e.final)) - self.assertEquals(self.schema['filed_under2'].objects(), ('Folder2',)) + self.assertEqual(self.schema['filed_under2'].objects(), ('Folder2',)) eschema = self.schema.eschema('Folder2') for cstr in eschema.rdef('name').constraints: self.failUnless(hasattr(cstr, 'eid')) @@ -201,22 +201,22 @@ self.mh.cmd_add_relation_type('filed_under2') self.schema.rebuild_infered_relations() self.failUnless('filed_under2' in self.schema) - self.assertEquals(sorted(str(e) for e in self.schema['filed_under2'].subjects()), + self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()), sorted(str(e) for e in self.schema.entities() if not e.final)) - self.assertEquals(self.schema['filed_under2'].objects(), ('Folder2',)) + self.assertEqual(self.schema['filed_under2'].objects(), ('Folder2',)) self.mh.cmd_drop_relation_type('filed_under2') self.failIf('filed_under2' in self.schema) def test_add_relation_definition_nortype(self): self.mh.cmd_add_relation_definition('Personne', 'concerne2', 'Affaire') - self.assertEquals(self.schema['concerne2'].subjects(), + self.assertEqual(self.schema['concerne2'].subjects(), ('Personne',)) - self.assertEquals(self.schema['concerne2'].objects(), + self.assertEqual(self.schema['concerne2'].objects(), ('Affaire', )) - self.assertEquals(self.schema['concerne2'].rdef('Personne', 'Affaire').cardinality, + self.assertEqual(self.schema['concerne2'].rdef('Personne', 'Affaire').cardinality, '1*') self.mh.cmd_add_relation_definition('Personne', 'concerne2', 'Note') - self.assertEquals(sorted(self.schema['concerne2'].objects()), ['Affaire', 'Note']) + self.assertEqual(sorted(self.schema['concerne2'].objects()), ['Affaire', 'Note']) self.mh.create_entity('Personne', nom=u'tot') self.mh.create_entity('Affaire') self.mh.rqlexec('SET X concerne2 Y WHERE X is Personne, Y is Affaire') @@ -227,59 +227,59 @@ self.failIf('concerne2' in self.schema) def test_drop_relation_definition_existant_rtype(self): - self.assertEquals(sorted(str(e) for e in self.schema['concerne'].subjects()), + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), ['Affaire', 'Personne']) - self.assertEquals(sorted(str(e) for e in self.schema['concerne'].objects()), + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) self.mh.cmd_drop_relation_definition('Personne', 'concerne', 'Affaire') - self.assertEquals(sorted(str(e) for e in self.schema['concerne'].subjects()), + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), ['Affaire']) - self.assertEquals(sorted(str(e) for e in self.schema['concerne'].objects()), + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), ['Division', 'Note', 'Societe', 'SubDivision']) self.mh.cmd_add_relation_definition('Personne', 'concerne', 'Affaire') - self.assertEquals(sorted(str(e) for e in self.schema['concerne'].subjects()), + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), ['Affaire', 'Personne']) - self.assertEquals(sorted(str(e) for e in self.schema['concerne'].objects()), + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) # trick: overwrite self.maxeid to avoid deletion of just reintroduced types self.maxeid = self.execute('Any MAX(X)')[0][0] def test_drop_relation_definition_with_specialization(self): - self.assertEquals(sorted(str(e) for e in self.schema['concerne'].subjects()), + 
self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), ['Affaire', 'Personne']) - self.assertEquals(sorted(str(e) for e in self.schema['concerne'].objects()), + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) self.mh.cmd_drop_relation_definition('Affaire', 'concerne', 'Societe') - self.assertEquals(sorted(str(e) for e in self.schema['concerne'].subjects()), + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), ['Affaire', 'Personne']) - self.assertEquals(sorted(str(e) for e in self.schema['concerne'].objects()), + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), ['Affaire', 'Division', 'Note', 'SubDivision']) self.schema.rebuild_infered_relations() # need to be explicitly called once everything is in place - self.assertEquals(sorted(str(e) for e in self.schema['concerne'].objects()), + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), ['Affaire', 'Note']) self.mh.cmd_add_relation_definition('Affaire', 'concerne', 'Societe') - self.assertEquals(sorted(str(e) for e in self.schema['concerne'].subjects()), + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()), ['Affaire', 'Personne']) - self.assertEquals(sorted(str(e) for e in self.schema['concerne'].objects()), + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), ['Affaire', 'Note', 'Societe']) self.schema.rebuild_infered_relations() # need to be explicitly called once everything is in place - self.assertEquals(sorted(str(e) for e in self.schema['concerne'].objects()), + self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()), ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision']) # trick: overwrite self.maxeid to avoid deletion of just reintroduced types self.maxeid = self.execute('Any MAX(X)')[0][0] def test_rename_relation(self): - self.skip('implement me') + self.skipTest('implement me') def test_change_relation_props_non_final(self): rschema = self.schema['concerne'] card = rschema.rdef('Affaire', 'Societe').cardinality - self.assertEquals(card, '**') + self.assertEqual(card, '**') try: self.mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe', cardinality='?*') card = rschema.rdef('Affaire', 'Societe').cardinality - self.assertEquals(card, '?*') + self.assertEqual(card, '?*') finally: self.mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe', cardinality='**') @@ -287,12 +287,12 @@ def test_change_relation_props_final(self): rschema = self.schema['adel'] card = rschema.rdef('Personne', 'String').fulltextindexed - self.assertEquals(card, False) + self.assertEqual(card, False) try: self.mh.cmd_change_relation_props('Personne', 'adel', 'String', fulltextindexed=True) card = rschema.rdef('Personne', 'String').fulltextindexed - self.assertEquals(card, True) + self.assertEqual(card, True) finally: self.mh.cmd_change_relation_props('Personne', 'adel', 'String', fulltextindexed=False) @@ -309,13 +309,14 @@ migrschema['titre'].rdefs[('Personne', 'String')].description = 'title for this person' delete_concerne_rqlexpr = self._rrqlexpr_rset('delete', 'concerne') add_concerne_rqlexpr = self._rrqlexpr_rset('add', 'concerne') + self.mh.cmd_sync_schema_props_perms(commit=False) - self.assertEquals(cursor.execute('Any D WHERE X name "Personne", X description D')[0][0], + self.assertEqual(cursor.execute('Any D WHERE X name "Personne", X description D')[0][0], 'blabla bla') - 
self.assertEquals(cursor.execute('Any D WHERE X name "titre", X description D')[0][0], + self.assertEqual(cursor.execute('Any D WHERE X name "titre", X description D')[0][0], 'usually a title') - self.assertEquals(cursor.execute('Any D WHERE X relation_type RT, RT name "titre",' + self.assertEqual(cursor.execute('Any D WHERE X relation_type RT, RT name "titre",' 'X from_entity FE, FE name "Personne",' 'X description D')[0][0], 'title for this person') @@ -326,29 +327,29 @@ expected = [u'nom', u'prenom', u'sexe', u'promo', u'ass', u'adel', u'titre', u'web', u'tel', u'fax', u'datenaiss', u'test', 'description', u'firstname', u'creation_date', 'cwuri', u'modification_date'] - self.assertEquals(rinorder, expected) + self.assertEqual(rinorder, expected) # test permissions synchronization #################################### # new rql expr to add note entity eexpr = self._erqlexpr_entity('add', 'Note') - self.assertEquals(eexpr.expression, + self.assertEqual(eexpr.expression, 'X ecrit_part PE, U in_group G, ' 'PE require_permission P, P name "add_note", P require_group G') - self.assertEquals([et.name for et in eexpr.reverse_add_permission], ['Note']) - self.assertEquals(eexpr.reverse_read_permission, ()) - self.assertEquals(eexpr.reverse_delete_permission, ()) - self.assertEquals(eexpr.reverse_update_permission, ()) + self.assertEqual([et.name for et in eexpr.reverse_add_permission], ['Note']) + self.assertEqual(eexpr.reverse_read_permission, ()) + self.assertEqual(eexpr.reverse_delete_permission, ()) + self.assertEqual(eexpr.reverse_update_permission, ()) # no more rqlexpr to delete and add para attribute self.failIf(self._rrqlexpr_rset('add', 'para')) self.failIf(self._rrqlexpr_rset('delete', 'para')) # new rql expr to add ecrit_par relation rexpr = self._rrqlexpr_entity('add', 'ecrit_par') - self.assertEquals(rexpr.expression, + self.assertEqual(rexpr.expression, 'O require_permission P, P name "add_note", ' 'U in_group G, P require_group G') - self.assertEquals([rdef.rtype.name for rdef in rexpr.reverse_add_permission], ['ecrit_par']) - self.assertEquals(rexpr.reverse_read_permission, ()) - self.assertEquals(rexpr.reverse_delete_permission, ()) + self.assertEqual([rdef.rtype.name for rdef in rexpr.reverse_add_permission], ['ecrit_par']) + self.assertEqual(rexpr.reverse_read_permission, ()) + self.assertEqual(rexpr.reverse_delete_permission, ()) # no more rqlexpr to delete and add travaille relation self.failIf(self._rrqlexpr_rset('add', 'travaille')) self.failIf(self._rrqlexpr_rset('delete', 'travaille')) @@ -359,13 +360,13 @@ self.failIf(self._erqlexpr_rset('read', 'Affaire')) # rqlexpr to update Affaire entity has been updated eexpr = self._erqlexpr_entity('update', 'Affaire') - self.assertEquals(eexpr.expression, 'X concerne S, S owned_by U') + self.assertEqual(eexpr.expression, 'X concerne S, S owned_by U') # no change for rqlexpr to add and delete Affaire entity - self.assertEquals(len(self._erqlexpr_rset('delete', 'Affaire')), 1) - self.assertEquals(len(self._erqlexpr_rset('add', 'Affaire')), 1) + self.assertEqual(len(self._erqlexpr_rset('delete', 'Affaire')), 1) + self.assertEqual(len(self._erqlexpr_rset('add', 'Affaire')), 1) # no change for rqlexpr to add and delete concerne relation - self.assertEquals(len(self._rrqlexpr_rset('delete', 'concerne')), len(delete_concerne_rqlexpr)) - self.assertEquals(len(self._rrqlexpr_rset('add', 'concerne')), len(add_concerne_rqlexpr)) + self.assertEqual(len(self._rrqlexpr_rset('delete', 'concerne')), len(delete_concerne_rqlexpr)) + 
self.assertEqual(len(self._rrqlexpr_rset('add', 'concerne')), len(add_concerne_rqlexpr)) # * migrschema involve: # * 7 rqlexprs deletion (2 in (Affaire read + Societe + travaille) + 1 # in para attribute) @@ -373,29 +374,36 @@ # * 2 new (Note add, ecrit_par add) # * 2 implicit new for attributes update_permission (Note.para, Personne.test) # remaining orphan rql expr which should be deleted at commit (composite relation) - self.assertEquals(cursor.execute('Any COUNT(X) WHERE X is RQLExpression, ' + self.assertEqual(cursor.execute('Any COUNT(X) WHERE X is RQLExpression, ' 'NOT ET1 read_permission X, NOT ET2 add_permission X, ' 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0], 7+1) # finally - self.assertEquals(cursor.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0], + self.assertEqual(cursor.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0], nbrqlexpr_start + 1 + 2 + 2) - - self.mh.rollback() + self.mh.commit() + # unique_together test + self.assertEqual(len(self.schema.eschema('Personne')._unique_together), 1) + self.assertItemsEqual(self.schema.eschema('Personne')._unique_together[0], + ('nom', 'prenom', 'datenaiss')) + rset = cursor.execute('Any C WHERE C is CWUniqueTogetherConstraint') + self.assertEqual(len(rset), 1) + relations = [r.rtype.name for r in rset.get_entity(0,0).relations] + self.assertItemsEqual(relations, ('nom', 'prenom', 'datenaiss')) def _erqlexpr_rset(self, action, ertype): rql = 'RQLExpression X WHERE ET is CWEType, ET %s_permission X, ET name %%(name)s' % action return self.mh.session.execute(rql, {'name': ertype}) def _erqlexpr_entity(self, action, ertype): rset = self._erqlexpr_rset(action, ertype) - self.assertEquals(len(rset), 1) + self.assertEqual(len(rset), 1) return rset.get_entity(0, 0) def _rrqlexpr_rset(self, action, ertype): rql = 'RQLExpression X WHERE RT is CWRType, RDEF %s_permission X, RT name %%(name)s, RDEF relation_type RT' % action return self.mh.session.execute(rql, {'name': ertype}) def _rrqlexpr_entity(self, action, ertype): rset = self._rrqlexpr_rset(action, ertype) - self.assertEquals(len(rset), 1) + self.assertEqual(len(rset), 1) return rset.get_entity(0, 0) def test_set_size_constraint(self): @@ -413,7 +421,7 @@ def test_add_remove_cube_and_deps(self): cubes = set(self.config.cubes()) schema = self.repo.schema - self.assertEquals(sorted((str(s), str(o)) for s, o in schema['see_also'].rdefs.keys()), + self.assertEqual(sorted((str(s), str(o)) for s, o in schema['see_also'].rdefs.keys()), sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'), ('Bookmark', 'Bookmark'), ('Bookmark', 'Note'), ('Note', 'Note'), ('Note', 'Bookmark')])) @@ -425,19 +433,19 @@ self.failIf(self.config.cube_dir('email') in self.config.cubes_path()) self.failIf('file' in self.config.cubes()) self.failIf(self.config.cube_dir('file') in self.config.cubes_path()) - for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'Image', + for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'sender', 'in_thread', 'reply_to', 'data_format'): self.failIf(ertype in schema, ertype) - self.assertEquals(sorted(schema['see_also'].rdefs.keys()), + self.assertEqual(sorted(schema['see_also'].rdefs.keys()), sorted([('Folder', 'Folder'), ('Bookmark', 'Bookmark'), ('Bookmark', 'Note'), ('Note', 'Note'), ('Note', 'Bookmark')])) - self.assertEquals(sorted(schema['see_also'].subjects()), ['Bookmark', 'Folder', 'Note']) - self.assertEquals(sorted(schema['see_also'].objects()), ['Bookmark', 'Folder', 'Note']) - self.assertEquals(self.execute('Any X WHERE 
X pkey "system.version.email"').rowcount, 0) - self.assertEquals(self.execute('Any X WHERE X pkey "system.version.file"').rowcount, 0) + self.assertEqual(sorted(schema['see_also'].subjects()), ['Bookmark', 'Folder', 'Note']) + self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'Folder', 'Note']) + self.assertEqual(self.execute('Any X WHERE X pkey "system.version.email"').rowcount, 0) + self.assertEqual(self.execute('Any X WHERE X pkey "system.version.file"').rowcount, 0) except : import traceback traceback.print_exc() @@ -448,22 +456,22 @@ self.failUnless(self.config.cube_dir('email') in self.config.cubes_path()) self.failUnless('file' in self.config.cubes()) self.failUnless(self.config.cube_dir('file') in self.config.cubes_path()) - for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'Image', + for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'sender', 'in_thread', 'reply_to', 'data_format'): self.failUnless(ertype in schema, ertype) - self.assertEquals(sorted(schema['see_also'].rdefs.keys()), + self.assertEqual(sorted(schema['see_also'].rdefs.keys()), sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'), ('Bookmark', 'Bookmark'), ('Bookmark', 'Note'), ('Note', 'Note'), ('Note', 'Bookmark')])) - self.assertEquals(sorted(schema['see_also'].subjects()), ['Bookmark', 'EmailThread', 'Folder', 'Note']) - self.assertEquals(sorted(schema['see_also'].objects()), ['Bookmark', 'EmailThread', 'Folder', 'Note']) + self.assertEqual(sorted(schema['see_also'].subjects()), ['Bookmark', 'EmailThread', 'Folder', 'Note']) + self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'EmailThread', 'Folder', 'Note']) from cubes.email.__pkginfo__ import version as email_version from cubes.file.__pkginfo__ import version as file_version - self.assertEquals(self.execute('Any V WHERE X value V, X pkey "system.version.email"')[0][0], + self.assertEqual(self.execute('Any V WHERE X value V, X pkey "system.version.email"')[0][0], email_version) - self.assertEquals(self.execute('Any V WHERE X value V, X pkey "system.version.file"')[0][0], + self.assertEqual(self.execute('Any V WHERE X value V, X pkey "system.version.file"')[0][0], file_version) # trick: overwrite self.maxeid to avoid deletion of just reintroduced # types (and their associated tables!) 
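As an illustrative aside (not part of the changeset): most of the hunks in these test modules are a mechanical modernisation from the deprecated assertion aliases inherited from logilab-common's testlib (assertEquals, assertDictEquals, assertSetEquals, self.skip) to the canonical unittest spellings (assertEqual, assertDictEqual, assertSetEqual, self.skipTest). The short sketch below shows the before/after spellings on a hypothetical, self-contained test case; the class name and the trivial assertions are assumptions for illustration, not CubicWeb code.

import unittest

class RenameSketchTC(unittest.TestCase):
    """Hypothetical test case used only to illustrate the rename."""

    def test_deprecated_spellings(self):
        # Old spelling: assertEquals is a deprecated alias of assertEqual,
        # which is why the suite still ran before this changeset.
        self.assertEquals(2 + 2, 4)

    def test_canonical_spellings(self):
        # Spellings this changeset standardises on (available in unittest
        # on Python 2.7+).
        self.assertEqual(2 + 2, 4)
        self.assertDictEqual({'a': 1}, {'a': 1})
        self.assertSetEqual(set('ab'), set(['a', 'b']))

    def test_skip(self):
        # TestCase.skipTest() replaces the testlib-style self.skip() seen
        # in the removed lines of these hunks.
        self.skipTest('implement me')

if __name__ == '__main__':
    unittest.main()

Both the old and the new spellings should pass under Python 2.7's unittest, which is what makes the wholesale rename in this patch safe to apply mechanically.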
@@ -501,19 +509,19 @@ def test_remove_dep_cube(self): ex = self.assertRaises(ConfigurationError, self.mh.cmd_remove_cube, 'file') - self.assertEquals(str(ex), "can't remove cube file, used as a dependency") + self.assertEqual(str(ex), "can't remove cube file, used as a dependency") def test_introduce_base_class(self): self.mh.cmd_add_entity_type('Para') self.mh.repo.schema.rebuild_infered_relations() - self.assertEquals(sorted(et.type for et in self.schema['Para'].specialized_by()), + self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()), ['Note']) - self.assertEquals(self.schema['Note'].specializes().type, 'Para') + self.assertEqual(self.schema['Note'].specializes().type, 'Para') self.mh.cmd_add_entity_type('Text') self.mh.repo.schema.rebuild_infered_relations() - self.assertEquals(sorted(et.type for et in self.schema['Para'].specialized_by()), + self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()), ['Note', 'Text']) - self.assertEquals(self.schema['Text'].specializes().type, 'Para') + self.assertEqual(self.schema['Text'].specializes().type, 'Para') # test columns have been actually added text = self.execute('INSERT Text X: X para "hip", X summary "hop", X newattr "momo"').get_entity(0, 0) note = self.execute('INSERT Note X: X para "hip", X shortpara "hop", X newattr "momo"').get_entity(0, 0) @@ -540,10 +548,20 @@ self.commit() finally: self.session.data['rebuild-infered'] = False - self.assertEquals(sorted(et.type for et in self.schema['Para'].specialized_by()), + self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()), []) - self.assertEquals(self.schema['Note'].specializes(), None) - self.assertEquals(self.schema['Text'].specializes(), None) + self.assertEqual(self.schema['Note'].specializes(), None) + self.assertEqual(self.schema['Text'].specializes(), None) + + + def test_add_symmetric_relation_type(self): + same_as_sql = self.mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' " + "and name='same_as_relation'") + self.failIf(same_as_sql) + self.mh.cmd_add_relation_type('same_as') + same_as_sql = self.mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' " + "and name='same_as_relation'") + self.failUnless(same_as_sql) if __name__ == '__main__': unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/unittest_msplanner.py --- a/server/test/unittest_msplanner.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/test/unittest_msplanner.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,6 +15,9 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
+ +from logilab.common.decorators import clear_cache + from cubicweb.devtools import init_test_database from cubicweb.devtools.repotest import BasePlannerTC, test_plan @@ -45,7 +48,7 @@ uri = 'ccc' support_entities = {'Card': True, 'Note': True, 'State': True} support_relations = {'in_state': True, 'multisource_rel': True, 'multisource_inlined_rel': True, - 'multisource_crossed_rel': True} + 'multisource_crossed_rel': True,} dont_cross_relations = set(('fiche', 'state_of')) cross_relations = set(('multisource_crossed_rel',)) @@ -57,10 +60,11 @@ {'X': 'CWConstraint'}, {'X': 'CWConstraintType'}, {'X': 'CWEType'}, {'X': 'CWGroup'}, {'X': 'CWPermission'}, {'X': 'CWProperty'}, {'X': 'CWRType'}, {'X': 'CWRelation'}, {'X': 'CWUser'}, + {'X': 'CWUniqueTogetherConstraint'}, {'X': 'Card'}, {'X': 'Comment'}, {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailAddress'}, {'X': 'EmailPart'}, {'X': 'EmailThread'}, {'X': 'ExternalUri'}, {'X': 'File'}, - {'X': 'Folder'}, {'X': 'Image'}, {'X': 'Note'}, + {'X': 'Folder'}, {'X': 'Note'}, {'X': 'Personne'}, {'X': 'RQLExpression'}, {'X': 'Societe'}, {'X': 'State'}, {'X': 'SubDivision'}, {'X': 'SubWorkflowExitPoint'}, {'X': 'Tag'}, {'X': 'TrInfo'}, {'X': 'Transition'}, @@ -137,8 +141,8 @@ for var in sourcevars.keys(): solindices = sourcevars.pop(var) sourcevars[var._ms_table_key()] = solindices - self.assertEquals(ppi._sourcesterms, sourcesterms) - self.assertEquals(ppi.needsplit, needsplit) + self.assertEqual(ppi._sourcesterms, sourcesterms) + self.assertEqual(ppi.needsplit, needsplit) def test_simple_system_only(self): @@ -364,6 +368,8 @@ def setUp(self): BaseMSPlannerTC.setUp(self) self.planner = MSPlanner(self.o.schema, self.repo.vreg.rqlhelper) + for cached in ('rel_type_sources', 'can_cross_relation', 'is_multi_sources_relation'): + clear_cache(self.repo, cached) _test = test_plan @@ -413,7 +419,7 @@ """retrieve CWUser X from both sources and return concatenation of results """ self._test('CWUser X ORDERBY X LIMIT 10 OFFSET 10', - [('AggrStep', 'Any X ORDERBY X', 10, 10, 'table0', None, [ + [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY table0.C0 LIMIT 10 OFFSET 10', None, [ ('FetchStep', [('Any X WHERE X is CWUser', [{'X': 'CWUser'}])], [self.ldap, self.system], {}, {'X': 'table0.C0'}, []), ]), @@ -423,7 +429,7 @@ """ # COUNT(X) is kept in sub-step and transformed into SUM(X) in the AggrStep self._test('Any COUNT(X) WHERE X is CWUser', - [('AggrStep', 'Any COUNT(X)', None, None, 'table0', None, [ + [('AggrStep', 'SELECT SUM(table0.C0) FROM table0', None, [ ('FetchStep', [('Any COUNT(X) WHERE X is CWUser', [{'X': 'CWUser'}])], [self.ldap, self.system], {}, {'COUNT(X)': 'table0.C0'}, []), ]), @@ -498,7 +504,7 @@ def test_complex_ordered(self): self._test('Any L ORDERBY L WHERE X login L', - [('AggrStep', 'Any L ORDERBY L', None, None, 'table0', None, + [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY table0.C0', None, [('FetchStep', [('Any L WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], [self.ldap, self.system], {}, {'X.login': 'table0.C0', 'L': 'table0.C0'}, []), @@ -507,7 +513,7 @@ def test_complex_ordered_limit_offset(self): self._test('Any L ORDERBY L LIMIT 10 OFFSET 10 WHERE X login L', - [('AggrStep', 'Any L ORDERBY L', 10, 10, 'table0', None, + [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY table0.C0 LIMIT 10 OFFSET 10', None, [('FetchStep', [('Any L WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], [self.ldap, self.system], {}, {'X.login': 'table0.C0', 'L': 'table0.C0'}, []), @@ -593,7 +599,7 @@ 2. 
return content of the table sorted """ self._test('Any X,F ORDERBY F WHERE X firstname F', - [('AggrStep', 'Any X,F ORDERBY F', None, None, 'table0', None, + [('AggrStep', 'SELECT table0.C0, table0.C1 FROM table0 ORDER BY table0.C1', None, [('FetchStep', [('Any X,F WHERE X firstname F, X is CWUser', [{'X': 'CWUser', 'F': 'String'}])], [self.ldap, self.system], {}, @@ -657,7 +663,7 @@ def test_complex_typed_aggregat(self): self._test('Any MAX(X) WHERE X is Card', - [('AggrStep', 'Any MAX(X)', None, None, 'table0', None, + [('AggrStep', 'SELECT MAX(table0.C0) FROM table0', None, [('FetchStep', [('Any MAX(X) WHERE X is Card', [{'X': 'Card'}])], [self.cards, self.system], {}, {'MAX(X)': 'table0.C0'}, []) @@ -784,10 +790,10 @@ [{'X': 'Basket'}]), ('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}]), - ('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Image, Note, Personne, Societe, SubDivision, Tag)', + ('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Note, Personne, Societe, SubDivision, Tag)', [{'X': 'Card'}, {'X': 'Comment'}, {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailThread'}, - {'X': 'File'}, {'X': 'Folder'}, {'X': 'Image'}, + {'X': 'File'}, {'X': 'Folder'}, {'X': 'Note'}, {'X': 'Personne'}, {'X': 'Societe'}, {'X': 'SubDivision'}, {'X': 'Tag'}]),], None, None, [self.system], {}, []), @@ -810,10 +816,10 @@ [{'X': 'Basket'}]), ('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}]), - ('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Image, Note, Personne, Societe, SubDivision, Tag)', + ('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Note, Personne, Societe, SubDivision, Tag)', [{'X': 'Card'}, {'X': 'Comment'}, {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailThread'}, - {'X': 'File'}, {'X': 'Folder'}, {'X': 'Image'}, + {'X': 'File'}, {'X': 'Folder'}, {'X': 'Note'}, {'X': 'Personne'}, {'X': 'Societe'}, {'X': 'SubDivision'}, {'X': 'Tag'}])], [self.system], {}, {'X': 'table0.C0'}, []), @@ -823,7 +829,7 @@ [{'X': 'Affaire'}, {'X': 'Basket'}, {'X': 'CWUser'}, {'X': 'Card'}, {'X': 'Comment'}, {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailThread'}, - {'X': 'File'}, {'X': 'Folder'}, {'X': 'Image'}, + {'X': 'File'}, {'X': 'Folder'}, {'X': 'Note'}, {'X': 'Personne'}, {'X': 'Societe'}, {'X': 'SubDivision'}, {'X': 'Tag'}])], 10, 10, [self.system], {'X': 'table0.C0'}, []) @@ -888,18 +894,19 @@ [{'X': 'Card'}, {'X': 'Note'}, {'X': 'State'}])], [self.cards, self.system], {}, {'X': 'table0.C0'}, []), ('FetchStep', - [('Any X WHERE X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Image, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', + [('Any X WHERE X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWUniqueTogetherConstraint, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', [{'X': 'BaseTransition'}, {'X': 
'Bookmark'}, {'X': 'CWAttribute'}, {'X': 'CWCache'}, {'X': 'CWConstraint'}, {'X': 'CWConstraintType'}, {'X': 'CWEType'}, {'X': 'CWGroup'}, {'X': 'CWPermission'}, {'X': 'CWProperty'}, {'X': 'CWRType'}, {'X': 'CWRelation'}, + {'X': 'CWUniqueTogetherConstraint'}, {'X': 'Comment'}, {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailAddress'}, {'X': 'EmailPart'}, {'X': 'EmailThread'}, {'X': 'ExternalUri'}, {'X': 'File'}, - {'X': 'Folder'}, {'X': 'Image'}, + {'X': 'Folder'}, {'X': 'Personne'}, {'X': 'RQLExpression'}, {'X': 'Societe'}, {'X': 'SubDivision'}, {'X': 'SubWorkflowExitPoint'}, {'X': 'Tag'}, @@ -949,19 +956,21 @@ [self.system], {'X': 'table3.C0'}, {'ET': 'table0.C0', 'X': 'table0.C1'}, []), # extra UnionFetchStep could be avoided but has no cost, so don't care ('UnionFetchStep', - [('FetchStep', [('Any ET,X WHERE X is ET, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Image, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', + [('FetchStep', [('Any ET,X WHERE X is ET, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWUniqueTogetherConstraint, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', [{'X': 'BaseTransition', 'ET': 'CWEType'}, {'X': 'Bookmark', 'ET': 'CWEType'}, {'X': 'CWAttribute', 'ET': 'CWEType'}, {'X': 'CWCache', 'ET': 'CWEType'}, {'X': 'CWConstraint', 'ET': 'CWEType'}, {'X': 'CWConstraintType', 'ET': 'CWEType'}, {'X': 'CWEType', 'ET': 'CWEType'}, {'X': 'CWGroup', 'ET': 'CWEType'}, {'X': 'CWPermission', 'ET': 'CWEType'}, {'X': 'CWProperty', 'ET': 'CWEType'}, {'X': 'CWRType', 'ET': 'CWEType'}, - {'X': 'CWRelation', 'ET': 'CWEType'}, {'X': 'Comment', 'ET': 'CWEType'}, + {'X': 'CWRelation', 'ET': 'CWEType'}, + {'X': 'CWUniqueTogetherConstraint', 'ET': 'CWEType'}, + {'X': 'Comment', 'ET': 'CWEType'}, {'X': 'Division', 'ET': 'CWEType'}, {'X': 'Email', 'ET': 'CWEType'}, {'X': 'EmailAddress', 'ET': 'CWEType'}, {'X': 'EmailPart', 'ET': 'CWEType'}, {'X': 'EmailThread', 'ET': 'CWEType'}, {'X': 'ExternalUri', 'ET': 'CWEType'}, {'X': 'File', 'ET': 'CWEType'}, {'X': 'Folder', 'ET': 'CWEType'}, - {'X': 'Image', 'ET': 'CWEType'}, {'X': 'Personne', 'ET': 'CWEType'}, + {'X': 'Personne', 'ET': 'CWEType'}, {'X': 'RQLExpression', 'ET': 'CWEType'}, {'X': 'Societe', 'ET': 'CWEType'}, {'X': 'SubDivision', 'ET': 'CWEType'}, {'X': 'SubWorkflowExitPoint', 'ET': 'CWEType'}, {'X': 'Tag', 'ET': 'CWEType'}, {'X': 'TrInfo', 'ET': 'CWEType'}, @@ -1026,7 +1035,7 @@ [self.cards, self.system], None, {'X': 'table1.C0', 'X.title': 'table1.C1', 'XT': 'table1.C1'}, []), ('OneFetchStep', [('Any X,XT,U WHERE X owned_by U?, X title XT, X is Card', - [{'X': 'Card', 'XT': 'String'}])], + [{'X': 'Card', 'U': 'CWUser', 'XT': 'String'}])], None, None, [self.system], {'L': 'table0.C1', 'U': 'table0.C0', 'X': 'table1.C0', @@ -1299,9 +1308,66 @@ ]), ]) + def test_has_text_orderby_rank(self): + self._test('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', + [('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], + 
[self.ldap, self.system], None, {'X': 'table0.C0'}, []), + ('AggrStep', 'SELECT table1.C1 FROM table1 ORDER BY table1.C0', None, [ + ('FetchStep', [('Any FTIRANK(X),X WHERE X has_text "bla", X is CWUser', + [{'X': 'CWUser'}])], + [self.system], {'X': 'table0.C0'}, {'FTIRANK(X)': 'table1.C0', 'X': 'table1.C1'}, []), + ('FetchStep', [('Any FTIRANK(X),X WHERE X has_text "bla", X firstname "bla", X is Personne', + [{'X': 'Personne'}])], + [self.system], {}, {'FTIRANK(X)': 'table1.C0', 'X': 'table1.C1'}, []), + ]), + ]) + + def test_security_has_text_orderby_rank(self): + # use a guest user + self.session = self.user_groups_session('guests') + self._test('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', + [('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], + [self.ldap, self.system], None, {'X': 'table1.C0'}, []), + ('UnionFetchStep', + [('FetchStep', [('Any X WHERE X firstname "bla", X is Personne', [{'X': 'Personne'}])], + [self.system], {}, {'X': 'table0.C0'}, []), + ('FetchStep', [('Any X WHERE EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])], + [self.system], {'X': 'table1.C0'}, {'X': 'table0.C0'}, [])]), + ('OneFetchStep', [('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla"', + [{'X': 'CWUser'}, {'X': 'Personne'}])], + None, None, [self.system], {'X': 'table0.C0'}, []), + ]) + + def test_has_text_select_rank(self): + self._test('Any X, FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', + # XXX unecessary duplicate selection + [('FetchStep', [('Any X,X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], + [self.ldap, self.system], None, {'X': 'table0.C1'}, []), + ('UnionStep', None, None, [ + ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", X is CWUser', [{'X': 'CWUser'}])], + None, None, [self.system], {'X': 'table0.C1'}, []), + ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])], + None, None, [self.system], {}, []), + ]), + ]) + + def test_security_has_text_select_rank(self): + # use a guest user + self.session = self.user_groups_session('guests') + self._test('Any X, FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', + [('FetchStep', [('Any X,X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], + [self.ldap, self.system], None, {'X': 'table0.C1'}, []), + ('UnionStep', None, None, [ + ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])], + None, None, [self.system], {'X': 'table0.C1'}, []), + ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])], + None, None, [self.system], {}, []), + ]), + ]) + def test_sort_func(self): self._test('Note X ORDERBY DUMB_SORT(RF) WHERE X type RF', - [('AggrStep', 'Any X ORDERBY DUMB_SORT(RF)', None, None, 'table0', None, [ + [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY DUMB_SORT(table0.C1)', None, [ ('FetchStep', [('Any X,RF WHERE X type RF, X is Note', [{'X': 'Note', 'RF': 'String'}])], [self.cards, self.system], {}, {'X': 'table0.C0', 'X.type': 'table0.C1', 'RF': 'table0.C1'}, []), @@ -1310,8 +1376,7 @@ def test_ambigous_sort_func(self): self._test('Any X ORDERBY DUMB_SORT(RF) WHERE X title RF, X is IN (Bookmark, Card, EmailThread)', - [('AggrStep', 'Any X ORDERBY DUMB_SORT(RF)', - None, None, 'table0', None, + [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY DUMB_SORT(table0.C1)', None, [('FetchStep', [('Any X,RF WHERE X title RF, X is Card', [{'X': 
'Card', 'RF': 'String'}])], [self.cards, self.system], {}, @@ -1380,7 +1445,7 @@ ('FetchStep', [('Any B,C WHERE B login C, B is CWUser', [{'B': 'CWUser', 'C': 'String'}])], [self.ldap, self.system], None, {'B': 'table1.C0', 'B.login': 'table1.C1', 'C': 'table1.C1'}, []), - ('OneFetchStep', [('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by 5), B is CWUser', + ('OneFetchStep', [('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by 5), B is CWUser, A is IN(Bookmark, Tag)', [{'A': 'Bookmark', 'B': 'CWUser', 'C': 'String'}, {'A': 'Tag', 'B': 'CWUser', 'C': 'String'}])], None, None, [self.system], @@ -1414,7 +1479,7 @@ ('FetchStep', [('Any B,C WHERE B login C, B is CWUser', [{'B': 'CWUser', 'C': 'String'}])], [self.ldap, self.system], None, {'B': 'table1.C0', 'B.login': 'table1.C1', 'C': 'table1.C1'}, []), - ('OneFetchStep', [('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by 5), B is CWUser', + ('OneFetchStep', [('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by 5), B is CWUser, A is IN(Card, Tag)', [{'A': 'Card', 'B': 'CWUser', 'C': 'String'}, {'A': 'Tag', 'B': 'CWUser', 'C': 'String'}])], None, None, [self.system], @@ -1480,20 +1545,11 @@ def test_crossed_relation_eid_2_needattr(self): repo._type_source_cache[999999] = ('Note', 'cards', 999999) self._test('Any Y,T WHERE X eid %(x)s, X multisource_crossed_rel Y, Y type T', - [('FetchStep', [('Any Y,T WHERE Y type T, Y is Note', [{'T': 'String', 'Y': 'Note'}])], - [self.cards, self.system], None, - {'T': 'table0.C1', 'Y': 'table0.C0', 'Y.type': 'table0.C1'}, []), - ('UnionStep', None, None, - [('OneFetchStep', [('Any Y,T WHERE 999999 multisource_crossed_rel Y, Y type T, Y is Note', - [{'T': 'String', 'Y': 'Note'}])], - None, None, [self.cards], None, - []), - ('OneFetchStep', [('Any Y,T WHERE 999999 multisource_crossed_rel Y, Y type T, Y is Note', - [{'T': 'String', 'Y': 'Note'}])], - None, None, [self.system], - {'T': 'table0.C1', 'Y': 'table0.C0', 'Y.type': 'table0.C1'}, - [])] - )], + [('OneFetchStep', [('Any Y,T WHERE 999999 multisource_crossed_rel Y, Y type T, Y is Note', + [{'T': 'String', 'Y': 'Note'}])], + None, None, [self.cards, self.system], {}, + []), + ], {'x': 999999,}) def test_crossed_relation_eid_not_1(self): @@ -1701,6 +1757,54 @@ # ]), # ]) + def test_ldap_user_related_to_invariant_and_dont_cross_rel(self): + self.repo._type_source_cache[999999] = ('Note', 'cards', 999999) + self.cards.dont_cross_relations.add('created_by') + try: + self._test('Any X,XL WHERE E eid %(x)s, E created_by X, X login XL', + [('FetchStep', [('Any X,XL WHERE X login XL, X is CWUser', + [{'X': 'CWUser', 'XL': 'String'}])], + [self.ldap, self.system], None, + {'X': 'table0.C0', 'X.login': 'table0.C1', 'XL': 'table0.C1'}, + []), + ('OneFetchStep', + [('Any X,XL WHERE 999999 created_by X, X login XL, X is CWUser', + [{'X': 'CWUser', 'XL': 'String'}])], + None, None, + [self.system], + {'X': 'table0.C0', 'X.login': 'table0.C1', 'XL': 'table0.C1'}, + [])], + {'x': 999999}) + finally: + self.cards.dont_cross_relations.remove('created_by') + + def test_ambigous_cross_relation(self): + self.repo._type_source_cache[999999] = ('Note', 'cards', 999999) + self.cards.support_relations['see_also'] = True + self.cards.cross_relations.add('see_also') + try: + self._test('Any X,AA ORDERBY AA WHERE E eid %(x)s, E see_also X, X modification_date AA', + [('AggrStep', + 'SELECT table0.C0, table0.C1 FROM table0 ORDER BY table0.C1', + None, + [('FetchStep', + 
[('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Note', + [{'AA': 'Datetime', 'X': 'Note'}])], [self.cards, self.system], {}, + {'AA': 'table0.C1', 'X': 'table0.C0', + 'X.modification_date': 'table0.C1'}, + []), + ('FetchStep', + [('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Bookmark', + [{'AA': 'Datetime', 'X': 'Bookmark'}])], + [self.system], {}, + {'AA': 'table0.C1', 'X': 'table0.C0', + 'X.modification_date': 'table0.C1'}, + [])])], + {'x': 999999}) + finally: + del self.cards.support_relations['see_also'] + self.cards.cross_relations.remove('see_also') + # non regression tests #################################################### def test_nonregr1(self): @@ -1718,8 +1822,9 @@ ]) def test_nonregr2(self): - self.session.user.fire_transition('deactivate') - treid = self.session.user.latest_trinfo().eid + iworkflowable = self.session.user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') + treid = iworkflowable.latest_trinfo().eid self._test('Any X ORDERBY D DESC WHERE E eid %(x)s, E wf_info_for X, X modification_date D', [('FetchStep', [('Any X,D WHERE X modification_date D, X is Note', [{'X': 'Note', 'D': 'Datetime'}])], @@ -1727,7 +1832,7 @@ ('FetchStep', [('Any X,D WHERE X modification_date D, X is CWUser', [{'X': 'CWUser', 'D': 'Datetime'}])], [self.ldap, self.system], None, {'X': 'table1.C0', 'X.modification_date': 'table1.C1', 'D': 'table1.C1'}, []), - ('AggrStep', 'Any X ORDERBY D DESC', None, None, 'table2', None, [ + ('AggrStep', 'SELECT table2.C0 FROM table2 ORDER BY table2.C1 DESC', None, [ ('FetchStep', [('Any X,D WHERE E eid %s, E wf_info_for X, X modification_date D, E is TrInfo, X is Affaire'%treid, [{'X': 'Affaire', 'E': 'TrInfo', 'D': 'Datetime'}])], [self.system], @@ -1816,11 +1921,16 @@ def test_nonregr8(self): repo._type_source_cache[999999] = ('Note', 'cards', 999999) self._test('Any X,Z WHERE X eid %(x)s, X multisource_rel Y, Z concerne X', - [('FetchStep', [('Any WHERE 999999 multisource_rel Y, Y is Note', [{'Y': 'Note'}])], - [self.cards], None, {}, []), + [('FetchStep', [('Any 999999 WHERE 999999 multisource_rel Y, Y is Note', + [{'Y': 'Note'}])], + [self.cards], + None, {u'%(x)s': 'table0.C0'}, + []), ('OneFetchStep', [('Any 999999,Z WHERE Z concerne 999999, Z is Affaire', [{'Z': 'Affaire'}])], - None, None, [self.system], {}, [])], + None, None, [self.system], + {u'%(x)s': 'table0.C0'}, []), + ], {'x': 999999}) def test_nonregr9(self): @@ -1870,8 +1980,7 @@ [{'X': 'Note', 'Z': 'Datetime'}])], [self.cards, self.system], None, {'X': 'table0.C0', 'X.modification_date': 'table0.C1', 'Z': 'table0.C1'}, []), - ('AggrStep', 'Any X ORDERBY Z DESC', - None, None, 'table1', None, + ('AggrStep', 'SELECT table1.C0 FROM table1 ORDER BY table1.C1 DESC', None, [('FetchStep', [('Any X,Z WHERE X modification_date Z, 999999 see_also X, X is Bookmark', [{'X': 'Bookmark', 'Z': 'Datetime'}])], [self.system], {}, {'X': 'table1.C0', 'X.modification_date': 'table1.C1', @@ -1923,7 +2032,7 @@ # identity relation. 
BUT I think it's better to leave it as is and to # explain constraint propagation rules, and so why this should be # wrapped in exists() if used in multi-source - self.skip('take a look at me if you wish') + self.skipTest('take a look at me if you wish') self._test('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File ' 'WITH U,UL BEING (Any U,UL WHERE ME eid %(x)s, (U identity ME ' 'OR (EXISTS(U in_group G, G name IN("managers", "staff")))) ' @@ -2140,14 +2249,67 @@ {'x': 999999}) + def test_nonregr_not_is(self): + self._test("Any X WHERE X owned_by U, U login 'anon', NOT X is Comment", + [('FetchStep', [('Any X WHERE X is IN(Card, Note, State)', + [{'X': 'Note'}, {'X': 'State'}, {'X': 'Card'}])], + [self.cards, self.cards2, self.system], + None, {'X': 'table0.C0'}, []), + ('UnionStep', None, None, + [('OneFetchStep', + [(u'Any X WHERE X owned_by U, U login "anon", U is CWUser, X is IN(Affaire, BaseTransition, Basket, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWUniqueTogetherConstraint, CWUser, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', + [{'U': 'CWUser', 'X': 'Affaire'}, + {'U': 'CWUser', 'X': 'BaseTransition'}, + {'U': 'CWUser', 'X': 'Basket'}, + {'U': 'CWUser', 'X': 'Bookmark'}, + {'U': 'CWUser', 'X': 'CWAttribute'}, + {'U': 'CWUser', 'X': 'CWCache'}, + {'U': 'CWUser', 'X': 'CWConstraint'}, + {'U': 'CWUser', 'X': 'CWConstraintType'}, + {'U': 'CWUser', 'X': 'CWEType'}, + {'U': 'CWUser', 'X': 'CWGroup'}, + {'U': 'CWUser', 'X': 'CWPermission'}, + {'U': 'CWUser', 'X': 'CWProperty'}, + {'U': 'CWUser', 'X': 'CWRType'}, + {'U': 'CWUser', 'X': 'CWRelation'}, + {'U': 'CWUser', 'X': 'CWUniqueTogetherConstraint'}, + {'U': 'CWUser', 'X': 'CWUser'}, + {'U': 'CWUser', 'X': 'Division'}, + {'U': 'CWUser', 'X': 'Email'}, + {'U': 'CWUser', 'X': 'EmailAddress'}, + {'U': 'CWUser', 'X': 'EmailPart'}, + {'U': 'CWUser', 'X': 'EmailThread'}, + {'U': 'CWUser', 'X': 'ExternalUri'}, + {'U': 'CWUser', 'X': 'File'}, + {'U': 'CWUser', 'X': 'Folder'}, + {'U': 'CWUser', 'X': 'Personne'}, + {'U': 'CWUser', 'X': 'RQLExpression'}, + {'U': 'CWUser', 'X': 'Societe'}, + {'U': 'CWUser', 'X': 'SubDivision'}, + {'U': 'CWUser', 'X': 'SubWorkflowExitPoint'}, + {'U': 'CWUser', 'X': 'Tag'}, + {'U': 'CWUser', 'X': 'TrInfo'}, + {'U': 'CWUser', 'X': 'Transition'}, + {'U': 'CWUser', 'X': 'Workflow'}, + {'U': 'CWUser', 'X': 'WorkflowTransition'}])], + None, None, + [self.system], {}, []), + ('OneFetchStep', + [(u'Any X WHERE X owned_by U, U login "anon", U is CWUser, X is IN(Card, Note, State)', + [{'U': 'CWUser', 'X': 'Note'}, + {'U': 'CWUser', 'X': 'State'}, + {'U': 'CWUser', 'X': 'Card'}])], + None, None, + [self.system], {'X': 'table0.C0'}, []) + ]) + ]) + class FakeVCSSource(AbstractSource): uri = 'ccc' support_entities = {'Card': True, 'Note': True} support_relations = {'multisource_inlined_rel': True, 'multisource_rel': True} - #dont_cross_relations = set(('fiche', 'in_state')) - #cross_relations = set(('multisource_crossed_rel',)) def syntax_tree_search(self, *args, **kwargs): return [] diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/unittest_multisources.py --- a/server/test/unittest_multisources.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/test/unittest_multisources.py Wed Nov 03 16:38:28 2010 +0100 @@ -101,39 +101,41 @@ def test_eid_comp(self): rset = 
self.sexecute('Card X WHERE X eid > 1') - self.assertEquals(len(rset), 4) + self.assertEqual(len(rset), 4) rset = self.sexecute('Any X,T WHERE X title T, X eid > 1') - self.assertEquals(len(rset), 4) + self.assertEqual(len(rset), 4) def test_metainformation(self): rset = self.sexecute('Card X ORDERBY T WHERE X title T') # 2 added to the system source, 2 added to the external source - self.assertEquals(len(rset), 4) + self.assertEqual(len(rset), 4) # since they are orderd by eid, we know the 3 first one is coming from the system source # and the others from external source - self.assertEquals(rset.get_entity(0, 0).metainformation(), + self.assertEqual(rset.get_entity(0, 0).cw_metainformation(), {'source': {'adapter': 'native', 'uri': 'system'}, 'type': u'Card', 'extid': None}) externent = rset.get_entity(3, 0) - metainf = externent.metainformation() - self.assertEquals(metainf['source'], {'adapter': 'pyrorql', 'base-url': 'http://extern.org/', 'uri': 'extern'}) - self.assertEquals(metainf['type'], 'Card') + metainf = externent.cw_metainformation() + self.assertEqual(metainf['source'], {'adapter': 'pyrorql', 'base-url': 'http://extern.org/', 'uri': 'extern'}) + self.assertEqual(metainf['type'], 'Card') self.assert_(metainf['extid']) etype = self.sexecute('Any ETN WHERE X is ET, ET name ETN, X eid %(x)s', {'x': externent.eid})[0][0] - self.assertEquals(etype, 'Card') + self.assertEqual(etype, 'Card') def test_order_limit_offset(self): rsetbase = self.sexecute('Any W,X ORDERBY W,X WHERE X wikiid W') - self.assertEquals(len(rsetbase), 4) - self.assertEquals(sorted(rsetbase.rows), rsetbase.rows) + self.assertEqual(len(rsetbase), 4) + self.assertEqual(sorted(rsetbase.rows), rsetbase.rows) rset = self.sexecute('Any W,X ORDERBY W,X LIMIT 2 OFFSET 2 WHERE X wikiid W') - self.assertEquals(rset.rows, rsetbase.rows[2:4]) + self.assertEqual(rset.rows, rsetbase.rows[2:4]) def test_has_text(self): self.repo.sources_by_uri['extern'].synchronize(MTIME) # in case fti_update has been run before self.failUnless(self.sexecute('Any X WHERE X has_text "affref"')) self.failUnless(self.sexecute('Affaire X WHERE X has_text "affref"')) + self.failUnless(self.sexecute('Any X ORDERBY FTIRANK(X) WHERE X has_text "affref"')) + self.failUnless(self.sexecute('Affaire X ORDERBY FTIRANK(X) WHERE X has_text "affref"')) def test_anon_has_text(self): self.repo.sources_by_uri['extern'].synchronize(MTIME) # in case fti_update has been run before @@ -145,8 +147,11 @@ cnx = self.login('anon') cu = cnx.cursor() rset = cu.execute('Any X WHERE X has_text "card"') - self.assertEquals(len(rset), 5, zip(rset.rows, rset.description)) - Connection_close(cnx) + # 5: 4 card + 1 readable affaire + self.assertEqual(len(rset), 5, zip(rset.rows, rset.description)) + rset = cu.execute('Any X ORDERBY FTIRANK(X) WHERE X has_text "card"') + self.assertEqual(len(rset), 5, zip(rset.rows, rset.description)) + Connection_close(cnx.cnx) # cnx is a TestCaseConnectionProxy def test_synchronization(self): cu = cnx2.cursor() @@ -173,14 +178,14 @@ affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0] rset = self.sexecute('Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB', {'x': affeid}) - self.assertEquals(len(rset), 1) - self.assertEquals(rset[0][1], "pitetre") + self.assertEqual(len(rset), 1) + self.assertEqual(rset[0][1], "pitetre") def test_simplifiable_var_2(self): affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0] rset = self.sexecute('Any E WHERE E eid %(x)s, E in_state S, NOT S name "moved"', {'x': 
affeid, 'u': self.session.user.eid}) - self.assertEquals(len(rset), 1) + self.assertEqual(len(rset), 1) def test_sort_func(self): self.sexecute('Affaire X ORDERBY DUMB_SORT(RF) WHERE X ref RF') @@ -192,31 +197,31 @@ iec1 = self.repo.extid2eid(self.repo.sources_by_uri['extern'], str(self.ec1), 'Card', self.session) rset = self.sexecute('Any X WHERE X eid IN (%s, %s)' % (iec1, self.ic1)) - self.assertEquals(sorted(r[0] for r in rset.rows), sorted([iec1, self.ic1])) + self.assertEqual(sorted(r[0] for r in rset.rows), sorted([iec1, self.ic1])) def test_greater_eid(self): rset = self.sexecute('Any X WHERE X eid > %s' % (self.ic1 - 1)) - self.assertEquals(len(rset.rows), 2) # self.ic1 and self.ic2 + self.assertEqual(len(rset.rows), 2) # self.ic1 and self.ic2 cu = cnx2.cursor() ec2 = cu.execute('INSERT Card X: X title "glup"')[0][0] cnx2.commit() # 'X eid > something' should not trigger discovery rset = self.sexecute('Any X WHERE X eid > %s' % (self.ic1 - 1)) - self.assertEquals(len(rset.rows), 2) + self.assertEqual(len(rset.rows), 2) # trigger discovery using another query crset = self.sexecute('Card X WHERE X title "glup"') - self.assertEquals(len(crset.rows), 1) + self.assertEqual(len(crset.rows), 1) rset = self.sexecute('Any X WHERE X eid > %s' % (self.ic1 - 1)) - self.assertEquals(len(rset.rows), 3) + self.assertEqual(len(rset.rows), 3) rset = self.sexecute('Any MAX(X)') - self.assertEquals(len(rset.rows), 1) - self.assertEquals(rset.rows[0][0], crset[0][0]) + self.assertEqual(len(rset.rows), 1) + self.assertEqual(rset.rows[0][0], crset[0][0]) def test_attr_unification_1(self): n1 = self.sexecute('INSERT Note X: X type "AFFREF"')[0][0] n2 = self.sexecute('INSERT Note X: X type "AFFREU"')[0][0] rset = self.sexecute('Any X,Y WHERE X is Note, Y is Affaire, X type T, Y ref T') - self.assertEquals(len(rset), 1, rset.rows) + self.assertEqual(len(rset), 1, rset.rows) def test_attr_unification_2(self): cu = cnx2.cursor() @@ -225,7 +230,7 @@ try: c1 = self.sexecute('INSERT Card C: C title "AFFREF"')[0][0] rset = self.sexecute('Any X,Y WHERE X is Card, Y is Affaire, X title T, Y ref T') - self.assertEquals(len(rset), 2, rset.rows) + self.assertEqual(len(rset), 2, rset.rows) finally: cu.execute('DELETE Card X WHERE X eid %(x)s', {'x': ec2}) cnx2.commit() @@ -242,26 +247,26 @@ afeids = self.sexecute('Affaire X') ueids = self.sexecute('CWUser X') rset = self.sexecute('(Any X WHERE X is Affaire) UNION (Any X WHERE X is CWUser)') - self.assertEquals(sorted(r[0] for r in rset.rows), + self.assertEqual(sorted(r[0] for r in rset.rows), sorted(r[0] for r in afeids + ueids)) def test_subquery1(self): rsetbase = self.sexecute('Any W,X WITH W,X BEING (Any W,X ORDERBY W,X WHERE X wikiid W)') - self.assertEquals(len(rsetbase), 4) - self.assertEquals(sorted(rsetbase.rows), rsetbase.rows) + self.assertEqual(len(rsetbase), 4) + self.assertEqual(sorted(rsetbase.rows), rsetbase.rows) rset = self.sexecute('Any W,X LIMIT 2 OFFSET 2 WITH W,X BEING (Any W,X ORDERBY W,X WHERE X wikiid W)') - self.assertEquals(rset.rows, rsetbase.rows[2:4]) + self.assertEqual(rset.rows, rsetbase.rows[2:4]) rset = self.sexecute('Any W,X ORDERBY W,X LIMIT 2 OFFSET 2 WITH W,X BEING (Any W,X WHERE X wikiid W)') - self.assertEquals(rset.rows, rsetbase.rows[2:4]) + self.assertEqual(rset.rows, rsetbase.rows[2:4]) rset = self.sexecute('Any W,X WITH W,X BEING (Any W,X ORDERBY W,X LIMIT 2 OFFSET 2 WHERE X wikiid W)') - self.assertEquals(rset.rows, rsetbase.rows[2:4]) + self.assertEqual(rset.rows, rsetbase.rows[2:4]) def test_subquery2(self): affeid = 
self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0] rset = self.sexecute('Any X,AA,AB WITH X,AA,AB BEING (Any X,AA,AB WHERE E eid %(x)s, E in_state X, X name AA, X modification_date AB)', {'x': affeid}) - self.assertEquals(len(rset), 1) - self.assertEquals(rset[0][1], "pitetre") + self.assertEqual(len(rset), 1) + self.assertEqual(rset[0][1], "pitetre") def test_not_relation(self): states = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN')) @@ -270,22 +275,22 @@ states.remove((userstate.eid, userstate.name)) notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s', {'x': self.session.user.eid})) - self.assertSetEquals(notstates, states) + self.assertSetEqual(notstates, states) aff1 = self.sexecute('Any X WHERE X is Affaire, X ref "AFFREF"')[0][0] aff1stateeid, aff1statename = self.sexecute('Any S,SN WHERE X eid %(x)s, X in_state S, S name SN', {'x': aff1})[0] - self.assertEquals(aff1statename, 'pitetre') + self.assertEqual(aff1statename, 'pitetre') states.add((userstate.eid, userstate.name)) states.remove((aff1stateeid, aff1statename)) notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s', {'x': aff1})) - self.assertSetEquals(notstates, states) + self.assertSetEqual(notstates, states) def test_absolute_url_base_url(self): cu = cnx2.cursor() ceid = cu.execute('INSERT Card X: X title "without wikiid to get eid based url"')[0][0] cnx2.commit() lc = self.sexecute('Card X WHERE X title "without wikiid to get eid based url"').get_entity(0, 0) - self.assertEquals(lc.absolute_url(), 'http://extern.org/card/eid/%s' % ceid) + self.assertEqual(lc.absolute_url(), 'http://extern.org/card/eid/%s' % ceid) cu.execute('DELETE Card X WHERE X eid %(x)s', {'x':ceid}) cnx2.commit() @@ -294,7 +299,7 @@ ceid = cu.execute('INSERT Card X: X title "without wikiid to get eid based url"')[0][0] cnx3.commit() lc = self.sexecute('Card X WHERE X title "without wikiid to get eid based url"').get_entity(0, 0) - self.assertEquals(lc.absolute_url(), 'http://testing.fr/cubicweb/card/eid/%s' % lc.eid) + self.assertEqual(lc.absolute_url(), 'http://testing.fr/cubicweb/card/eid/%s' % lc.eid) cu.execute('DELETE Card X WHERE X eid %(x)s', {'x':ceid}) cnx3.commit() @@ -305,12 +310,13 @@ {'x': affaire.eid, 'u': ueid}) def test_nonregr2(self): - self.session.user.fire_transition('deactivate') - treid = self.session.user.latest_trinfo().eid + iworkflowable = self.session.user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') + treid = iworkflowable.latest_trinfo().eid rset = self.sexecute('Any X ORDERBY D DESC WHERE E eid %(x)s, E wf_info_for X, X modification_date D', {'x': treid}) - self.assertEquals(len(rset), 1) - self.assertEquals(rset.rows[0], [self.session.user.eid]) + self.assertEqual(len(rset), 1) + self.assertEqual(rset.rows[0], [self.session.user.eid]) def test_nonregr3(self): self.sexecute('DELETE Card X WHERE X eid %(x)s, NOT X multisource_inlined_rel Y', {'x': self.ic1}) diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/unittest_querier.py --- a/server/test/unittest_querier.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/test/unittest_querier.py Wed Nov 03 16:38:28 2010 +0100 @@ -16,7 +16,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""unit tests for modules cubicweb.server.querier and cubicweb.server.querier_steps +"""unit tests for modules cubicweb.server.querier and cubicweb.server.ssplanner """ from datetime import date, datetime @@ -57,7 +57,7 @@ class MakeSchemaTC(TestCase): def test_known_values(self): solution = {'A': 'String', 'B': 'CWUser'} - self.assertEquals(make_schema((Variable('A'), Variable('B')), solution, + self.assertEqual(make_schema((Variable('A'), Variable('B')), solution, 'table0', TYPEMAP), ('C0 text,C1 integer', {'A': 'table0.C0', 'B': 'table0.C1'})) @@ -84,7 +84,7 @@ def test_preprocess_1(self): reid = self.execute('Any X WHERE X is CWRType, X name "owned_by"')[0][0] rqlst = self._prepare('Any COUNT(RDEF) WHERE RDEF relation_type X, X eid %(x)s', {'x': reid}) - self.assertEquals(rqlst.solutions, [{'RDEF': 'CWAttribute'}, {'RDEF': 'CWRelation'}]) + self.assertEqual(rqlst.solutions, [{'RDEF': 'CWAttribute'}, {'RDEF': 'CWRelation'}]) def test_preprocess_2(self): teid = self.execute("INSERT Tag X: X name 'tag'")[0][0] @@ -94,7 +94,7 @@ rqlst = self._prepare('Any X WHERE E eid %(x)s, E tags X', {'x': teid}) # the query may be optimized, should keep only one solution # (any one, etype will be discarded) - self.assertEquals(len(rqlst.solutions), 1) + self.assertEqual(len(rqlst.solutions), 1) def test_preprocess_security(self): plan = self._prepare_plan('Any ETN,COUNT(X) GROUPBY ETN ' @@ -102,24 +102,24 @@ plan.session = self.user_groups_session('users') union = plan.rqlst plan.preprocess(union) - self.assertEquals(len(union.children), 1) - self.assertEquals(len(union.children[0].with_), 1) + self.assertEqual(len(union.children), 1) + self.assertEqual(len(union.children[0].with_), 1) subq = union.children[0].with_[0].query - self.assertEquals(len(subq.children), 3) - self.assertEquals([t.as_string() for t in union.children[0].selection], + self.assertEqual(len(subq.children), 3) + self.assertEqual([t.as_string() for t in union.children[0].selection], ['ETN','COUNT(X)']) - self.assertEquals([t.as_string() for t in union.children[0].groupby], + self.assertEqual([t.as_string() for t in union.children[0].groupby], ['ETN']) partrqls = sorted(((rqlst.as_string(), rqlst.solutions) for rqlst in subq.children)) rql, solutions = partrqls[0] - self.assertEquals(rql, + self.assertEqual(rql, 'Any ETN,X WHERE X is ET, ET name ETN, (EXISTS(X owned_by %(B)s))' ' OR ((((EXISTS(D concerne C?, C owned_by %(B)s, X identity D, C is Division, D is Affaire))' ' OR (EXISTS(H concerne G?, G owned_by %(B)s, G is SubDivision, X identity H, H is Affaire)))' ' OR (EXISTS(I concerne F?, F owned_by %(B)s, F is Societe, X identity I, I is Affaire)))' ' OR (EXISTS(J concerne E?, E owned_by %(B)s, E is Note, X identity J, J is Affaire)))' ', ET is CWEType, X is Affaire') - self.assertEquals(solutions, [{'C': 'Division', + self.assertEqual(solutions, [{'C': 'Division', 'D': 'Affaire', 'E': 'Note', 'F': 'Societe', @@ -130,8 +130,8 @@ 'X': 'Affaire', 'ET': 'CWEType', 'ETN': 'String'}]) rql, solutions = partrqls[1] - self.assertEquals(rql, 'Any ETN,X WHERE X is ET, ET name ETN, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWUser, Card, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Image, Note, Personne, RQLExpression, Societe, State, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)') - self.assertListEquals(sorted(solutions), + 
self.assertEqual(rql, 'Any ETN,X WHERE X is ET, ET name ETN, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWUniqueTogetherConstraint, CWUser, Card, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Note, Personne, RQLExpression, Societe, State, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)') + self.assertListEqual(sorted(solutions), sorted([{'X': 'BaseTransition', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'Bookmark', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'Card', 'ETN': 'String', 'ET': 'CWEType'}, @@ -143,19 +143,19 @@ {'X': 'CWEType', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'CWAttribute', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'CWGroup', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWRelation', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWPermission', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWProperty', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWRType', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWUniqueTogetherConstraint', 'ETN': 'String', 'ET': 'CWEType'}, + {'X': 'CWUser', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'Email', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'EmailAddress', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'EmailPart', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'EmailThread', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWRelation', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWPermission', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWProperty', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWRType', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'CWUser', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'ExternalUri', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'File', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'Folder', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Image', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'Note', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'Personne', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'RQLExpression', 'ETN': 'String', 'ET': 'CWEType'}, @@ -169,10 +169,10 @@ {'X': 'Workflow', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'WorkflowTransition', 'ETN': 'String', 'ET': 'CWEType'}])) rql, solutions = partrqls[2] - self.assertEquals(rql, + self.assertEqual(rql, 'Any ETN,X WHERE X is ET, ET name ETN, EXISTS(X owned_by %(C)s), ' 'ET is CWEType, X is Basket') - self.assertEquals(solutions, [{'ET': 'CWEType', + self.assertEqual(solutions, [{'ET': 'CWEType', 'X': 'Basket', 'ETN': 'String', }]) @@ -182,45 +182,45 @@ plan.session = self.user_groups_session('users') union = plan.rqlst plan.preprocess(union) - self.assertEquals(len(union.children), 1) - self.assertEquals(len(union.children[0].with_), 1) + self.assertEqual(len(union.children), 1) + self.assertEqual(len(union.children[0].with_), 1) subq = union.children[0].with_[0].query - self.assertEquals(len(subq.children), 3) - self.assertEquals([t.as_string() for t in union.children[0].selection], + self.assertEqual(len(subq.children), 3) + self.assertEqual([t.as_string() for t in union.children[0].selection], ['MAX(X)']) def test_preprocess_nonregr(self): rqlst = self._prepare('Any S ORDERBY SI WHERE NOT S ecrit_par O, S para SI') - self.assertEquals(len(rqlst.solutions), 1) + self.assertEqual(len(rqlst.solutions), 1) def test_build_description(self): # should return an empty result set rset = self.execute('Any X WHERE X eid %(x)s', {'x': self.session.user.eid}) - self.assertEquals(rset.description[0][0], 'CWUser') + 
self.assertEqual(rset.description[0][0], 'CWUser') rset = self.execute('Any 1') - self.assertEquals(rset.description[0][0], 'Int') + self.assertEqual(rset.description[0][0], 'Int') rset = self.execute('Any TRUE') - self.assertEquals(rset.description[0][0], 'Boolean') + self.assertEqual(rset.description[0][0], 'Boolean') rset = self.execute('Any "hop"') - self.assertEquals(rset.description[0][0], 'String') + self.assertEqual(rset.description[0][0], 'String') rset = self.execute('Any TODAY') - self.assertEquals(rset.description[0][0], 'Date') + self.assertEqual(rset.description[0][0], 'Date') rset = self.execute('Any NOW') - self.assertEquals(rset.description[0][0], 'Datetime') + self.assertEqual(rset.description[0][0], 'Datetime') rset = self.execute('Any %(x)s', {'x': 1}) - self.assertEquals(rset.description[0][0], 'Int') + self.assertEqual(rset.description[0][0], 'Int') rset = self.execute('Any %(x)s', {'x': 1L}) - self.assertEquals(rset.description[0][0], 'Int') + self.assertEqual(rset.description[0][0], 'Int') rset = self.execute('Any %(x)s', {'x': True}) - self.assertEquals(rset.description[0][0], 'Boolean') + self.assertEqual(rset.description[0][0], 'Boolean') rset = self.execute('Any %(x)s', {'x': 1.0}) - self.assertEquals(rset.description[0][0], 'Float') + self.assertEqual(rset.description[0][0], 'Float') rset = self.execute('Any %(x)s', {'x': datetime.now()}) - self.assertEquals(rset.description[0][0], 'Datetime') + self.assertEqual(rset.description[0][0], 'Datetime') rset = self.execute('Any %(x)s', {'x': 'str'}) - self.assertEquals(rset.description[0][0], 'String') + self.assertEqual(rset.description[0][0], 'String') rset = self.execute('Any %(x)s', {'x': u'str'}) - self.assertEquals(rset.description[0][0], 'String') + self.assertEqual(rset.description[0][0], 'String') class QuerierTC(BaseQuerierTC): @@ -244,46 +244,46 @@ {'data': Binary("xxx")})[0][0] fdata = self.execute('Any D WHERE X data D, X eid %(x)s', {'x': feid})[0][0] self.assertIsInstance(fdata, Binary) - self.assertEquals(fdata.getvalue(), 'xxx') + self.assertEqual(fdata.getvalue(), 'xxx') # selection queries tests ################################################# def test_select_1(self): rset = self.execute('Any X ORDERBY X WHERE X is CWGroup') result, descr = rset.rows, rset.description - self.assertEquals(tuplify(result), [(1,), (2,), (3,), (4,)]) - self.assertEquals(descr, [('CWGroup',), ('CWGroup',), ('CWGroup',), ('CWGroup',)]) + self.assertEqual(tuplify(result), [(1,), (2,), (3,), (4,)]) + self.assertEqual(descr, [('CWGroup',), ('CWGroup',), ('CWGroup',), ('CWGroup',)]) def test_select_2(self): rset = self.execute('Any X ORDERBY N WHERE X is CWGroup, X name N') - self.assertEquals(tuplify(rset.rows), [(1,), (2,), (3,), (4,)]) - self.assertEquals(rset.description, [('CWGroup',), ('CWGroup',), ('CWGroup',), ('CWGroup',)]) + self.assertEqual(tuplify(rset.rows), [(1,), (2,), (3,), (4,)]) + self.assertEqual(rset.description, [('CWGroup',), ('CWGroup',), ('CWGroup',), ('CWGroup',)]) rset = self.execute('Any X ORDERBY N DESC WHERE X is CWGroup, X name N') - self.assertEquals(tuplify(rset.rows), [(4,), (3,), (2,), (1,)]) + self.assertEqual(tuplify(rset.rows), [(4,), (3,), (2,), (1,)]) def test_select_3(self): rset = self.execute('Any N GROUPBY N WHERE X is CWGroup, X name N') result, descr = rset.rows, rset.description result.sort() - self.assertEquals(tuplify(result), [('guests',), ('managers',), ('owners',), ('users',)]) - self.assertEquals(descr, [('String',), ('String',), ('String',), ('String',)]) + 
self.assertEqual(tuplify(result), [('guests',), ('managers',), ('owners',), ('users',)]) + self.assertEqual(descr, [('String',), ('String',), ('String',), ('String',)]) def test_select_is(self): rset = self.execute('Any X, TN ORDERBY TN LIMIT 10 WHERE X is T, T name TN') result, descr = rset.rows, rset.description - self.assertEquals(result[0][1], descr[0][0]) + self.assertEqual(result[0][1], descr[0][0]) def test_select_is_aggr(self): rset = self.execute('Any TN, COUNT(X) GROUPBY TN ORDERBY 2 DESC WHERE X is T, T name TN') result, descr = rset.rows, rset.description - self.assertEquals(descr[0][0], 'String') - self.assertEquals(descr[0][1], 'Int') - self.assertEquals(result[0][0], 'CWRelation') # XXX may change as schema evolve + self.assertEqual(descr[0][0], 'String') + self.assertEqual(descr[0][1], 'Int') + self.assertEqual(result[0][0], 'CWRelation') # XXX may change as schema evolve def test_select_groupby_orderby(self): rset = self.execute('Any N GROUPBY N ORDERBY N WHERE X is CWGroup, X name N') - self.assertEquals(tuplify(rset.rows), [('guests',), ('managers',), ('owners',), ('users',)]) - self.assertEquals(rset.description, [('String',), ('String',), ('String',), ('String',)]) + self.assertEqual(tuplify(rset.rows), [('guests',), ('managers',), ('owners',), ('users',)]) + self.assertEqual(rset.description, [('String',), ('String',), ('String',), ('String',)]) def test_select_complex_groupby(self): rset = self.execute('Any N GROUPBY N WHERE X name N') @@ -295,20 +295,20 @@ def test_select_complex_orderby(self): rset1 = self.execute('Any N ORDERBY N WHERE X name N') - self.assertEquals(sorted(rset1.rows), rset1.rows) + self.assertEqual(sorted(rset1.rows), rset1.rows) rset = self.execute('Any N ORDERBY N LIMIT 5 OFFSET 1 WHERE X name N') - self.assertEquals(rset.rows[0][0], rset1.rows[1][0]) - self.assertEquals(len(rset), 5) + self.assertEqual(rset.rows[0][0], rset1.rows[1][0]) + self.assertEqual(len(rset), 5) def test_select_5(self): rset = self.execute('Any X, TMP ORDERBY TMP WHERE X name TMP, X is CWGroup') - self.assertEquals(tuplify(rset.rows), [(1, 'guests',), (2, 'managers',), (3, 'owners',), (4, 'users',)]) - self.assertEquals(rset.description, [('CWGroup', 'String',), ('CWGroup', 'String',), ('CWGroup', 'String',), ('CWGroup', 'String',)]) + self.assertEqual(tuplify(rset.rows), [(1, 'guests',), (2, 'managers',), (3, 'owners',), (4, 'users',)]) + self.assertEqual(rset.description, [('CWGroup', 'String',), ('CWGroup', 'String',), ('CWGroup', 'String',), ('CWGroup', 'String',)]) def test_select_6(self): self.execute("INSERT Personne X: X nom 'bidule'")[0] rset = self.execute('Any Y where X name TMP, Y nom in (TMP, "bidule")') - #self.assertEquals(rset.description, [('Personne',), ('Personne',)]) + #self.assertEqual(rset.description, [('Personne',), ('Personne',)]) self.assert_(('Personne',) in rset.description) rset = self.execute('DISTINCT Any Y where X name TMP, Y nom in (TMP, "bidule")') self.assert_(('Personne',) in rset.description) @@ -317,17 +317,17 @@ peid = self.execute("INSERT Personne X: X nom 'bidule'")[0][0] seid = self.execute("INSERT Societe X: X nom 'chouette'")[0][0] rset = self.execute('Personne X WHERE NOT X nom "bidule"') - self.assertEquals(len(rset.rows), 0, rset.rows) + self.assertEqual(len(rset.rows), 0, rset.rows) rset = self.execute('Personne X WHERE NOT X nom "bid"') - self.assertEquals(len(rset.rows), 1, rset.rows) + self.assertEqual(len(rset.rows), 1, rset.rows) self.execute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'") rset = 
self.execute('Personne X WHERE NOT X travaille S') - self.assertEquals(len(rset.rows), 0, rset.rows) + self.assertEqual(len(rset.rows), 0, rset.rows) def test_select_is_in(self): self.execute("INSERT Personne X: X nom 'bidule'") self.execute("INSERT Societe X: X nom 'chouette'") - self.assertEquals(len(self.execute("Any X WHERE X is IN (Personne, Societe)")), + self.assertEqual(len(self.execute("Any X WHERE X is IN (Personne, Societe)")), 2) def test_select_not_rel(self): @@ -336,9 +336,9 @@ self.execute("INSERT Personne X: X nom 'autre'") self.execute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'") rset = self.execute('Personne X WHERE NOT X travaille S') - self.assertEquals(len(rset.rows), 1, rset.rows) + self.assertEqual(len(rset.rows), 1, rset.rows) rset = self.execute('Personne X WHERE NOT X travaille S, S nom "chouette"') - self.assertEquals(len(rset.rows), 1, rset.rows) + self.assertEqual(len(rset.rows), 1, rset.rows) def test_select_nonregr_inlined(self): self.execute("INSERT Note X: X para 'bidule'") @@ -347,15 +347,15 @@ self.execute("SET X ecrit_par P WHERE X para 'bidule', P nom 'chouette'") rset = self.execute('Any U,T ORDERBY T DESC WHERE U is CWUser, ' 'N ecrit_par U, N type T')#, {'x': self.ueid}) - self.assertEquals(len(rset.rows), 0) + self.assertEqual(len(rset.rows), 0) def test_select_nonregr_edition_not(self): groupeids = set((1, 2, 3)) groupreadperms = set(r[0] for r in self.execute('Any Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), X read_permission Y')) rset = self.execute('DISTINCT Any Y WHERE X is CWEType, X name "CWGroup", Y eid IN(1, 2, 3), NOT X read_permission Y') - self.assertEquals(sorted(r[0] for r in rset.rows), sorted(groupeids - groupreadperms)) + self.assertEqual(sorted(r[0] for r in rset.rows), sorted(groupeids - groupreadperms)) rset = self.execute('DISTINCT Any Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), NOT X read_permission Y') - self.assertEquals(sorted(r[0] for r in rset.rows), sorted(groupeids - groupreadperms)) + self.assertEqual(sorted(r[0] for r in rset.rows), sorted(groupeids - groupreadperms)) def test_select_outer_join(self): peid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0] @@ -363,27 +363,27 @@ seid1 = self.execute("INSERT Societe X: X nom 'chouette'")[0][0] seid2 = self.execute("INSERT Societe X: X nom 'chouetos'")[0][0] rset = self.execute('Any X,S ORDERBY X WHERE X travaille S?') - self.assertEquals(rset.rows, [[peid1, None], [peid2, None]]) + self.assertEqual(rset.rows, [[peid1, None], [peid2, None]]) self.execute("SET P travaille S WHERE P nom 'bidule', S nom 'chouette'") rset = self.execute('Any X,S ORDERBY X WHERE X travaille S?') - self.assertEquals(rset.rows, [[peid1, seid1], [peid2, None]]) + self.assertEqual(rset.rows, [[peid1, seid1], [peid2, None]]) rset = self.execute('Any S,X ORDERBY S WHERE X? travaille S') - self.assertEquals(rset.rows, [[seid1, peid1], [seid2, None]]) + self.assertEqual(rset.rows, [[seid1, peid1], [seid2, None]]) def test_select_outer_join_optimized(self): peid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0] rset = self.execute('Any X WHERE X eid %(x)s, P? connait X', {'x':peid1}) - self.assertEquals(rset.rows, [[peid1]]) + self.assertEqual(rset.rows, [[peid1]]) rset = self.execute('Any X WHERE X eid %(x)s, X require_permission P?', {'x':peid1}) - self.assertEquals(rset.rows, [[peid1]]) + self.assertEqual(rset.rows, [[peid1]]) def test_select_left_outer_join(self): rset = self.execute('DISTINCT Any G WHERE U? 
in_group G') - self.assertEquals(len(rset), 4) + self.assertEqual(len(rset), 4) rset = self.execute('DISTINCT Any G WHERE U? in_group G, U eid %(x)s', {'x': self.session.user.eid}) - self.assertEquals(len(rset), 4) + self.assertEqual(len(rset), 4) def test_select_ambigous_outer_join(self): teid = self.execute("INSERT Tag X: X name 'tag'")[0][0] @@ -395,7 +395,7 @@ self.failUnless(['users', 'tag'] in rset.rows) self.failUnless(['activated', None] in rset.rows) rset = self.execute("Any GN,TN ORDERBY GN WHERE T tags G?, T name TN, G name GN") - self.assertEquals(rset.rows, [[None, 'tagbis'], ['users', 'tag']]) + self.assertEqual(rset.rows, [[None, 'tagbis'], ['users', 'tag']]) def test_select_not_inline_rel(self): self.execute("INSERT Personne X: X nom 'bidule'") @@ -403,7 +403,7 @@ self.execute("INSERT Note X: X type 'b'") self.execute("SET X ecrit_par Y WHERE X type 'a', Y nom 'bidule'") rset = self.execute('Note X WHERE NOT X ecrit_par P') - self.assertEquals(len(rset.rows), 1, rset.rows) + self.assertEqual(len(rset.rows), 1, rset.rows) def test_select_not_unlinked_multiple_solutions(self): self.execute("INSERT Personne X: X nom 'bidule'") @@ -411,7 +411,7 @@ self.execute("INSERT Note X: X type 'b'") self.execute("SET Y evaluee X WHERE X type 'a', Y nom 'bidule'") rset = self.execute('Note X WHERE NOT Y evaluee X') - self.assertEquals(len(rset.rows), 1, rset.rows) + self.assertEqual(len(rset.rows), 1, rset.rows) def test_select_date_extraction(self): self.execute("INSERT Personne X: X nom 'foo', X datenaiss %(d)s", @@ -421,41 +421,41 @@ for funcname, result in test_data: rset = self.execute('Any %s(D) WHERE X is Personne, X datenaiss D' % funcname) - self.assertEquals(len(rset.rows), 1) - self.assertEquals(rset.rows[0][0], result) - self.assertEquals(rset.description, [('Int',)]) + self.assertEqual(len(rset.rows), 1) + self.assertEqual(rset.rows[0][0], result) + self.assertEqual(rset.description, [('Int',)]) def test_select_aggregat_count(self): rset = self.execute('Any COUNT(X)') - self.assertEquals(len(rset.rows), 1) - self.assertEquals(len(rset.rows[0]), 1) - self.assertEquals(rset.description, [('Int',)]) + self.assertEqual(len(rset.rows), 1) + self.assertEqual(len(rset.rows[0]), 1) + self.assertEqual(rset.description, [('Int',)]) def test_select_aggregat_sum(self): rset = self.execute('Any SUM(O) WHERE X ordernum O') - self.assertEquals(len(rset.rows), 1) - self.assertEquals(len(rset.rows[0]), 1) - self.assertEquals(rset.description, [('Int',)]) + self.assertEqual(len(rset.rows), 1) + self.assertEqual(len(rset.rows[0]), 1) + self.assertEqual(rset.description, [('Int',)]) def test_select_aggregat_min(self): rset = self.execute('Any MIN(X) WHERE X is Personne') - self.assertEquals(len(rset.rows), 1) - self.assertEquals(len(rset.rows[0]), 1) - self.assertEquals(rset.description, [('Personne',)]) + self.assertEqual(len(rset.rows), 1) + self.assertEqual(len(rset.rows[0]), 1) + self.assertEqual(rset.description, [('Personne',)]) rset = self.execute('Any MIN(O) WHERE X ordernum O') - self.assertEquals(len(rset.rows), 1) - self.assertEquals(len(rset.rows[0]), 1) - self.assertEquals(rset.description, [('Int',)]) + self.assertEqual(len(rset.rows), 1) + self.assertEqual(len(rset.rows[0]), 1) + self.assertEqual(rset.description, [('Int',)]) def test_select_aggregat_max(self): rset = self.execute('Any MAX(X) WHERE X is Personne') - self.assertEquals(len(rset.rows), 1) - self.assertEquals(len(rset.rows[0]), 1) - self.assertEquals(rset.description, [('Personne',)]) + self.assertEqual(len(rset.rows), 
1) + self.assertEqual(len(rset.rows[0]), 1) + self.assertEqual(rset.description, [('Personne',)]) rset = self.execute('Any MAX(O) WHERE X ordernum O') - self.assertEquals(len(rset.rows), 1) - self.assertEquals(len(rset.rows[0]), 1) - self.assertEquals(rset.description, [('Int',)]) + self.assertEqual(len(rset.rows), 1) + self.assertEqual(len(rset.rows[0]), 1) + self.assertEqual(rset.description, [('Int',)]) def test_select_custom_aggregat_concat_string(self): rset = self.execute('Any GROUP_CONCAT(N) WHERE X is CWGroup, X name N') @@ -482,17 +482,17 @@ def test_select_aggregat_sort(self): rset = self.execute('Any G, COUNT(U) GROUPBY G ORDERBY 2 WHERE U in_group G') - self.assertEquals(len(rset.rows), 2) - self.assertEquals(len(rset.rows[0]), 2) - self.assertEquals(rset.description[0], ('CWGroup', 'Int',)) + self.assertEqual(len(rset.rows), 2) + self.assertEqual(len(rset.rows[0]), 2) + self.assertEqual(rset.description[0], ('CWGroup', 'Int',)) def test_select_aggregat_having(self): rset = self.execute('Any N,COUNT(RDEF) GROUPBY N ORDERBY 2,N ' 'WHERE RT name N, RDEF relation_type RT ' 'HAVING COUNT(RDEF) > 10') - self.assertListEquals(rset.rows, - [[u'description_format', 13], - [u'description', 14], + self.assertListEqual(rset.rows, + [[u'description_format', 12], + [u'description', 13], [u'name', 14], [u'created_by', 38], [u'creation_date', 38], @@ -508,13 +508,13 @@ rset = self.execute('Any U,COUNT(X) GROUPBY U ' 'WHERE U eid %(x)s, X owned_by U ' 'HAVING COUNT(X) > 10', {'x': self.ueid}) - self.assertEquals(len(rset.rows), 1) - self.assertEquals(rset.rows[0][0], self.ueid) + self.assertEqual(len(rset.rows), 1) + self.assertEqual(rset.rows[0][0], self.ueid) def test_select_having_non_aggregat_1(self): rset = self.execute('Any L WHERE X login L, X creation_date CD ' 'HAVING YEAR(CD) = %s' % date.today().year) - self.assertListEquals(rset.rows, + self.assertListEqual(rset.rows, [[u'admin'], [u'anon']]) @@ -522,7 +522,7 @@ rset = self.execute('Any L GROUPBY L WHERE X login L, X in_group G, ' 'X creation_date CD HAVING YEAR(CD) = %s OR COUNT(G) > 1' % date.today().year) - self.assertListEquals(rset.rows, + self.assertListEqual(rset.rows, [[u'admin'], [u'anon']]) @@ -531,26 +531,26 @@ rset = self.execute('Any X ORDERBY X,D LIMIT 5 WHERE X creation_date D') result = rset.rows result.sort() - self.assertEquals(tuplify(result), [(1,), (2,), (3,), (4,), (5,)]) + self.assertEqual(tuplify(result), [(1,), (2,), (3,), (4,), (5,)]) def test_select_upper(self): rset = self.execute('Any X, UPPER(L) ORDERBY L WHERE X is CWUser, X login L') - self.assertEquals(len(rset.rows), 2) - self.assertEquals(rset.rows[0][1], 'ADMIN') - self.assertEquals(rset.description[0], ('CWUser', 'String',)) - self.assertEquals(rset.rows[1][1], 'ANON') - self.assertEquals(rset.description[1], ('CWUser', 'String',)) + self.assertEqual(len(rset.rows), 2) + self.assertEqual(rset.rows[0][1], 'ADMIN') + self.assertEqual(rset.description[0], ('CWUser', 'String',)) + self.assertEqual(rset.rows[1][1], 'ANON') + self.assertEqual(rset.description[1], ('CWUser', 'String',)) eid = rset.rows[0][0] rset = self.execute('Any UPPER(L) WHERE X eid %s, X login L'%eid) - self.assertEquals(rset.rows[0][0], 'ADMIN') - self.assertEquals(rset.description, [('String',)]) + self.assertEqual(rset.rows[0][0], 'ADMIN') + self.assertEqual(rset.description, [('String',)]) ## def test_select_simplified(self): ## ueid = self.session.user.eid ## rset = self.execute('Any L WHERE %s login L'%ueid) -## self.assertEquals(rset.rows[0][0], 'admin') +## 
self.assertEqual(rset.rows[0][0], 'admin') ## rset = self.execute('Any L WHERE %(x)s login L', {'x':ueid}) -## self.assertEquals(rset.rows[0][0], 'admin') +## self.assertEqual(rset.rows[0][0], 'admin') def test_select_searchable_text_1(self): rset = self.execute(u"INSERT Personne X: X nom 'bidüle'") @@ -558,9 +558,9 @@ rset = self.execute("INSERT Societe X: X nom 'chouette'") self.commit() rset = self.execute('Any X where X has_text %(text)s', {'text': u'bidüle'}) - self.assertEquals(len(rset.rows), 2, rset.rows) + self.assertEqual(len(rset.rows), 2, rset.rows) rset = self.execute(u'Any N where N has_text "bidüle"') - self.assertEquals(len(rset.rows), 2, rset.rows) + self.assertEqual(len(rset.rows), 2, rset.rows) biduleeids = [r[0] for r in rset.rows] rset = self.execute(u'Any N where NOT N has_text "bidüle"') self.failIf([r[0] for r in rset.rows if r[0] in biduleeids]) @@ -573,7 +573,7 @@ rset = self.execute("INSERT Societe X: X nom 'bidule'") self.commit() rset = self.execute('Personne N where N has_text "bidule"') - self.assertEquals(len(rset.rows), 1, rset.rows) + self.assertEqual(len(rset.rows), 1, rset.rows) def test_select_searchable_text_3(self): rset = self.execute("INSERT Personne X: X nom 'bidule', X sexe 'M'") @@ -581,7 +581,7 @@ rset = self.execute("INSERT Societe X: X nom 'bidule'") self.commit() rset = self.execute('Any X where X has_text "bidule" and X sexe "M"') - self.assertEquals(len(rset.rows), 1, rset.rows) + self.assertEqual(len(rset.rows), 1, rset.rows) def test_select_multiple_searchable_text(self): self.execute(u"INSERT Personne X: X nom 'bidüle'") @@ -592,20 +592,20 @@ {'text': u'bidüle', 'text2': u'chouette',} ) - self.assertEquals(len(rset.rows), 1, rset.rows) + self.assertEqual(len(rset.rows), 1, rset.rows) def test_select_no_descr(self): rset = self.execute('Any X WHERE X is CWGroup', build_descr=0) rset.rows.sort() - self.assertEquals(tuplify(rset.rows), [(1,), (2,), (3,), (4,)]) - self.assertEquals(rset.description, ()) + self.assertEqual(tuplify(rset.rows), [(1,), (2,), (3,), (4,)]) + self.assertEqual(rset.description, ()) def test_select_limit_offset(self): rset = self.execute('CWGroup X ORDERBY N LIMIT 2 WHERE X name N') - self.assertEquals(tuplify(rset.rows), [(1,), (2,)]) - self.assertEquals(rset.description, [('CWGroup',), ('CWGroup',)]) + self.assertEqual(tuplify(rset.rows), [(1,), (2,)]) + self.assertEqual(rset.description, [('CWGroup',), ('CWGroup',)]) rset = self.execute('CWGroup X ORDERBY N LIMIT 2 OFFSET 2 WHERE X name N') - self.assertEquals(tuplify(rset.rows), [(3,), (4,)]) + self.assertEqual(tuplify(rset.rows), [(3,), (4,)]) def test_select_symmetric(self): self.execute("INSERT Personne X: X nom 'machin'") @@ -615,24 +615,24 @@ self.execute("SET X connait Y WHERE X nom 'chouette', Y nom 'bidule'") self.execute("SET X connait Y WHERE X nom 'machin', Y nom 'chouette'") rset = self.execute('Any P where P connait P2') - self.assertEquals(len(rset.rows), 3, rset.rows) + self.assertEqual(len(rset.rows), 3, rset.rows) rset = self.execute('Any P where NOT P connait P2') - self.assertEquals(len(rset.rows), 1, rset.rows) # trucmuche + self.assertEqual(len(rset.rows), 1, rset.rows) # trucmuche rset = self.execute('Any P where P connait P2, P2 nom "bidule"') - self.assertEquals(len(rset.rows), 1, rset.rows) + self.assertEqual(len(rset.rows), 1, rset.rows) rset = self.execute('Any P where P2 connait P, P2 nom "bidule"') - self.assertEquals(len(rset.rows), 1, rset.rows) + self.assertEqual(len(rset.rows), 1, rset.rows) rset = self.execute('Any P where P 
connait P2, P2 nom "chouette"') - self.assertEquals(len(rset.rows), 2, rset.rows) + self.assertEqual(len(rset.rows), 2, rset.rows) rset = self.execute('Any P where P2 connait P, P2 nom "chouette"') - self.assertEquals(len(rset.rows), 2, rset.rows) + self.assertEqual(len(rset.rows), 2, rset.rows) def test_select_inline(self): self.execute("INSERT Personne X: X nom 'bidule'") self.execute("INSERT Note X: X type 'a'") self.execute("SET X ecrit_par Y WHERE X type 'a', Y nom 'bidule'") rset = self.execute('Any N where N ecrit_par X, X nom "bidule"') - self.assertEquals(len(rset.rows), 1, rset.rows) + self.assertEqual(len(rset.rows), 1, rset.rows) def test_select_creation_date(self): self.execute("INSERT Personne X: X nom 'bidule'") @@ -679,19 +679,15 @@ self.assertEqual(len(rset.rows), 1, rset.rows) def test_select_ordered_distinct_1(self): - self.execute("INSERT Affaire X: X sujet 'cool', X ref '1'") - self.execute("INSERT Affaire X: X sujet 'cool', X ref '2'") - rset = self.execute('DISTINCT Any S ORDERBY R WHERE A is Affaire, A sujet S, A ref R') - self.assertEqual(rset.rows, [['cool']]) + self.assertRaises(BadRQLQuery, + self.execute, 'DISTINCT Any S ORDERBY R WHERE A is Affaire, A sujet S, A ref R') def test_select_ordered_distinct_2(self): self.execute("INSERT Affaire X: X sujet 'minor'") - self.execute("INSERT Affaire X: X sujet 'important'") - self.execute("INSERT Affaire X: X sujet 'normal'") self.execute("INSERT Affaire X: X sujet 'zou'") self.execute("INSERT Affaire X: X sujet 'abcd'") rset = self.execute('DISTINCT Any S ORDERBY S WHERE A is Affaire, A sujet S') - self.assertEqual(rset.rows, [['abcd'], ['important'], ['minor'], ['normal'], ['zou']]) + self.assertEqual(rset.rows, [['abcd'], ['minor'], ['zou']]) def test_select_ordered_distinct_3(self): rset = self.execute('DISTINCT Any N ORDERBY GROUP_SORT_VALUE(N) WHERE X is CWGroup, X name N') @@ -706,13 +702,13 @@ def test_select_explicit_eid(self): rset = self.execute('Any X,E WHERE X owned_by U, X eid E, U eid %(u)s', {'u': self.session.user.eid}) self.failUnless(rset) - self.assertEquals(rset.description[0][1], 'Int') + self.assertEqual(rset.description[0][1], 'Int') # def test_select_rewritten_optional(self): # eid = self.execute("INSERT Affaire X: X sujet 'cool'")[0][0] # rset = self.execute('Any X WHERE X eid %(x)s, EXISTS(X owned_by U) OR EXISTS(X concerne S?, S owned_by U)', # {'x': eid}, 'x') -# self.assertEquals(rset.rows, [[eid]]) +# self.assertEqual(rset.rows, [[eid]]) def test_today_bug(self): self.execute("INSERT Tag X: X name 'bidule', X creation_date NOW") @@ -733,14 +729,14 @@ def test_select_boolean(self): rset = self.execute('Any N WHERE X is CWEType, X name N, X final %(val)s', {'val': True}) - self.assertEquals(sorted(r[0] for r in rset.rows), ['Boolean', 'Bytes', + self.assertEqual(sorted(r[0] for r in rset.rows), ['Boolean', 'Bytes', 'Date', 'Datetime', 'Decimal', 'Float', 'Int', 'Interval', 'Password', 'String', 'Time']) rset = self.execute('Any N WHERE X is CWEType, X name N, X final TRUE') - self.assertEquals(sorted(r[0] for r in rset.rows), ['Boolean', 'Bytes', + self.assertEqual(sorted(r[0] for r in rset.rows), ['Boolean', 'Bytes', 'Date', 'Datetime', 'Decimal', 'Float', 'Int', 'Interval', @@ -749,17 +745,17 @@ def test_select_constant(self): rset = self.execute('Any X, "toto" ORDERBY X WHERE X is CWGroup') - self.assertEquals(rset.rows, + self.assertEqual(rset.rows, map(list, zip((1,2,3,4), ('toto','toto','toto','toto',)))) self.assertIsInstance(rset[0][1], unicode) - self.assertEquals(rset.description, 
+ self.assertEqual(rset.description, zip(('CWGroup', 'CWGroup', 'CWGroup', 'CWGroup'), ('String', 'String', 'String', 'String',))) rset = self.execute('Any X, %(value)s ORDERBY X WHERE X is CWGroup', {'value': 'toto'}) - self.assertEquals(rset.rows, + self.assertEqual(rset.rows, map(list, zip((1,2,3,4), ('toto','toto','toto','toto',)))) self.assertIsInstance(rset[0][1], unicode) - self.assertEquals(rset.description, + self.assertEqual(rset.description, zip(('CWGroup', 'CWGroup', 'CWGroup', 'CWGroup'), ('String', 'String', 'String', 'String',))) rset = self.execute('Any X,GN WHERE X is CWUser, G is CWGroup, X login "syt", X in_group G, G name GN') @@ -770,9 +766,9 @@ ' UNION ' '(Any X,N WHERE X name N, X state_of WF, WF workflow_of E, E name %(name)s))', {'name': 'CWUser'}) - self.assertEquals([x[1] for x in rset.rows], + self.assertEqual([x[1] for x in rset.rows], ['activate', 'activated', 'deactivate', 'deactivated']) - self.assertEquals(rset.description, + self.assertEqual(rset.description, [('Transition', 'String'), ('State', 'String'), ('Transition', 'String'), ('State', 'String')]) @@ -792,13 +788,13 @@ '((Any N,COUNT(X) GROUPBY N WHERE X name N, X is State HAVING COUNT(X)>1)' ' UNION ' '(Any N,COUNT(X) GROUPBY N WHERE X name N, X is Transition HAVING COUNT(X)>1))') - self.assertEquals(rset.rows, [[u'hop', 2], [u'hop', 2]]) + self.assertEqual(rset.rows, [[u'hop', 2], [u'hop', 2]]) def test_select_union_selection_with_diff_variables(self): rset = self.execute('(Any N WHERE X name N, X is State)' ' UNION ' '(Any NN WHERE XX name NN, XX is Transition)') - self.assertEquals(sorted(r[0] for r in rset.rows), + self.assertEqual(sorted(r[0] for r in rset.rows), ['abort', 'activate', 'activated', 'ben non', 'deactivate', 'deactivated', 'done', 'en cours', 'end', 'finie', 'markasdone', 'pitetre', 'redoit', @@ -811,7 +807,7 @@ ' UNION ' '(Any Y WHERE Y eid %(y)s)', {'x': eid1, 'y': eid2}) - self.assertEquals(rset.description[:], [('CWGroup',), ('CWUser',)]) + self.assertEqual(rset.description[:], [('CWGroup',), ('CWUser',)]) def test_exists(self): geid = self.execute("INSERT CWGroup X: X name 'lulufanclub'")[0][0] @@ -819,15 +815,15 @@ peid = self.execute("INSERT Personne X: X prenom 'lulu', X nom 'petit'")[0][0] rset = self.execute("Any X WHERE X prenom 'lulu'," "EXISTS (U in_group G, G name 'lulufanclub' OR G name 'managers');") - self.assertEquals(rset.rows, [[peid]]) + self.assertEqual(rset.rows, [[peid]]) def test_identity(self): eid = self.execute('Any X WHERE X identity Y, Y eid 1')[0][0] - self.assertEquals(eid, 1) + self.assertEqual(eid, 1) eid = self.execute('Any X WHERE Y identity X, Y eid 1')[0][0] - self.assertEquals(eid, 1) + self.assertEqual(eid, 1) login = self.execute('Any L WHERE X login "admin", X identity Y, Y login L')[0][0] - self.assertEquals(login, 'admin') + self.assertEqual(login, 'admin') def test_select_date_mathexp(self): rset = self.execute('Any X, TODAY - CD WHERE X is CWUser, X creation_date CD') @@ -837,62 +833,73 @@ rset = self.execute('Any X, NOW - CD WHERE X is Personne, X creation_date CD') self.failUnlessEqual(rset.description[0][1], 'Interval') - def test_select_subquery_aggregat(self): + def test_select_subquery_aggregat_1(self): # percent users by groups self.execute('SET X in_group G WHERE G name "users"') rset = self.execute('Any GN, COUNT(X)*100/T GROUPBY GN ORDERBY 2,1' ' WHERE G name GN, X in_group G' ' WITH T BEING (Any COUNT(U) WHERE U is CWUser)') - self.assertEquals(rset.rows, [[u'guests', 50], [u'managers', 50], [u'users', 100]]) - 
self.assertEquals(rset.description, [('String', 'Int'), ('String', 'Int'), ('String', 'Int')]) + self.assertEqual(rset.rows, [[u'guests', 50], [u'managers', 50], [u'users', 100]]) + self.assertEqual(rset.description, [('String', 'Int'), ('String', 'Int'), ('String', 'Int')]) + + def test_select_subquery_aggregat_2(self): + expected = self.execute('Any X, 0, COUNT(T) GROUPBY X ' + 'WHERE X is Workflow, T transition_of X').rows + rset = self.execute(''' +Any P1,B,E WHERE P1 identity P2 WITH + P1,B BEING (Any P,COUNT(T) GROUPBY P WHERE P is Workflow, T is Transition, + T? transition_of P, T type "auto"), + P2,E BEING (Any P,COUNT(T) GROUPBY P WHERE P is Workflow, T is Transition, + T? transition_of P, T type "normal")''') + self.assertEqual(sorted(rset.rows), sorted(expected)) def test_select_subquery_const(self): rset = self.execute('Any X WITH X BEING ((Any NULL) UNION (Any "toto"))') - self.assertEquals(rset.rows, [[None], ['toto']]) - self.assertEquals(rset.description, [(None,), ('String',)]) + self.assertEqual(rset.rows, [[None], ['toto']]) + self.assertEqual(rset.description, [(None,), ('String',)]) # insertion queries tests ################################################# def test_insert_is(self): eid, = self.execute("INSERT Personne X: X nom 'bidule'")[0] etype, = self.execute("Any TN WHERE X is T, X eid %s, T name TN" % eid)[0] - self.assertEquals(etype, 'Personne') + self.assertEqual(etype, 'Personne') self.execute("INSERT Personne X: X nom 'managers'") def test_insert_1(self): rset = self.execute("INSERT Personne X: X nom 'bidule'") - self.assertEquals(len(rset.rows), 1) - self.assertEquals(rset.description, [('Personne',)]) + self.assertEqual(len(rset.rows), 1) + self.assertEqual(rset.description, [('Personne',)]) rset = self.execute('Personne X WHERE X nom "bidule"') self.assert_(rset.rows) - self.assertEquals(rset.description, [('Personne',)]) + self.assertEqual(rset.description, [('Personne',)]) def test_insert_1_multiple(self): self.execute("INSERT Personne X: X nom 'bidule'") self.execute("INSERT Personne X: X nom 'chouette'") rset = self.execute("INSERT Societe Y: Y nom N, P travaille Y WHERE P nom N") - self.assertEquals(len(rset.rows), 2) - self.assertEquals(rset.description, [('Societe',), ('Societe',)]) + self.assertEqual(len(rset.rows), 2) + self.assertEqual(rset.description, [('Societe',), ('Societe',)]) def test_insert_2(self): rset = self.execute("INSERT Personne X, Personne Y: X nom 'bidule', Y nom 'tutu'") - self.assertEquals(rset.description, [('Personne', 'Personne')]) + self.assertEqual(rset.description, [('Personne', 'Personne')]) rset = self.execute('Personne X WHERE X nom "bidule" or X nom "tutu"') self.assert_(rset.rows) - self.assertEquals(rset.description, [('Personne',), ('Personne',)]) + self.assertEqual(rset.description, [('Personne',), ('Personne',)]) def test_insert_3(self): self.execute("INSERT Personne X: X nom Y WHERE U login 'admin', U login Y") rset = self.execute('Personne X WHERE X nom "admin"') self.assert_(rset.rows) - self.assertEquals(rset.description, [('Personne',)]) + self.assertEqual(rset.description, [('Personne',)]) def test_insert_4(self): self.execute("INSERT Societe Y: Y nom 'toto'") self.execute("INSERT Personne X: X nom 'bidule', X travaille Y WHERE Y nom 'toto'") rset = self.execute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') self.assert_(rset.rows) - self.assertEquals(rset.description, [('Personne', 'Societe',)]) + self.assertEqual(rset.description, [('Personne', 'Societe',)]) def test_insert_4bis(self): 
peid = self.execute("INSERT Personne X: X nom 'bidule'")[0][0] @@ -917,7 +924,7 @@ self.execute("INSERT Societe Y: Y nom 'toto', X travaille Y WHERE X nom 'bidule'") rset = self.execute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') self.assert_(rset.rows) - self.assertEquals(rset.description, [('Personne', 'Societe',)]) + self.assertEqual(rset.description, [('Personne', 'Societe',)]) def test_insert_5bis(self): peid = self.execute("INSERT Personne X: X nom 'bidule'")[0][0] @@ -925,45 +932,45 @@ {'x': peid}) rset = self.execute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') self.assert_(rset.rows) - self.assertEquals(rset.description, [('Personne', 'Societe',)]) + self.assertEqual(rset.description, [('Personne', 'Societe',)]) def test_insert_6(self): self.execute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto', X travaille Y") rset = self.execute('Any X, Y WHERE X nom "bidule", Y nom "toto", X travaille Y') self.assert_(rset.rows) - self.assertEquals(rset.description, [('Personne', 'Societe',)]) + self.assertEqual(rset.description, [('Personne', 'Societe',)]) def test_insert_7(self): self.execute("INSERT Personne X, Societe Y: X nom N, Y nom 'toto', X travaille Y WHERE U login 'admin', U login N") rset = self.execute('Any X, Y WHERE X nom "admin", Y nom "toto", X travaille Y') self.assert_(rset.rows) - self.assertEquals(rset.description, [('Personne', 'Societe',)]) + self.assertEqual(rset.description, [('Personne', 'Societe',)]) def test_insert_7_2(self): self.execute("INSERT Personne X, Societe Y: X nom N, Y nom 'toto', X travaille Y WHERE U login N") rset = self.execute('Any X, Y WHERE Y nom "toto", X travaille Y') - self.assertEquals(len(rset), 2) - self.assertEquals(rset.description, [('Personne', 'Societe',), + self.assertEqual(len(rset), 2) + self.assertEqual(rset.description, [('Personne', 'Societe',), ('Personne', 'Societe',)]) def test_insert_8(self): self.execute("INSERT Societe Y, Personne X: Y nom N, X nom 'toto', X travaille Y WHERE U login 'admin', U login N") rset = self.execute('Any X, Y WHERE X nom "toto", Y nom "admin", X travaille Y') self.assert_(rset.rows) - self.assertEquals(rset.description, [('Personne', 'Societe',)]) + self.assertEqual(rset.description, [('Personne', 'Societe',)]) def test_insert_9(self): self.execute("INSERT Societe X: X nom 'Lo'") self.execute("INSERT Societe X: X nom 'Gi'") self.execute("INSERT SubDivision X: X nom 'Lab'") rset = self.execute("INSERT Personne X: X nom N, X travaille Y, X travaille_subdivision Z WHERE Y is Societe, Z is SubDivision, Y nom N") - self.assertEquals(len(rset), 2) - self.assertEquals(rset.description, [('Personne',), ('Personne',)]) - # self.assertSetEquals(set(x.nom for x in rset.entities()), + self.assertEqual(len(rset), 2) + self.assertEqual(rset.description, [('Personne',), ('Personne',)]) + # self.assertSetEqual(set(x.nom for x in rset.entities()), # ['Lo', 'Gi']) - # self.assertSetEquals(set(y.nom for x in rset.entities() for y in x.travaille), + # self.assertSetEqual(set(y.nom for x in rset.entities() for y in x.travaille), # ['Lo', 'Gi']) - # self.assertEquals([y.nom for x in rset.entities() for y in x.travaille_subdivision], + # self.assertEqual([y.nom for x in rset.entities() for y in x.travaille_subdivision], # ['Lab', 'Lab']) def test_insert_query_error(self): @@ -985,7 +992,7 @@ rset = self.execute('INSERT CWUser E, EmailAddress EM: E login "X", E upassword "X", ' 'E primary_email EM, EM address "X", E in_group G ' 'WHERE G name "managers"') - 
self.assertEquals(list(rset.description[0]), ['CWUser', 'EmailAddress']) + self.assertEqual(list(rset.description[0]), ['CWUser', 'EmailAddress']) # deletion queries tests ################################################## @@ -1000,10 +1007,10 @@ def test_delete_2(self): rset = self.execute("INSERT Personne X, Personne Y, Societe Z : X nom 'syt', Y nom 'adim', Z nom 'Logilab', X travaille Z, Y travaille Z") - self.assertEquals(len(rset), 1) - self.assertEquals(len(rset[0]), 3) - self.assertEquals(rset.description[0], ('Personne', 'Personne', 'Societe')) - self.assertEquals(self.execute('Any N WHERE X nom N, X eid %s'% rset[0][0])[0][0], 'syt') + self.assertEqual(len(rset), 1) + self.assertEqual(len(rset[0]), 3) + self.assertEqual(rset.description[0], ('Personne', 'Personne', 'Societe')) + self.assertEqual(self.execute('Any N WHERE X nom N, X eid %s'% rset[0][0])[0][0], 'syt') rset = self.execute('Personne X WHERE X travaille Y, Y nom "Logilab"') self.assertEqual(len(rset.rows), 2, rset.rows) self.execute("DELETE X travaille Y WHERE X is Personne, Y nom 'Logilabo'") @@ -1029,16 +1036,16 @@ teid2 = self.execute("INSERT Folder T: T name 'tutu'")[0][0] self.execute('SET X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2)) rset = self.execute('Any X,Y WHERE X see_also Y') - self.assertEquals(len(rset) , 2, rset.rows) + self.assertEqual(len(rset) , 2, rset.rows) self.execute('DELETE X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2)) rset = self.execute('Any X,Y WHERE X see_also Y') - self.assertEquals(len(rset) , 0) + self.assertEqual(len(rset) , 0) self.execute('SET X see_also Y WHERE X eid %s, Y eid %s' % (teid1, teid2)) rset = self.execute('Any X,Y WHERE X see_also Y') - self.assertEquals(len(rset) , 2) + self.assertEqual(len(rset) , 2) self.execute('DELETE X see_also Y WHERE X eid %s, Y eid %s' % (teid2, teid1)) rset = self.execute('Any X,Y WHERE X see_also Y') - self.assertEquals(len(rset) , 0) + self.assertEqual(len(rset) , 0) def test_nonregr_delete_cache(self): """test that relations are properly cleaned when an entity is deleted @@ -1053,28 +1060,28 @@ self.o.execute(s, "DELETE Email X") sqlc = s.pool['system'] sqlc.execute('SELECT * FROM recipients_relation') - self.assertEquals(len(sqlc.fetchall()), 0) + self.assertEqual(len(sqlc.fetchall()), 0) sqlc.execute('SELECT * FROM owned_by_relation WHERE eid_from=%s'%eeid) - self.assertEquals(len(sqlc.fetchall()), 0) + self.assertEqual(len(sqlc.fetchall()), 0) def test_nonregr_delete_cache2(self): eid = self.execute("INSERT Folder T: T name 'toto'")[0][0] self.commit() # fill the cache self.execute("Any X WHERE X eid %(x)s", {'x': eid}) - self.execute("Any X WHERE X eid %s" %eid) + self.execute("Any X WHERE X eid %s" % eid) self.execute("Folder X WHERE X eid %(x)s", {'x': eid}) - self.execute("Folder X WHERE X eid %s" %eid) - self.execute("DELETE Folder T WHERE T eid %s"%eid) + self.execute("Folder X WHERE X eid %s" % eid) + self.execute("DELETE Folder T WHERE T eid %s" % eid) self.commit() rset = self.execute("Any X WHERE X eid %(x)s", {'x': eid}) - self.assertEquals(rset.rows, []) - rset = self.execute("Any X WHERE X eid %s" %eid) - self.assertEquals(rset.rows, []) + self.assertEqual(rset.rows, []) + rset = self.execute("Any X WHERE X eid %s" % eid) + self.assertEqual(rset.rows, []) rset = self.execute("Folder X WHERE X eid %(x)s", {'x': eid}) - self.assertEquals(rset.rows, []) + self.assertEqual(rset.rows, []) rset = self.execute("Folder X WHERE X eid %s" %eid) - self.assertEquals(rset.rows, []) + self.assertEqual(rset.rows, []) 
# update queries tests #################################################### @@ -1090,7 +1097,7 @@ def test_update_2(self): peid, seid = self.execute("INSERT Personne X, Societe Y: X nom 'bidule', Y nom 'toto'")[0] rset = self.execute("SET X travaille Y WHERE X nom 'bidule', Y nom 'toto'") - self.assertEquals(tuplify(rset.rows), [(peid, seid)]) + self.assertEqual(tuplify(rset.rows), [(peid, seid)]) rset = self.execute('Any X, Y WHERE X travaille Y') self.assertEqual(len(rset.rows), 1) @@ -1118,8 +1125,8 @@ peid1 = self.execute("INSERT Personne Y: Y nom 'tutu'")[0][0] peid2 = self.execute("INSERT Personne Y: Y nom 'toto'")[0][0] self.execute("SET X nom 'tutu', Y nom 'toto' WHERE X nom 'toto', Y nom 'tutu'") - self.assertEquals(self.execute('Any X WHERE X nom "toto"').rows, [[peid1]]) - self.assertEquals(self.execute('Any X WHERE X nom "tutu"').rows, [[peid2]]) + self.assertEqual(self.execute('Any X WHERE X nom "toto"').rows, [[peid1]]) + self.assertEqual(self.execute('Any X WHERE X nom "tutu"').rows, [[peid2]]) def test_update_multiple2(self): ueid = self.execute("INSERT CWUser X: X login 'bob', X upassword 'toto'")[0][0] @@ -1142,13 +1149,13 @@ {'order': orders[splitidx]}) orders2 = [r[0] for r in self.execute('Any O ORDERBY O WHERE ST name "Personne", X from_entity ST, X ordernum O')] orders = orders[:splitidx] + [o+1 for o in orders[splitidx:]] - self.assertEquals(orders2, orders) + self.assertEqual(orders2, orders) def test_update_string_concat(self): beid = self.execute("INSERT Bookmark Y: Y title 'toto', Y path '/view'")[0][0] self.execute('SET X title XN + %(suffix)s WHERE X is Bookmark, X title XN', {'suffix': u'-moved'}) newname = self.execute('Any XN WHERE X eid %(x)s, X title XN', {'x': beid})[0][0] - self.assertEquals(newname, 'toto-moved') + self.assertEqual(newname, 'toto-moved') def test_update_query_error(self): self.execute("INSERT Personne Y: Y nom 'toto'") @@ -1156,39 +1163,38 @@ self.assertRaises(QueryError, self.execute, "SET X nom 'toto', X has_text 'tutu' WHERE X is Personne") self.assertRaises(QueryError, self.execute, "SET X login 'tutu', X eid %s" % cnx.user(self.session).eid) - # upassword encryption tests ################################################# def test_insert_upassword(self): rset = self.execute("INSERT CWUser X: X login 'bob', X upassword 'toto'") - self.assertEquals(len(rset.rows), 1) - self.assertEquals(rset.description, [('CWUser',)]) + self.assertEqual(len(rset.rows), 1) + self.assertEqual(rset.description, [('CWUser',)]) self.assertRaises(Unauthorized, self.execute, "Any P WHERE X is CWUser, X login 'bob', X upassword P") cursor = self.pool['system'] cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'" % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX)) passwd = str(cursor.fetchone()[0]) - self.assertEquals(passwd, crypt_password('toto', passwd[:2])) + self.assertEqual(passwd, crypt_password('toto', passwd[:2])) rset = self.execute("Any X WHERE X is CWUser, X login 'bob', X upassword %(pwd)s", {'pwd': Binary(passwd)}) - self.assertEquals(len(rset.rows), 1) - self.assertEquals(rset.description, [('CWUser',)]) + self.assertEqual(len(rset.rows), 1) + self.assertEqual(rset.description, [('CWUser',)]) def test_update_upassword(self): cursor = self.pool['system'] rset = self.execute("INSERT CWUser X: X login 'bob', X upassword %(pwd)s", {'pwd': 'toto'}) - self.assertEquals(rset.description[0][0], 'CWUser') + self.assertEqual(rset.description[0][0], 'CWUser') rset = self.execute("SET X upassword %(pwd)s WHERE X is CWUser, X login 'bob'", {'pwd': 
'tutu'}) cursor.execute("SELECT %supassword from %sCWUser WHERE %slogin='bob'" % (SQL_PREFIX, SQL_PREFIX, SQL_PREFIX)) passwd = str(cursor.fetchone()[0]) - self.assertEquals(passwd, crypt_password('tutu', passwd[:2])) + self.assertEqual(passwd, crypt_password('tutu', passwd[:2])) rset = self.execute("Any X WHERE X is CWUser, X login 'bob', X upassword %(pwd)s", {'pwd': Binary(passwd)}) - self.assertEquals(len(rset.rows), 1) - self.assertEquals(rset.description, [('CWUser',)]) + self.assertEqual(len(rset.rows), 1) + self.assertEqual(rset.description, [('CWUser',)]) # non regression tests #################################################### @@ -1196,11 +1202,11 @@ teid = self.execute("INSERT Tag X: X name 'tag'")[0][0] self.execute("SET X tags Y WHERE X name 'tag', Y is State, Y name 'activated'") rset = self.execute('Any X WHERE T tags X') - self.assertEquals(len(rset.rows), 1, rset.rows) + self.assertEqual(len(rset.rows), 1, rset.rows) rset = self.execute('Any T WHERE T tags X, X is State') - self.assertEquals(rset.rows, [[teid]]) + self.assertEqual(rset.rows, [[teid]]) rset = self.execute('Any T WHERE T tags X') - self.assertEquals(rset.rows, [[teid]]) + self.assertEqual(rset.rows, [[teid]]) def test_nonregr_2(self): teid = self.execute("INSERT Tag X: X name 'tag'")[0][0] @@ -1209,7 +1215,7 @@ {'g': geid, 't': teid}) rset = self.execute('Any X WHERE E eid %(x)s, E tags X', {'x': teid}) - self.assertEquals(rset.rows, [[geid]]) + self.assertEqual(rset.rows, [[geid]]) def test_nonregr_3(self): """bad sql generated on the second query (destination_state is not @@ -1217,7 +1223,7 @@ """ rset = self.execute('Any S,ES,T WHERE S state_of WF, WF workflow_of ET, ET name "CWUser",' 'ES allowed_transition T, T destination_state S') - self.assertEquals(len(rset.rows), 2) + self.assertEqual(len(rset.rows), 2) def test_nonregr_4(self): # fix variables'type, else we get (nb of entity types with a 'name' attribute)**3 @@ -1225,7 +1231,7 @@ # by the server (or client lib) rset = self.execute('Any ER,SE,OE WHERE SE name "Comment", ER name "comments", OE name "Comment",' 'ER is CWRType, SE is CWEType, OE is CWEType') - self.assertEquals(len(rset), 1) + self.assertEqual(len(rset), 1) def test_nonregr_5(self): # jpl #15505: equivalent queries returning different result sets @@ -1245,9 +1251,9 @@ rset4 = self.execute('Any N,U WHERE N todo_by U, T eid %s,' 'N filed_under T, W concerne N,' 'W filed_under A, A eid %s' % (teid1, teid2)) - self.assertEquals(rset1.rows, rset2.rows) - self.assertEquals(rset1.rows, rset3.rows) - self.assertEquals(rset1.rows, rset4.rows) + self.assertEqual(rset1.rows, rset2.rows) + self.assertEqual(rset1.rows, rset3.rows) + self.assertEqual(rset1.rows, rset4.rows) def test_nonregr_6(self): self.execute('Any N,COUNT(S) GROUPBY N ORDERBY COUNT(N) WHERE S name N, S is State') @@ -1265,7 +1271,7 @@ rset = self.execute('Any lower(N) ORDERBY LOWER(N) WHERE X is Tag, X name N,' 'X owned_by U, U eid %(x)s', {'x':self.session.user.eid}) - self.assertEquals(rset.rows, [[u'\xe9name0']]) + self.assertEqual(rset.rows, [[u'\xe9name0']]) def test_nonregr_description(self): @@ -1279,8 +1285,8 @@ self.execute("SET X in_basket B WHERE X is Personne") self.execute("SET X in_basket B WHERE X is Societe") rset = self.execute('Any X WHERE X in_basket B, B eid %s' % beid) - self.assertEquals(len(rset), 2) - self.assertEquals(rset.description, [('Personne',), ('Societe',)]) + self.assertEqual(len(rset), 2) + self.assertEqual(rset.description, [('Personne',), ('Societe',)]) def test_nonregr_cache_1(self): @@ 
-1290,19 +1296,19 @@ {'y': beid}) rset = self.execute("Any X WHERE X in_basket B, B eid %(x)s", {'x': beid}) - self.assertEquals(rset.rows, [[peid]]) + self.assertEqual(rset.rows, [[peid]]) rset = self.execute("Any X WHERE X in_basket B, B eid %(x)s", {'x': beid}) - self.assertEquals(rset.rows, [[peid]]) + self.assertEqual(rset.rows, [[peid]]) def test_nonregr_has_text_cache(self): eid1 = self.execute("INSERT Personne X: X nom 'bidule'")[0][0] eid2 = self.execute("INSERT Personne X: X nom 'tag'")[0][0] self.commit() rset = self.execute("Any X WHERE X has_text %(text)s", {'text': 'bidule'}) - self.assertEquals(rset.rows, [[eid1]]) + self.assertEqual(rset.rows, [[eid1]]) rset = self.execute("Any X WHERE X has_text %(text)s", {'text': 'tag'}) - self.assertEquals(rset.rows, [[eid2]]) + self.assertEqual(rset.rows, [[eid2]]) def test_nonregr_sortterm_management(self): """Error: Variable has no attribute 'sql' in rql2sql.py (visit_variable) @@ -1327,16 +1333,16 @@ self.execute("SET X todo_by Y WHERE X is Note, Y eid %s" % ueid) rset = self.execute('Any N WHERE N todo_by U, N is Note, U eid %s, N filed_under T, T eid %s' % (ueid, teid1)) - self.assertEquals(len(rset), 1) + self.assertEqual(len(rset), 1) def test_nonregr_XXX(self): teid = self.execute('Transition S WHERE S name "deactivate"')[0][0] rset = self.execute('Any O WHERE O is State, ' 'S eid %(x)s, S transition_of ET, O state_of ET', {'x': teid}) - self.assertEquals(len(rset), 2) + self.assertEqual(len(rset), 2) rset = self.execute('Any O WHERE O is State, NOT S destination_state O, ' 'S eid %(x)s, S transition_of ET, O state_of ET', {'x': teid}) - self.assertEquals(len(rset), 1) + self.assertEqual(len(rset), 1) def test_nonregr_set_datetime(self): @@ -1353,9 +1359,9 @@ ueid = self.execute("INSERT CWUser X: X login 'bob', X upassword 'toto', X in_group G " "WHERE G name 'users'")[0][0] rset = self.execute("CWUser U") - self.assertEquals(len(rset), 3) # bob + admin + anon + self.assertEqual(len(rset), 3) # bob + admin + anon rset = self.execute("Any U WHERE NOT U owned_by U") - self.assertEquals(len(rset), 0) # even admin created at repo initialization time should belong to itself + self.assertEqual(len(rset), 0) # even admin created at repo initialization time should belong to itself def test_nonreg_update_index(self): # this is the kind of queries generated by "cubicweb-ctl db-check -ry" @@ -1370,11 +1376,11 @@ self.execute('Any X,S, MAX(T) GROUPBY X,S ORDERBY S WHERE X is CWUser, T tags X, S eid IN(%s), X in_state S' % seid) def test_nonregr_solution_cache(self): - self.skip('XXX should be fixed or documented') # (doesn't occur if cache key is provided.) + self.skipTest('XXX should be fixed or documented') # (doesn't occur if cache key is provided.) 
         rset = self.execute('Any X WHERE X is CWUser, X eid %(x)s', {'x':self.ueid})
-        self.assertEquals(len(rset), 1)
+        self.assertEqual(len(rset), 1)
         rset = self.execute('Any X WHERE X is CWUser, X eid %(x)s', {'x':12345})
-        self.assertEquals(len(rset), 0)
+        self.assertEqual(len(rset), 0)

     def test_nonregr_final_norestr(self):
         self.assertRaises(BadRQLQuery, self.execute, 'Date X')
diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/unittest_repository.py
--- a/server/test/unittest_repository.py	Tue Jul 27 12:36:03 2010 +0200
+++ b/server/test/unittest_repository.py	Wed Nov 03 16:38:28 2010 +0100
@@ -32,8 +32,8 @@
 from yams.constraints import UniqueConstraint

 from cubicweb import (BadConnectionId, RepositoryError, ValidationError,
-                      UnknownEid, AuthenticationError)
-from cubicweb.selectors import implements
+                      UnknownEid, AuthenticationError, Unauthorized, QueryError)
+from cubicweb.selectors import is_instance
 from cubicweb.schema import CubicWebSchema, RQLConstraint
 from cubicweb.dbapi import connect, multiple_connections_unfix
 from cubicweb.devtools.testlib import CubicWebTC
@@ -65,18 +65,36 @@
             self.session.set_pool()
             cu = self.session.system_sql('SELECT %s FROM %s WHERE %s is NULL' % (
                 namecol, table, finalcol))
-            self.assertEquals(cu.fetchall(), [])
+            self.assertEqual(cu.fetchall(), [])
             cu = self.session.system_sql('SELECT %s FROM %s WHERE %s=%%(final)s ORDER BY %s' % (namecol, table, finalcol, namecol), {'final': 'TRUE'})
-            self.assertEquals(cu.fetchall(), [(u'Boolean',), (u'Bytes',),
+            self.assertEqual(cu.fetchall(), [(u'Boolean',), (u'Bytes',),
                                               (u'Date',), (u'Datetime',), (u'Decimal',),(u'Float',),
                                               (u'Int',), (u'Interval',), (u'Password',),
                                               (u'String',), (u'Time',)])
+            sql = ("SELECT etype.cw_eid, etype.cw_name, cstr.cw_eid, rel.eid_to "
+                   "FROM cw_CWUniqueTogetherConstraint as cstr, "
+                   " relations_relation as rel, "
+                   " cw_CWEType as etype "
+                   "WHERE cstr.cw_eid = rel.eid_from "
+                   " AND cstr.cw_constraint_of = etype.cw_eid "
+                   " AND etype.cw_name = 'Personne' "
+                   ";")
+            cu = self.session.system_sql(sql)
+            rows = cu.fetchall()
+            self.assertEqual(len(rows), 3)
+            self.test_unique_together()
         finally:
             self.repo.set_schema(origshema)

+    def test_unique_together(self):
+        person = self.repo.schema.eschema('Personne')
+        self.assertEqual(len(person._unique_together), 1)
+        self.assertItemsEqual(person._unique_together[0],
+                              ('nom', 'prenom', 'inline2'))
+
     def test_schema_has_owner(self):
         repo = self.repo
         cnxid = repo.connect(self.admlogin, password=self.admpassword)
@@ -118,15 +136,47 @@
         repo.close(cnxid)
         self.assert_(repo.connect(u"barnabé", password=u"héhéhé".encode('UTF8')))

-    def test_invalid_entity_rollback(self):
+    def test_rollback_on_commit_error(self):
         cnxid = self.repo.connect(self.admlogin, password=self.admpassword)
-        # no group
         self.repo.execute(cnxid, 'INSERT CWUser X: X login %(login)s, X upassword %(passwd)s', {'login': u"tutetute", 'passwd': 'tutetute'})
         self.assertRaises(ValidationError, self.repo.commit, cnxid)
         self.failIf(self.repo.execute(cnxid, 'CWUser X WHERE X login "tutetute"'))

+    def test_rollback_on_execute_validation_error(self):
+        class ValidationErrorAfterHook(Hook):
+            __regid__ = 'valerror-after-hook'
+            __select__ = Hook.__select__ & is_instance('CWGroup')
+            events = ('after_update_entity',)
+            def __call__(self):
+                raise ValidationError(self.entity.eid, {})
+        with self.temporary_appobjects(ValidationErrorAfterHook):
+            self.assertRaises(ValidationError,
+                              self.execute, 'SET X name "toto" WHERE X is CWGroup, X name "guests"')
+            self.failUnless(self.execute('Any X WHERE X is CWGroup, X name "toto"'))
+            ex = self.assertRaises(QueryError, self.commit)
+            self.assertEqual(str(ex), 'transaction must be rollbacked')
+            self.rollback()
+            self.failIf(self.execute('Any X WHERE X is CWGroup, X name "toto"'))
+
+    def test_rollback_on_execute_unauthorized(self):
+        class UnauthorizedAfterHook(Hook):
+            __regid__ = 'unauthorized-after-hook'
+            __select__ = Hook.__select__ & is_instance('CWGroup')
+            events = ('after_update_entity',)
+            def __call__(self):
+                raise Unauthorized()
+        with self.temporary_appobjects(UnauthorizedAfterHook):
+            self.assertRaises(Unauthorized,
+                              self.execute, 'SET X name "toto" WHERE X is CWGroup, X name "guests"')
+            self.failUnless(self.execute('Any X WHERE X is CWGroup, X name "toto"'))
+            ex = self.assertRaises(QueryError, self.commit)
+            self.assertEqual(str(ex), 'transaction must be rollbacked')
+            self.rollback()
+            self.failIf(self.execute('Any X WHERE X is CWGroup, X name "toto"'))
+
+
     def test_close(self):
         repo = self.repo
         cnxid = repo.connect(self.admlogin, password=self.admpassword)
@@ -143,14 +193,14 @@
         cnxid = repo.connect(self.admlogin, password=self.admpassword)
         repo.set_shared_data(cnxid, 'data', 4)
         cnxid2 = repo.connect(self.admlogin, password=self.admpassword)
-        self.assertEquals(repo.get_shared_data(cnxid, 'data'), 4)
-        self.assertEquals(repo.get_shared_data(cnxid2, 'data'), None)
+        self.assertEqual(repo.get_shared_data(cnxid, 'data'), 4)
+        self.assertEqual(repo.get_shared_data(cnxid2, 'data'), None)
         repo.set_shared_data(cnxid2, 'data', 5)
-        self.assertEquals(repo.get_shared_data(cnxid, 'data'), 4)
-        self.assertEquals(repo.get_shared_data(cnxid2, 'data'), 5)
+        self.assertEqual(repo.get_shared_data(cnxid, 'data'), 4)
+        self.assertEqual(repo.get_shared_data(cnxid2, 'data'), 5)
         repo.get_shared_data(cnxid2, 'data', pop=True)
-        self.assertEquals(repo.get_shared_data(cnxid, 'data'), 4)
-        self.assertEquals(repo.get_shared_data(cnxid2, 'data'), None)
+        self.assertEqual(repo.get_shared_data(cnxid, 'data'), 4)
+        self.assertEqual(repo.get_shared_data(cnxid2, 'data'), None)
         repo.close(cnxid)
         repo.close(cnxid2)
         self.assertRaises(BadConnectionId, repo.get_shared_data, cnxid, 'data')
@@ -161,7 +211,7 @@
     def test_check_session(self):
         repo = self.repo
         cnxid = repo.connect(self.admlogin, password=self.admpassword)
-        self.assertEquals(repo.check_session(cnxid), None)
+        self.assertEqual(repo.check_session(cnxid), None)
         repo.close(cnxid)
         self.assertRaises(BadConnectionId, repo.check_session, cnxid)
@@ -170,19 +220,19 @@
         cnxid = repo.connect(self.admlogin, password=self.admpassword)
         # check db state
         result = repo.execute(cnxid, 'Personne X')
-        self.assertEquals(result.rowcount, 0)
+        self.assertEqual(result.rowcount, 0)
         # rollback entity insertion
         repo.execute(cnxid, "INSERT Personne X: X nom 'bidule'")
         result = repo.execute(cnxid, 'Personne X')
-        self.assertEquals(result.rowcount, 1)
+        self.assertEqual(result.rowcount, 1)
         repo.rollback(cnxid)
         result = repo.execute(cnxid, 'Personne X')
-        self.assertEquals(result.rowcount, 0, result.rows)
+        self.assertEqual(result.rowcount, 0, result.rows)
         # commit
         repo.execute(cnxid, "INSERT Personne X: X nom 'bidule'")
         repo.commit(cnxid)
         result = repo.execute(cnxid, 'Personne X')
-        self.assertEquals(result.rowcount, 1)
+        self.assertEqual(result.rowcount, 1)

     def test_transaction_base2(self):
         repo = self.repo
@@ -190,10 +240,10 @@
         # rollback relation insertion
         repo.execute(cnxid, "SET U in_group G WHERE U login 'admin', G name 'guests'")
         result = repo.execute(cnxid, "Any U WHERE U in_group G, U login 'admin', G name 'guests'")
-
self.assertEquals(result.rowcount, 1) + self.assertEqual(result.rowcount, 1) repo.rollback(cnxid) result = repo.execute(cnxid, "Any U WHERE U in_group G, U login 'admin', G name 'guests'") - self.assertEquals(result.rowcount, 0, result.rows) + self.assertEqual(result.rowcount, 0, result.rows) def test_transaction_base3(self): repo = self.repo @@ -202,15 +252,15 @@ session = repo._get_session(cnxid) session.set_pool() user = session.user - user.fire_transition('deactivate') + user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') rset = repo.execute(cnxid, 'TrInfo T WHERE T wf_info_for X, X eid %(x)s', {'x': user.eid}) - self.assertEquals(len(rset), 1) + self.assertEqual(len(rset), 1) repo.rollback(cnxid) rset = repo.execute(cnxid, 'TrInfo T WHERE T wf_info_for X, X eid %(x)s', {'x': user.eid}) - self.assertEquals(len(rset), 0) + self.assertEqual(len(rset), 0) def test_transaction_interleaved(self): - self.skip('implement me') + self.skipTest('implement me') def test_close_kill_processing_request(self): repo = self.repo @@ -228,14 +278,14 @@ repo.commit(cnxid) try: ex = self.assertRaises(Exception, run_transaction) - self.assertEquals(str(ex), 'try to access pool on a closed session') + self.assertEqual(str(ex), 'try to access pool on a closed session') finally: t.join() def test_initial_schema(self): schema = self.repo.schema # check order of attributes is respected - self.assertListEquals([r.type for r in schema.eschema('CWAttribute').ordered_relations() + self.assertListEqual([r.type for r in schema.eschema('CWAttribute').ordered_relations() if not r.type in ('eid', 'is', 'is_instance_of', 'identity', 'creation_date', 'modification_date', 'cwuri', 'owned_by', 'created_by', @@ -248,11 +298,11 @@ 'indexed', 'fulltextindexed', 'internationalizable', 'defaultval', 'description', 'description_format']) - self.assertEquals(schema.eschema('CWEType').main_attribute(), 'name') - self.assertEquals(schema.eschema('State').main_attribute(), 'name') + self.assertEqual(schema.eschema('CWEType').main_attribute(), 'name') + self.assertEqual(schema.eschema('State').main_attribute(), 'name') constraints = schema.rschema('name').rdef('CWEType', 'String').constraints - self.assertEquals(len(constraints), 2) + self.assertEqual(len(constraints), 2) for cstr in constraints[:]: if isinstance(cstr, UniqueConstraint): constraints.remove(cstr) @@ -260,17 +310,17 @@ else: self.fail('unique constraint not found') sizeconstraint = constraints[0] - self.assertEquals(sizeconstraint.min, None) - self.assertEquals(sizeconstraint.max, 64) + self.assertEqual(sizeconstraint.min, None) + self.assertEqual(sizeconstraint.max, 64) constraints = schema.rschema('relation_type').rdef('CWAttribute', 'CWRType').constraints - self.assertEquals(len(constraints), 1) + self.assertEqual(len(constraints), 1) cstr = constraints[0] self.assert_(isinstance(cstr, RQLConstraint)) - self.assertEquals(cstr.restriction, 'O final TRUE') + self.assertEqual(cstr.restriction, 'O final TRUE') ownedby = schema.rschema('owned_by') - self.assertEquals(ownedby.objects('CWEType'), ('CWUser',)) + self.assertEqual(ownedby.objects('CWEType'), ('CWUser',)) def test_pyro(self): import Pyro @@ -301,7 +351,7 @@ schema = cnx.get_schema() self.failUnless(cnx.vreg) self.failUnless('etypes'in cnx.vreg) - self.assertEquals(schema.__hashmode__, None) + self.assertEqual(schema.__hashmode__, None) cu = cnx.cursor() rset = cu.execute('Any U,G WHERE U in_group G') user = iter(rset.entities()).next() @@ -319,25 +369,25 @@ repo = self.repo cnxid = 
repo.connect(self.admlogin, password=self.admpassword) session = repo._get_session(cnxid, setpool=True) - self.assertEquals(repo.type_and_source_from_eid(1, session), + self.assertEqual(repo.type_and_source_from_eid(1, session), ('CWGroup', 'system', None)) - self.assertEquals(repo.type_from_eid(1, session), 'CWGroup') - self.assertEquals(repo.source_from_eid(1, session).uri, 'system') - self.assertEquals(repo.eid2extid(repo.system_source, 1, session), None) + self.assertEqual(repo.type_from_eid(1, session), 'CWGroup') + self.assertEqual(repo.source_from_eid(1, session).uri, 'system') + self.assertEqual(repo.eid2extid(repo.system_source, 1, session), None) class dummysource: uri = 'toto' self.assertRaises(UnknownEid, repo.eid2extid, dummysource, 1, session) def test_public_api(self): - self.assertEquals(self.repo.get_schema(), self.repo.schema) - self.assertEquals(self.repo.source_defs(), {'system': {'adapter': 'native', 'uri': 'system'}}) + self.assertEqual(self.repo.get_schema(), self.repo.schema) + self.assertEqual(self.repo.source_defs(), {'system': {'adapter': 'native', 'uri': 'system'}}) # .properties() return a result set - self.assertEquals(self.repo.properties().rql, 'Any K,V WHERE P is CWProperty,P pkey K, P value V, NOT P for_user U') + self.assertEqual(self.repo.properties().rql, 'Any K,V WHERE P is CWProperty,P pkey K, P value V, NOT P for_user U') def test_session_api(self): repo = self.repo cnxid = repo.connect(self.admlogin, password=self.admpassword) - self.assertEquals(repo.user_info(cnxid), (5, 'admin', set([u'managers']), {})) - self.assertEquals(repo.describe(cnxid, 1), (u'CWGroup', u'system', None)) + self.assertEqual(repo.user_info(cnxid), (5, 'admin', set([u'managers']), {})) + self.assertEqual(repo.describe(cnxid, 1), (u'CWGroup', u'system', None)) repo.close(cnxid) self.assertRaises(BadConnectionId, repo.user_info, cnxid) self.assertRaises(BadConnectionId, repo.describe, cnxid, 1) @@ -345,12 +395,12 @@ def test_shared_data_api(self): repo = self.repo cnxid = repo.connect(self.admlogin, password=self.admpassword) - self.assertEquals(repo.get_shared_data(cnxid, 'data'), None) + self.assertEqual(repo.get_shared_data(cnxid, 'data'), None) repo.set_shared_data(cnxid, 'data', 4) - self.assertEquals(repo.get_shared_data(cnxid, 'data'), 4) + self.assertEqual(repo.get_shared_data(cnxid, 'data'), 4) repo.get_shared_data(cnxid, 'data', pop=True) repo.get_shared_data(cnxid, 'whatever', pop=True) - self.assertEquals(repo.get_shared_data(cnxid, 'data'), None) + self.assertEqual(repo.get_shared_data(cnxid, 'data'), None) repo.close(cnxid) self.assertRaises(BadConnectionId, repo.set_shared_data, cnxid, 'data', 0) self.assertRaises(BadConnectionId, repo.get_shared_data, cnxid, 'data') @@ -376,21 +426,28 @@ {'x': note.eid, 'p': p1.eid}) rset = self.execute('Any P WHERE A todo_by P, A eid %(x)s', {'x': note.eid}) - self.assertEquals(len(rset), 1) + self.assertEqual(len(rset), 1) p2 = self.request().create_entity('Personne', nom=u'tutu') self.execute('SET A todo_by P WHERE A eid %(x)s, P eid %(p)s', {'x': note.eid, 'p': p2.eid}) rset = self.execute('Any P WHERE A todo_by P, A eid %(x)s', {'x': note.eid}) - self.assertEquals(len(rset), 1) - self.assertEquals(rset.rows[0][0], p2.eid) + self.assertEqual(len(rset), 1) + self.assertEqual(rset.rows[0][0], p2.eid) + def test_delete_if_object_inlined_singlecard(self): + req = self.request() + c = req.create_entity('Card', title=u'Carte') + req.create_entity('Personne', nom=u'Vincent', fiche=c) + req.create_entity('Personne', nom=u'Florent', 
fiche=c) + self.commit() + self.assertEqual(len(c.reverse_fiche), 1) def test_set_attributes_in_before_update(self): # local hook class DummyBeforeHook(Hook): __regid__ = 'dummy-before-hook' - __select__ = Hook.__select__ & implements('EmailAddress') + __select__ = Hook.__select__ & is_instance('EmailAddress') events = ('before_update_entity',) def __call__(self): # safety belt: avoid potential infinite recursion if the test @@ -405,13 +462,13 @@ addr.set_attributes(address=u'a@b.com') rset = self.execute('Any A,AA WHERE X eid %(x)s, X address A, X alias AA', {'x': addr.eid}) - self.assertEquals(rset.rows, [[u'a@b.com', u'foo']]) + self.assertEqual(rset.rows, [[u'a@b.com', u'foo']]) def test_set_attributes_in_before_add(self): # local hook class DummyBeforeHook(Hook): __regid__ = 'dummy-before-hook' - __select__ = Hook.__select__ & implements('EmailAddress') + __select__ = Hook.__select__ & is_instance('EmailAddress') events = ('before_add_entity',) def __call__(self): # set_attributes is forbidden within before_add_entity() @@ -430,7 +487,7 @@ class DummyBeforeHook(Hook): _test = self # keep reference to test instance __regid__ = 'dummy-before-hook' - __select__ = Hook.__select__ & implements('Affaire') + __select__ = Hook.__select__ & is_instance('Affaire') events = ('before_update_entity',) def __call__(self): # invoiced attribute shouldn't be considered "edited" before the hook @@ -452,7 +509,7 @@ def test_source_from_eid(self): self.session.set_pool() - self.assertEquals(self.repo.source_from_eid(1, self.session), + self.assertEqual(self.repo.source_from_eid(1, self.session), self.repo.sources_by_uri['system']) def test_source_from_eid_raise(self): @@ -461,7 +518,7 @@ def test_type_from_eid(self): self.session.set_pool() - self.assertEquals(self.repo.type_from_eid(1, self.session), 'CWGroup') + self.assertEqual(self.repo.type_from_eid(1, self.session), 'CWGroup') def test_type_from_eid_raise(self): self.session.set_pool() @@ -478,12 +535,12 @@ self.assertIsInstance(data[0][3], datetime) data[0] = list(data[0]) data[0][3] = None - self.assertEquals(tuplify(data), [(-1, 'Personne', 'system', None, None)]) + self.assertEqual(tuplify(data), [(-1, 'Personne', 'system', None, None)]) self.repo.delete_info(self.session, entity, 'system', None) #self.repo.commit() cu = self.session.system_sql('SELECT * FROM entities WHERE eid = -1') data = cu.fetchall() - self.assertEquals(data, []) + self.assertEqual(data, []) class FTITC(CubicWebTC): @@ -493,7 +550,7 @@ eidp = self.execute('INSERT Personne X: X nom "toto", X prenom "tutu"')[0][0] self.commit() ts = datetime.now() - self.assertEquals(len(self.execute('Personne X WHERE X has_text "tutu"')), 1) + self.assertEqual(len(self.execute('Personne X WHERE X has_text "tutu"')), 1) self.session.set_pool() cu = self.session.system_sql('SELECT mtime, eid FROM entities WHERE eid = %s' % eidp) omtime = cu.fetchone()[0] @@ -502,23 +559,23 @@ time.sleep(1 - (ts.second - int(ts.second))) self.execute('SET X nom "tata" WHERE X eid %(x)s', {'x': eidp}) self.commit() - self.assertEquals(len(self.execute('Personne X WHERE X has_text "tutu"')), 1) + self.assertEqual(len(self.execute('Personne X WHERE X has_text "tutu"')), 1) self.session.set_pool() cu = self.session.system_sql('SELECT mtime FROM entities WHERE eid = %s' % eidp) mtime = cu.fetchone()[0] self.failUnless(omtime < mtime) self.commit() date, modified, deleted = self.repo.entities_modified_since(('Personne',), omtime) - self.assertEquals(modified, [('Personne', eidp)]) - self.assertEquals(deleted, []) + 
self.assertEqual(modified, [('Personne', eidp)]) + self.assertEqual(deleted, []) date, modified, deleted = self.repo.entities_modified_since(('Personne',), mtime) - self.assertEquals(modified, []) - self.assertEquals(deleted, []) + self.assertEqual(modified, []) + self.assertEqual(deleted, []) self.execute('DELETE Personne X WHERE X eid %(x)s', {'x': eidp}) self.commit() date, modified, deleted = self.repo.entities_modified_since(('Personne',), omtime) - self.assertEquals(modified, []) - self.assertEquals(deleted, [('Personne', eidp)]) + self.assertEqual(modified, []) + self.assertEqual(deleted, [('Personne', eidp)]) def test_fulltext_container_entity(self): assert self.schema.rschema('use_email').fulltext_container == 'subject' @@ -526,27 +583,27 @@ toto = req.create_entity('EmailAddress', address=u'toto@logilab.fr') self.commit() rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'}) - self.assertEquals(rset.rows, []) + self.assertEqual(rset.rows, []) req.user.set_relations(use_email=toto) self.commit() rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'}) - self.assertEquals(rset.rows, [[req.user.eid]]) + self.assertEqual(rset.rows, [[req.user.eid]]) req.execute('DELETE X use_email Y WHERE X login "admin", Y eid %(y)s', {'y': toto.eid}) self.commit() rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'}) - self.assertEquals(rset.rows, []) + self.assertEqual(rset.rows, []) tutu = req.create_entity('EmailAddress', address=u'tutu@logilab.fr') req.user.set_relations(use_email=tutu) self.commit() rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'tutu'}) - self.assertEquals(rset.rows, [[req.user.eid]]) + self.assertEqual(rset.rows, [[req.user.eid]]) tutu.set_attributes(address=u'hip@logilab.fr') self.commit() rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'tutu'}) - self.assertEquals(rset.rows, []) + self.assertEqual(rset.rows, []) rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'hip'}) - self.assertEquals(rset.rows, [[req.user.eid]]) + self.assertEqual(rset.rows, [[req.user.eid]]) def test_no_uncessary_ftiindex_op(self): req = self.request() @@ -559,7 +616,7 @@ def test_versions_inserted(self): inserted = [r[0] for r in self.execute('Any K ORDERBY K WHERE P pkey K, P pkey ~= "system.version.%"')] - self.assertEquals(inserted, + self.assertEqual(inserted, [u'system.version.basket', u'system.version.card', u'system.version.comment', u'system.version.cubicweb', u'system.version.email', u'system.version.file', u'system.version.folder', @@ -591,17 +648,29 @@ eidp = self.execute('INSERT Personne X: X nom "toto"')[0][0] eidn = self.execute('INSERT Note X: X type "T"')[0][0] self.execute('SET N ecrit_par Y WHERE N type "T", Y nom "toto"') - self.assertEquals(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp), + self.assertEqual(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp), ('after_add_relation', eidn, 'ecrit_par', eidp)]) CALLED[:] = () self.execute('DELETE N ecrit_par Y WHERE N type "T", Y nom "toto"') - self.assertEquals(CALLED, [('before_delete_relation', eidn, 'ecrit_par', eidp), + self.assertEqual(CALLED, [('before_delete_relation', eidn, 'ecrit_par', eidp), ('after_delete_relation', eidn, 'ecrit_par', eidp)]) CALLED[:] = () eidn = self.execute('INSERT Note N: N ecrit_par P WHERE P nom "toto"')[0][0] - self.assertEquals(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp), + self.assertEqual(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp), ('after_add_relation', eidn, 'ecrit_par', eidp)]) + def 
test_unique_contraint(self): + req = self.request() + toto = req.create_entity('Personne', nom=u'toto') + a01 = req.create_entity('Affaire', ref=u'A01', todo_by=toto) + req.cnx.commit() + req = self.request() + req.create_entity('Note', type=u'todo', inline1=a01) + req.cnx.commit() + req = self.request() + req.create_entity('Note', type=u'todo', inline1=a01) + ex = self.assertRaises(ValidationError, req.cnx.commit) + self.assertEqual(ex.errors, {'inline1-subject': u'RQLUniqueConstraint S type T, S inline1 A1, A1 todo_by C, Y type T, Y inline1 A2, A2 todo_by C failed'}) if __name__ == '__main__': unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/unittest_rql2sql.py --- a/server/test/unittest_rql2sql.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/test/unittest_rql2sql.py Wed Nov 03 16:38:28 2010 +0100 @@ -22,11 +22,13 @@ from logilab.common.testlib import TestCase, unittest_main, mock_object from rql import BadRQLQuery +from rql.utils import register_function, FunctionDescr -#from cubicweb.server.sources.native import remove_unused_solutions -from cubicweb.server.sources.rql2sql import SQLGenerator, remove_unused_solutions +from cubicweb.devtools import TestServerConfiguration +from cubicweb.devtools.repotest import RQLGeneratorTC +from cubicweb.server.sources.rql2sql import remove_unused_solutions -from rql.utils import register_function, FunctionDescr + # add a dumb registered procedure class stockproc(FunctionDescr): supported_backends = ('postgres', 'sqlite', 'mysql') @@ -35,8 +37,6 @@ except AssertionError, ex: pass # already registered -from cubicweb.devtools import TestServerConfiguration -from cubicweb.devtools.repotest import RQLGeneratorTC config = TestServerConfiguration('data') config.bootstrap_cubes() @@ -271,7 +271,7 @@ ('Any O WHERE NOT S ecrit_par O, S eid 1, S inline1 P, O inline2 P', '''SELECT _O.cw_eid FROM cw_Note AS _S, cw_Personne AS _O -WHERE NOT (_S.cw_ecrit_par=_O.cw_eid) AND _S.cw_eid=1 AND _S.cw_inline1 IS NOT NULL AND _O.cw_inline2=_S.cw_inline1'''), +WHERE (_S.cw_ecrit_par IS NULL OR _S.cw_ecrit_par!=_O.cw_eid) AND _S.cw_eid=1 AND _S.cw_inline1 IS NOT NULL AND _O.cw_inline2=_S.cw_inline1'''), ('DISTINCT Any S ORDERBY stockproc(SI) WHERE NOT S ecrit_par O, S para SI', '''SELECT T1.C0 FROM (SELECT DISTINCT _S.cw_eid AS C0, STOCKPROC(_S.cw_para) AS C1 @@ -424,26 +424,15 @@ GROUP BY T1.C1'''), ('Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 1, N, DF WHERE X data_name N, X data D, X data_format DF;', - '''SELECT (MAX(T1.C1) + MIN(LENGTH(T1.C0))), T1.C2 FROM (SELECT _X.cw_data AS C0, _X.cw_eid AS C1, _X.cw_data_name AS C2, _X.cw_data_format AS C3 + '''SELECT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))), _X.cw_data_name FROM cw_File AS _X -UNION ALL -SELECT _X.cw_data AS C0, _X.cw_eid AS C1, _X.cw_data_name AS C2, _X.cw_data_format AS C3 -FROM cw_Image AS _X) AS T1 -GROUP BY T1.C2,T1.C3 -ORDER BY 1,2,T1.C3'''), - - ('DISTINCT Any S ORDERBY R WHERE A is Affaire, A sujet S, A ref R', - '''SELECT T1.C0 FROM (SELECT DISTINCT _A.cw_sujet AS C0, _A.cw_ref AS C1 -FROM cw_Affaire AS _A -ORDER BY 2) AS T1'''), +GROUP BY _X.cw_data_name,_X.cw_data_format +ORDER BY 1,2,_X.cw_data_format'''), ('DISTINCT Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 2, DF WHERE X data_name N, X data D, X data_format DF;', - '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(T1.C1) + MIN(LENGTH(T1.C0))) AS C0, T1.C2 AS C1, T1.C3 AS C2 FROM (SELECT DISTINCT _X.cw_data AS C0, _X.cw_eid AS C1, _X.cw_data_name AS C2, _X.cw_data_format AS C3 + '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT 
(MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))) AS C0, _X.cw_data_name AS C1, _X.cw_data_format AS C2 FROM cw_File AS _X -UNION -SELECT DISTINCT _X.cw_data AS C0, _X.cw_eid AS C1, _X.cw_data_name AS C2, _X.cw_data_format AS C3 -FROM cw_Image AS _X) AS T1 -GROUP BY T1.C2,T1.C3 +GROUP BY _X.cw_data_name,_X.cw_data_format ORDER BY 2,3) AS T1 '''), @@ -578,6 +567,11 @@ GROUP BY T1.C0,T1.C2 ORDER BY T1.C2'''), + ('Any 1 WHERE X in_group G, X is CWUser', + '''SELECT 1 +FROM in_group_relation AS rel_in_group0'''), + + ] @@ -838,7 +832,7 @@ ('Any O,AD WHERE NOT S inline1 O, S eid 123, O todo_by AD?', '''SELECT _O.cw_eid, rel_todo_by0.eid_to FROM cw_Affaire AS _O LEFT OUTER JOIN todo_by_relation AS rel_todo_by0 ON (rel_todo_by0.eid_from=_O.cw_eid), cw_Note AS _S -WHERE NOT (_S.cw_inline1=_O.cw_eid) AND _S.cw_eid=123''') +WHERE (_S.cw_inline1 IS NULL OR _S.cw_inline1!=_O.cw_eid) AND _S.cw_eid=123''') ] VIRTUAL_VARS = [ @@ -988,7 +982,7 @@ ('Any N WHERE NOT N ecrit_par P, P nom "toto"', '''SELECT _N.cw_eid FROM cw_Note AS _N, cw_Personne AS _P -WHERE NOT (_N.cw_ecrit_par=_P.cw_eid) AND _P.cw_nom=toto'''), +WHERE (_N.cw_ecrit_par IS NULL OR _N.cw_ecrit_par!=_P.cw_eid) AND _P.cw_nom=toto'''), ('Any P WHERE NOT N ecrit_par P, P nom "toto"', '''SELECT _P.cw_eid @@ -1008,7 +1002,7 @@ ('Any P WHERE NOT N ecrit_par P, P is Personne, N eid 512', '''SELECT _P.cw_eid FROM cw_Note AS _N, cw_Personne AS _P -WHERE NOT (_N.cw_ecrit_par=_P.cw_eid) AND _N.cw_eid=512'''), +WHERE (_N.cw_ecrit_par IS NULL OR _N.cw_ecrit_par!=_P.cw_eid) AND _N.cw_eid=512'''), ('Any S,ES,T WHERE S state_of ET, ET name "CWUser", ES allowed_transition T, T destination_state S', # XXX "_T.cw_destination_state IS NOT NULL" could be avoided here but it's not worth it @@ -1036,11 +1030,12 @@ ('DISTINCT Any X WHERE X from_entity OET, NOT X from_entity NET, OET name "Image", NET eid 1', '''SELECT DISTINCT _X.cw_eid FROM cw_CWAttribute AS _X, cw_CWEType AS _OET -WHERE _X.cw_from_entity=_OET.cw_eid AND NOT (_X.cw_from_entity=1) AND _OET.cw_name=Image +WHERE _X.cw_from_entity=_OET.cw_eid AND (_X.cw_from_entity IS NULL OR _X.cw_from_entity!=1) AND _OET.cw_name=Image UNION SELECT DISTINCT _X.cw_eid FROM cw_CWEType AS _OET, cw_CWRelation AS _X -WHERE _X.cw_from_entity=_OET.cw_eid AND NOT (_X.cw_from_entity=1) AND _OET.cw_name=Image'''), +WHERE _X.cw_from_entity=_OET.cw_eid AND (_X.cw_from_entity IS NULL OR _X.cw_from_entity!=1) AND _OET.cw_name=Image'''), + ] INTERSECT = [ @@ -1082,11 +1077,9 @@ WHERE rel_is0.eid_to=2'''), ] -from logilab.database import get_db_helper - class CWRQLTC(RQLGeneratorTC): schema = schema - + backend = 'sqlite' def test_nonregr_sol(self): delete = self.rqlhelper.parse( 'DELETE X read_permission READ_PERMISSIONSUBJECT,X add_permission ADD_PERMISSIONSUBJECT,' @@ -1103,21 +1096,19 @@ for sol in delete.solutions: s.add(sol.get(var)) return s - self.assertEquals(var_sols('FROM_ENTITYOBJECT'), set(('CWAttribute', 'CWRelation'))) - self.assertEquals(var_sols('FROM_ENTITYOBJECT'), delete.defined_vars['FROM_ENTITYOBJECT'].stinfo['possibletypes']) - self.assertEquals(var_sols('ISOBJECT'), + self.assertEqual(var_sols('FROM_ENTITYOBJECT'), set(('CWAttribute', 'CWRelation'))) + self.assertEqual(var_sols('FROM_ENTITYOBJECT'), delete.defined_vars['FROM_ENTITYOBJECT'].stinfo['possibletypes']) + self.assertEqual(var_sols('ISOBJECT'), set(x.type for x in self.schema.entities() if not x.final)) - self.assertEquals(var_sols('ISOBJECT'), delete.defined_vars['ISOBJECT'].stinfo['possibletypes']) + self.assertEqual(var_sols('ISOBJECT'), 
delete.defined_vars['ISOBJECT'].stinfo['possibletypes']) + +def strip(text): + return '\n'.join(l.strip() for l in text.strip().splitlines()) class PostgresSQLGeneratorTC(RQLGeneratorTC): schema = schema - - #capture = True - def setUp(self): - RQLGeneratorTC.setUp(self) - dbhelper = get_db_helper('postgres') - self.o = SQLGenerator(schema, dbhelper) + backend = 'postgres' def _norm_sql(self, sql): return sql.strip() @@ -1130,7 +1121,7 @@ r, nargs, cbs = self.o.generate(union, args, varmap=varmap) args.update(nargs) - self.assertLinesEquals((r % args).strip(), self._norm_sql(sql), striplines=True) + self.assertMultiLineEqual(strip(r % args), self._norm_sql(sql)) except Exception, ex: if 'r' in locals(): try: @@ -1212,7 +1203,7 @@ def test_is_null_transform(self): union = self._prepare('Any X WHERE X login %(login)s') r, args, cbs = self.o.generate(union, {'login': None}) - self.assertLinesEquals((r % args).strip(), + self.assertMultiLineEqual((r % args).strip(), '''SELECT _X.cw_eid FROM cw_CWUser AS _X WHERE _X.cw_login IS NULL''') @@ -1377,13 +1368,53 @@ UNION ALL SELECT _X.cw_eid FROM appears AS appears0, cw_Folder AS _X -WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu -"""), +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu"""), ('Personne X where X has_text %(text)s, X travaille S, S has_text %(text)s', """SELECT _X.eid FROM appears AS appears0, appears AS appears2, entities AS _X, travaille_relation AS rel_travaille1 WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=_X.eid AND _X.type='Personne' AND _X.eid=rel_travaille1.eid_from AND appears2.uid=rel_travaille1.eid_to AND appears2.words @@ to_tsquery('default', 'hip&hop&momo')"""), + + ('Any X ORDERBY FTIRANK(X) DESC WHERE X has_text "toto tata"', + """SELECT appears0.uid +FROM appears AS appears0 +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') +ORDER BY ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight DESC"""), + + ('Personne X ORDERBY FTIRANK(X) WHERE X has_text "toto tata"', + """SELECT _X.eid +FROM appears AS appears0, entities AS _X +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.eid AND _X.type='Personne' +ORDER BY ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight"""), + + ('Personne X ORDERBY FTIRANK(X) WHERE X has_text %(text)s', + """SELECT _X.eid +FROM appears AS appears0, entities AS _X +WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=_X.eid AND _X.type='Personne' +ORDER BY ts_rank(appears0.words, to_tsquery('default', 'hip&hop&momo'))*appears0.weight"""), + + ('Any X ORDERBY FTIRANK(X) WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', + """SELECT T1.C0 FROM (SELECT _X.cw_eid AS C0, ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight AS C1 +FROM appears AS appears0, cw_Basket AS _X +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +UNION ALL +SELECT _X.cw_eid AS C0, ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight AS C1 +FROM appears AS appears0, cw_Folder AS _X +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +ORDER BY 2) AS T1"""), + + ('Personne X ORDERBY FTIRANK(X),FTIRANK(S) WHERE X has_text %(text)s, X travaille S, S has_text %(text)s', + """SELECT _X.eid +FROM 
appears AS appears0, appears AS appears2, entities AS _X, travaille_relation AS rel_travaille1 +WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=_X.eid AND _X.type='Personne' AND _X.eid=rel_travaille1.eid_from AND appears2.uid=rel_travaille1.eid_to AND appears2.words @@ to_tsquery('default', 'hip&hop&momo') +ORDER BY ts_rank(appears0.words, to_tsquery('default', 'hip&hop&momo'))*appears0.weight,ts_rank(appears2.words, to_tsquery('default', 'hip&hop&momo'))*appears2.weight"""), + + + ('Any X, FTIRANK(X) WHERE X has_text "toto tata"', + """SELECT appears0.uid, ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight +FROM appears AS appears0 +WHERE appears0.words @@ to_tsquery('default', 'toto&tata')"""), + )): yield t @@ -1426,8 +1457,8 @@ try: union = self._prepare('Any R WHERE X ref R') r, nargs, cbs = self.o.generate(union, args={}) - self.assertLinesEquals(r.strip(), 'SELECT _X.cw_ref\nFROM cw_Affaire AS _X') - self.assertEquals(cbs, {0: [cb]}) + self.assertMultiLineEqual(r.strip(), 'SELECT _X.cw_ref\nFROM cw_Affaire AS _X') + self.assertEqual(cbs, {0: [cb]}) finally: self.o.attr_map.clear() @@ -1443,13 +1474,18 @@ FROM cw_CWUser AS _X WHERE ((CAST(EXTRACT(YEAR from _X.cw_creation_date) AS INTEGER)=2010) OR (_X.cw_creation_date IS NULL))''') + def test_not_no_where(self): + # XXX will check if some in_group relation exists, that's it. + # We can't actually know if we want to check if there are some + # X without in_group relation, or some G without it. + self._check('Any 1 WHERE NOT X in_group G, X is CWUser', + '''SELECT 1 +WHERE NOT (EXISTS(SELECT 1 FROM in_group_relation AS rel_in_group0))''') + + class SqliteSQLGeneratorTC(PostgresSQLGeneratorTC): - - def setUp(self): - RQLGeneratorTC.setUp(self) - dbhelper = get_db_helper('sqlite') - self.o = SQLGenerator(schema, dbhelper) + backend = 'sqlite' def _norm_sql(self, sql): return sql.strip().replace(' ILIKE ', ' LIKE ') @@ -1547,6 +1583,26 @@ FROM appears AS appears0, cw_Folder AS _X WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu """), + + ('Any X ORDERBY FTIRANK(X) WHERE X has_text "toto tata"', + """SELECT DISTINCT appears0.uid +FROM appears AS appears0 +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata'))"""), + + ('Any X ORDERBY FTIRANK(X) WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', + """SELECT DISTINCT _X.cw_eid +FROM appears AS appears0, cw_Basket AS _X +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +UNION +SELECT DISTINCT _X.cw_eid +FROM appears AS appears0, cw_Folder AS _X +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +"""), + + ('Any X, FTIRANK(X) WHERE X has_text "toto tata"', + """SELECT DISTINCT appears0.uid, 1.0 +FROM appears AS appears0 +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata'))"""), )): yield t @@ -1560,11 +1616,7 @@ class MySQLGenerator(PostgresSQLGeneratorTC): - - def setUp(self): - RQLGeneratorTC.setUp(self) - dbhelper = get_db_helper('mysql') - self.o = SQLGenerator(schema, dbhelper) + backend = 'mysql' def _norm_sql(self, sql): sql = sql.strip().replace(' ILIKE ', ' LIKE ').replace('TRUE', '1').replace('FALSE', '0') @@ -1651,12 +1703,19 @@ WHERE ((EXTRACT(YEAR from _X.cw_creation_date)=2010) OR (_X.cw_creation_date IS 
NULL))''') + def test_not_no_where(self): + self._check('Any 1 WHERE NOT X in_group G, X is CWUser', + '''SELECT 1 +FROM (SELECT 1) AS _T +WHERE NOT (EXISTS(SELECT 1 FROM in_group_relation AS rel_in_group0))''') + + class removeUnsusedSolutionsTC(TestCase): def test_invariant_not_varying(self): rqlst = mock_object(defined_vars={}) rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True) rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False) - self.assertEquals(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'}, + self.assertEqual(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'}, {'A': 'FootGroup', 'B': 'FootTeam'}], {}, None), ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'}, {'A': 'FootGroup', 'B': 'FootTeam'}], @@ -1667,10 +1726,11 @@ rqlst = mock_object(defined_vars={}) rqlst.defined_vars['A'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=True) rqlst.defined_vars['B'] = mock_object(scope=rqlst, stinfo={}, _q_invariant=False) - self.assertEquals(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'}, + self.assertEqual(remove_unused_solutions(rqlst, [{'A': 'RugbyGroup', 'B': 'RugbyTeam'}, {'A': 'FootGroup', 'B': 'RugbyTeam'}], {}, None), ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'}], {}, set()) ) + if __name__ == '__main__': unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/unittest_rqlannotation.py --- a/server/test/unittest_rqlannotation.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/test/unittest_rqlannotation.py Wed Nov 03 16:38:28 2010 +0100 @@ -41,256 +41,256 @@ def test_0_1(self): rqlst = self._prepare('Any SEN,RN,OEN WHERE X from_entity SE, SE eid 44, X relation_type R, R eid 139, X to_entity OE, OE eid 42, R name RN, SE name SEN, OE name OEN') - self.assertEquals(rqlst.defined_vars['SE']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['OE']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['R']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['SE'].stinfo['attrvar'], None) - self.assertEquals(rqlst.defined_vars['OE'].stinfo['attrvar'], None) - self.assertEquals(rqlst.defined_vars['R'].stinfo['attrvar'], None) + self.assertEqual(rqlst.defined_vars['SE']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['OE']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['R']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['SE'].stinfo['attrvar'], None) + self.assertEqual(rqlst.defined_vars['OE'].stinfo['attrvar'], None) + self.assertEqual(rqlst.defined_vars['R'].stinfo['attrvar'], None) def test_0_2(self): rqlst = self._prepare('Any O WHERE NOT S ecrit_par O, S eid 1, S inline1 P, O inline2 P') - self.assertEquals(rqlst.defined_vars['P']._q_invariant, True) - self.assertEquals(rqlst.defined_vars['O'].stinfo['attrvar'], None) + self.assertEqual(rqlst.defined_vars['P']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['O'].stinfo['attrvar'], None) def test_0_4(self): rqlst = self._prepare('Any A,B,C WHERE A eid 12,A comment B, A ?wf_info_for C') - self.assertEquals(rqlst.defined_vars['A']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['A']._q_invariant, False) self.assert_(rqlst.defined_vars['B'].stinfo['attrvar']) - self.assertEquals(rqlst.defined_vars['C']._q_invariant, False) - self.assertEquals(rqlst.solutions, [{'A': 'TrInfo', 'B': 'String', 'C': 'Affaire'}, + self.assertEqual(rqlst.defined_vars['C']._q_invariant, False) + self.assertEqual(rqlst.solutions, [{'A': 'TrInfo', 'B': 'String', 'C': 
'Affaire'}, {'A': 'TrInfo', 'B': 'String', 'C': 'CWUser'}, {'A': 'TrInfo', 'B': 'String', 'C': 'Note'}]) def test_0_5(self): rqlst = self._prepare('Any P WHERE N ecrit_par P, N eid 0') - self.assertEquals(rqlst.defined_vars['N']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['P']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['N']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['P']._q_invariant, True) def test_0_6(self): rqlst = self._prepare('Any P WHERE NOT N ecrit_par P, N eid 512') - self.assertEquals(rqlst.defined_vars['P']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['P']._q_invariant, False) def test_0_7(self): rqlst = self._prepare('Personne X,Y where X nom NX, Y nom NX, X eid XE, not Y eid XE') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) self.assert_(rqlst.defined_vars['XE'].stinfo['attrvar']) def test_0_8(self): rqlst = self._prepare('Any P WHERE X eid 0, NOT X connait P') - self.assertEquals(rqlst.defined_vars['P']._q_invariant, False) - #self.assertEquals(rqlst.defined_vars['X']._q_invariant, True) - self.assertEquals(len(rqlst.solutions), 1, rqlst.solutions) + self.assertEqual(rqlst.defined_vars['P']._q_invariant, False) + #self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(len(rqlst.solutions), 1, rqlst.solutions) def test_0_10(self): rqlst = self._prepare('Any X WHERE X concerne Y, Y is Note') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, True) - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) def test_0_11(self): rqlst = self._prepare('Any X WHERE X todo_by Y, X is Affaire') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) def test_0_12(self): rqlst = self._prepare('Personne P WHERE P concerne A, A concerne S, S nom "Logilab"') - self.assertEquals(rqlst.defined_vars['P']._q_invariant, True) - self.assertEquals(rqlst.defined_vars['A']._q_invariant, True) - self.assertEquals(rqlst.defined_vars['S']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['P']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['A']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['S']._q_invariant, False) def test_1_0(self): rqlst = self._prepare('Any X,Y WHERE X created_by Y, X eid 5, NOT Y eid 6') - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) def test_1_1(self): rqlst = self._prepare('Any X,Y WHERE X created_by Y, X eid 5, NOT Y eid IN (6,7)') - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) def test_2(self): rqlst = self._prepare('Any X WHERE X identity Y, Y eid 1') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) def test_7(self): rqlst = self._prepare('Personne X,Y where X nom NX, Y nom NX, X eid XE, not Y eid XE') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) - 
self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) def test_8(self): # DISTINCT Any P WHERE P require_group %(g)s, NOT %(u)s has_group_permission P, P is CWPermission rqlst = self._prepare('DISTINCT Any X WHERE A concerne X, NOT N migrated_from X, ' 'X is Note, N eid 1') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) def test_diff_scope_identity_deamb(self): rqlst = self._prepare('Any X WHERE X concerne Y, Y is Note, EXISTS(Y identity Z, Z migrated_from N)') - self.assertEquals(rqlst.defined_vars['Z']._q_invariant, True) - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['Z']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) def test_optional_inlined(self): rqlst = self._prepare('Any X,S where X from_state S?') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['S']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['S']._q_invariant, True) def test_optional_inlined_2(self): rqlst = self._prepare('Any N,A WHERE N? inline1 A') - self.assertEquals(rqlst.defined_vars['N']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['A']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['N']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['A']._q_invariant, False) def test_optional_1(self): rqlst = self._prepare('Any X,S WHERE X travaille S?') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['S']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['S']._q_invariant, True) def test_greater_eid(self): rqlst = self._prepare('Any X WHERE X eid > 5') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) def test_greater_eid_typed(self): rqlst = self._prepare('Any X WHERE X eid > 5, X is Note') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) def test_max_eid(self): rqlst = self._prepare('Any MAX(X)') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) def test_max_eid_typed(self): rqlst = self._prepare('Any MAX(X) WHERE X is Note') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) def test_all_entities(self): rqlst = self._prepare('Any X') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) def test_all_typed_entity(self): rqlst = self._prepare('Any X WHERE X is Note') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) def test_has_text_1(self): rqlst = self._prepare('Any X WHERE X has_text "toto tata"') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, True) - self.assertEquals(rqlst.defined_vars['X'].stinfo['principal'].r_type, 'has_text') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X'].stinfo['principal'].r_type, 
'has_text') def test_has_text_2(self): rqlst = self._prepare('Any X WHERE X is Personne, X has_text "coucou"') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, True) - self.assertEquals(rqlst.defined_vars['X'].stinfo['principal'].r_type, 'has_text') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X'].stinfo['principal'].r_type, 'has_text') def test_not_relation_1(self): # P can't be invariant since deambiguification caused by "NOT X require_permission P" # is not considered by generated sql (NOT EXISTS(...)) rqlst = self._prepare('Any P,G WHERE P require_group G, NOT X require_permission P') - self.assertEquals(rqlst.defined_vars['P']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['G']._q_invariant, True) - self.assertEquals(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['P']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['G']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) def test_not_relation_2(self): rqlst = self._prepare('TrInfo X WHERE X eid 2, NOT X from_state Y, Y is State') - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, True) - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) def test_not_relation_3(self): rqlst = self._prepare('Any X, Y WHERE X eid 1, Y eid in (2, 3)') - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) def test_not_relation_4_1(self): rqlst = self._prepare('Note X WHERE NOT Y evaluee X') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) def test_not_relation_4_2(self): rqlst = self._prepare('Any X WHERE NOT Y evaluee X') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) def test_not_relation_4_3(self): rqlst = self._prepare('Any Y WHERE NOT Y evaluee X') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, True) - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) def test_not_relation_4_4(self): rqlst = self._prepare('Any X WHERE NOT Y evaluee X, Y is CWUser') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) def test_not_relation_4_5(self): rqlst = self._prepare('Any X WHERE NOT Y evaluee X, Y eid %s, X is Note' % self.ueid) - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) - self.assertEquals(rqlst.solutions, [{'X': 'Note'}]) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.solutions, [{'X': 'Note'}]) def test_not_relation_5_1(self): rqlst = self._prepare('Any X,Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), NOT X read_permission Y') - 
self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) def test_not_relation_5_2(self): rqlst = self._prepare('DISTINCT Any X,Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), NOT X read_permission Y') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) def test_not_relation_6(self): rqlst = self._prepare('Personne P where NOT P concerne A') - self.assertEquals(rqlst.defined_vars['P']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['A']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['P']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['A']._q_invariant, True) def test_not_relation_7(self): rqlst = self._prepare('Any K,V WHERE P is CWProperty, P pkey K, P value V, NOT P for_user U') - self.assertEquals(rqlst.defined_vars['P']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['U']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['P']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['U']._q_invariant, True) def test_exists_1(self): rqlst = self._prepare('Any U WHERE U eid IN (1,2), EXISTS(X owned_by U)') - self.assertEquals(rqlst.defined_vars['U']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['U']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) def test_exists_2(self): rqlst = self._prepare('Any U WHERE EXISTS(U eid IN (1,2), X owned_by U)') - self.assertEquals(rqlst.defined_vars['U']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['U']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) def test_exists_3(self): rqlst = self._prepare('Any U WHERE EXISTS(X owned_by U, X bookmarked_by U)') - self.assertEquals(rqlst.defined_vars['U']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['U']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) def test_exists_4(self): rqlst = self._prepare('Any X,Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), EXISTS(X read_permission Y)') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) def test_exists_5(self): rqlst = self._prepare('DISTINCT Any X,Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), EXISTS(X read_permission Y)') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, True) def test_not_exists_1(self): rqlst = self._prepare('Any U WHERE NOT EXISTS(X owned_by U, X bookmarked_by U)') - self.assertEquals(rqlst.defined_vars['U']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['U']._q_invariant, False) + 
self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) def test_not_exists_2(self): rqlst = self._prepare('Any X,Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), NOT EXISTS(X read_permission Y)') - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) def test_not_exists_distinct_1(self): rqlst = self._prepare('DISTINCT Any X,Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), NOT EXISTS(X read_permission Y)') - self.assertEquals(rqlst.defined_vars['Y']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['Y']._q_invariant, False) def test_or_1(self): rqlst = self._prepare('Any X WHERE X concerne B OR C concerne X, B eid 12, C eid 13') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, False) def test_or_2(self): rqlst = self._prepare('Any X WHERE X created_by U, X concerne B OR C concerne X, B eid 12, C eid 13') - self.assertEquals(rqlst.defined_vars['X']._q_invariant, True) - self.assertEquals(rqlst.defined_vars['U']._q_invariant, True) - self.assertEquals(rqlst.defined_vars['X'].stinfo['principal'].r_type, 'created_by') + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['U']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['X'].stinfo['principal'].r_type, 'created_by') def test_or_3(self): rqlst = self._prepare('Any N WHERE A evaluee N or EXISTS(N todo_by U)') - self.assertEquals(rqlst.defined_vars['N']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['A']._q_invariant, True) - self.assertEquals(rqlst.defined_vars['U']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['N']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['A']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['U']._q_invariant, True) def test_or_exists_1(self): # query generated by security rewriting @@ -300,30 +300,30 @@ 'OR (EXISTS(I concerne H?, H owned_by D, H is Societe, A identity I, I is Affaire))) ' 'OR (EXISTS(J concerne G?, G owned_by D, G is SubDivision, A identity J, J is Affaire))) ' 'OR (EXISTS(K concerne F?, F owned_by D, F is Division, A identity K, K is Affaire)))') - self.assertEquals(rqlst.defined_vars['A']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['S']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['A']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['S']._q_invariant, False) def test_or_exists_2(self): rqlst = self._prepare('Any U WHERE EXISTS(U in_group G, G name "managers") OR EXISTS(X owned_by U, X bookmarked_by U)') - self.assertEquals(rqlst.defined_vars['U']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['G']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['X']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['U']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['G']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['X']._q_invariant, True) def test_or_exists_3(self): rqlst = self._prepare('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 ' 'WHERE C is Societe, S concerne C, C nom CS, ' '(EXISTS(S owned_by D)) OR (EXISTS(S documented_by N, N title "published"))') - self.assertEquals(rqlst.defined_vars['S']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['S']._q_invariant, True) rqlst = self._prepare('Any COUNT(S),CS GROUPBY CS ORDERBY 1 DESC LIMIT 10 ' 'WHERE S is Affaire, C is Societe, S concerne C, C nom CS, ' '(EXISTS(S owned_by D)) OR (EXISTS(S 
documented_by N, N title "published"))') - self.assertEquals(rqlst.defined_vars['S']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['S']._q_invariant, True) def test_nonregr_ambiguity(self): rqlst = self._prepare('Note N WHERE N attachment F') # N may be an image as well, not invariant - self.assertEquals(rqlst.defined_vars['N']._q_invariant, False) - self.assertEquals(rqlst.defined_vars['F']._q_invariant, True) + self.assertEqual(rqlst.defined_vars['N']._q_invariant, False) + self.assertEqual(rqlst.defined_vars['F']._q_invariant, True) if __name__ == '__main__': from logilab.common.testlib import unittest_main diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/unittest_schemaserial.py --- a/server/test/unittest_schemaserial.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/test/unittest_schemaserial.py Wed Nov 03 16:38:28 2010 +0100 @@ -47,7 +47,7 @@ class Schema2RQLTC(TestCase): def test_eschema2rql1(self): - self.assertListEquals(list(eschema2rql(schema.eschema('CWAttribute'))), + self.assertListEqual(list(eschema2rql(schema.eschema('CWAttribute'))), [ ('INSERT CWEType X: X description %(description)s,X final %(final)s,X name %(name)s', {'description': u'define a final relation: link a final relation type from a non final entity to a final entity type. used to build the instance schema', @@ -55,26 +55,24 @@ ]) def test_eschema2rql2(self): - self.assertListEquals(list(eschema2rql(schema.eschema('String'))), [ + self.assertListEqual(list(eschema2rql(schema.eschema('String'))), [ ('INSERT CWEType X: X description %(description)s,X final %(final)s,X name %(name)s', {'description': u'', 'final': True, 'name': u'String'})]) def test_eschema2rql_specialization(self): # x: None since eschema.eid are None - self.assertListEquals(sorted(specialize2rql(schema)), + self.assertListEqual(sorted(specialize2rql(schema)), [('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', {'et': None, 'x': None}), ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', {'et': None, 'x': None}), ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', {'et': None, 'x': None}), - # ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', - # {'et': 'File', 'x': 'Image'}), ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', {'et': None, 'x': None})]) def test_rschema2rql1(self): - self.assertListEquals(list(rschema2rql(schema.rschema('relation_type'), cstrtypemap)), + self.assertListEqual(list(rschema2rql(schema.rschema('relation_type'), cstrtypemap)), [ ('INSERT CWRType X: X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s', {'description': u'link a relation definition to its relation type', 'symmetric': False, 'name': u'relation_type', 'final' : False, 'fulltext_container': None, 'inlined': True}), @@ -95,7 +93,7 @@ ]) def test_rschema2rql2(self): - self.assertListEquals(list(rschema2rql(schema.rschema('add_permission'), cstrtypemap)), + self.assertListEqual(list(rschema2rql(schema.rschema('add_permission'), cstrtypemap)), [ ('INSERT CWRType X: X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s', {'description': u'', 'symmetric': False, 'name': u'add_permission', 'final': False, 'fulltext_container': None, 'inlined': False}), @@ -115,7 +113,7 @@ ]) def test_rschema2rql3(self): - self.assertListEquals(list(rschema2rql(schema.rschema('cardinality'), cstrtypemap)), + 
self.assertListEqual(list(rschema2rql(schema.rschema('cardinality'), cstrtypemap)), [ ('INSERT CWRType X: X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s', {'description': u'', 'symmetric': False, 'name': u'cardinality', 'final': True, 'fulltext_container': None, 'inlined': False}), @@ -138,7 +136,7 @@ ]) def test_rdef2rql(self): - self.assertListEquals(list(rdef2rql(schema['description_format'].rdefs[('CWRType', 'String')], cstrtypemap)), + self.assertListEqual(list(rdef2rql(schema['description_format'].rdefs[('CWRType', 'String')], cstrtypemap)), [ ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,X description %(description)s,X fulltextindexed %(fulltextindexed)s,X indexed %(indexed)s,X internationalizable %(internationalizable)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s', {'se': None, 'rt': None, 'oe': None, @@ -150,19 +148,19 @@ def test_updateeschema2rql1(self): - self.assertListEquals(list(updateeschema2rql(schema.eschema('CWAttribute'), 1)), + self.assertListEqual(list(updateeschema2rql(schema.eschema('CWAttribute'), 1)), [('SET X description %(description)s,X final %(final)s,X name %(name)s WHERE X eid %(x)s', {'description': u'define a final relation: link a final relation type from a non final entity to a final entity type. used to build the instance schema', 'x': 1, 'final': False, 'name': u'CWAttribute'}), ]) def test_updateeschema2rql2(self): - self.assertListEquals(list(updateeschema2rql(schema.eschema('String'), 1)), + self.assertListEqual(list(updateeschema2rql(schema.eschema('String'), 1)), [('SET X description %(description)s,X final %(final)s,X name %(name)s WHERE X eid %(x)s', {'description': u'', 'x': 1, 'final': True, 'name': u'String'}) ]) def test_updaterschema2rql1(self): - self.assertListEquals(list(updaterschema2rql(schema.rschema('relation_type'), 1)), + self.assertListEqual(list(updaterschema2rql(schema.rschema('relation_type'), 1)), [ ('SET X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s WHERE X eid %(x)s', {'x': 1, 'symmetric': False, @@ -178,7 +176,7 @@ 'inlined': False, 'name': u'add_permission'}) ] for i, (rql, args) in enumerate(updaterschema2rql(schema.rschema('add_permission'), 1)): - yield self.assertEquals, (rql, args), expected[i] + yield self.assertEqual, (rql, args), expected[i] class Perms2RQLTC(TestCase): GROUP_MAPPING = { @@ -189,7 +187,7 @@ } def test_eperms2rql1(self): - self.assertListEquals([(rql, kwargs) for rql, kwargs in erperms2rql(schema.eschema('CWEType'), self.GROUP_MAPPING)], + self.assertListEqual([(rql, kwargs) for rql, kwargs in erperms2rql(schema.eschema('CWEType'), self.GROUP_MAPPING)], [('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}), ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 1}), ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 2}), @@ -199,7 +197,7 @@ ]) def test_rperms2rql2(self): - self.assertListEquals([(rql, kwargs) for rql, kwargs in erperms2rql(schema.rschema('read_permission').rdef('CWEType', 'CWGroup'), self.GROUP_MAPPING)], + self.assertListEqual([(rql, kwargs) for rql, kwargs in erperms2rql(schema.rschema('read_permission').rdef('CWEType', 'CWGroup'), self.GROUP_MAPPING)], [('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', 
{'g': 0}), ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 1}), ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 2}), @@ -208,7 +206,7 @@ ]) def test_rperms2rql3(self): - self.assertListEquals([(rql, kwargs) for rql, kwargs in erperms2rql(schema.rschema('name').rdef('CWEType', 'String'), self.GROUP_MAPPING)], + self.assertListEqual([(rql, kwargs) for rql, kwargs in erperms2rql(schema.rschema('name').rdef('CWEType', 'String'), self.GROUP_MAPPING)], [('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 0}), ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 1}), ('SET X read_permission Y WHERE Y eid %(g)s, X eid %(x)s', {'g': 2}), @@ -216,7 +214,7 @@ ]) #def test_perms2rql(self): - # self.assertListEquals(perms2rql(schema, self.GROUP_MAPPING), + # self.assertListEqual(perms2rql(schema, self.GROUP_MAPPING), # ['INSERT CWEType X: X name 'Societe', X final FALSE']) diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/unittest_security.py --- a/server/test/unittest_security.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/test/unittest_security.py Wed Nov 03 16:38:28 2010 +0100 @@ -22,7 +22,7 @@ from logilab.common.testlib import unittest_main, TestCase from cubicweb.devtools.testlib import CubicWebTC -from cubicweb import Unauthorized, ValidationError +from cubicweb import Unauthorized, ValidationError, QueryError from cubicweb.server.querier import check_read_access class BaseSecurityTC(CubicWebTC): @@ -81,10 +81,10 @@ cnx = self.login('iaminusersgrouponly') self.hijack_source_execute() self.execute('Any U WHERE NOT A todo_by U, A is Affaire') - self.assertEquals(self.query[0][1].as_string(), + self.assertEqual(self.query[0][1].as_string(), 'Any U WHERE NOT EXISTS(A todo_by U), A is Affaire') self.execute('Any U WHERE NOT EXISTS(A todo_by U), A is Affaire') - self.assertEquals(self.query[0][1].as_string(), + self.assertEqual(self.query[0][1].as_string(), 'Any U WHERE NOT EXISTS(A todo_by U), A is Affaire') class SecurityTC(BaseSecurityTC): @@ -103,7 +103,7 @@ cu = cnx.cursor() cu.execute("INSERT Personne X: X nom 'bidule'") self.assertRaises(Unauthorized, cnx.commit) - self.assertEquals(cu.execute('Personne X').rowcount, 1) + self.assertEqual(cu.execute('Personne X').rowcount, 1) def test_insert_rql_permission(self): # test user can only add une affaire related to a societe he owns @@ -113,7 +113,7 @@ self.assertRaises(Unauthorized, cnx.commit) # test nothing has actually been inserted self.restore_connection() - self.assertEquals(self.execute('Affaire X').rowcount, 1) + self.assertEqual(self.execute('Affaire X').rowcount, 1) cnx = self.login('iaminusersgrouponly') cu = cnx.cursor() cu.execute("INSERT Affaire X: X sujet 'cool'") @@ -128,7 +128,7 @@ cu.execute( "SET X nom 'bidulechouette' WHERE X is Personne") self.assertRaises(Unauthorized, cnx.commit) self.restore_connection() - self.assertEquals(self.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0) + self.assertEqual(self.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0) def test_update_security_2(self): cnx = self.login('anon') @@ -139,7 +139,7 @@ #self.assertRaises(Unauthorized, cnx.commit) # test nothing has actually been inserted self.restore_connection() - self.assertEquals(self.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0) + self.assertEqual(self.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0) def test_update_security_3(self): cnx = self.login('iaminusersgrouponly') @@ -189,6 +189,8 @@ cnx.commit() # to actually get 
Unauthorized exception, try to delete an entity we can read self.assertRaises(Unauthorized, cu.execute, "DELETE Societe S") + self.assertRaises(QueryError, cnx.commit) # can't commit anymore + cnx.rollback() # required after Unauthorized cu.execute("INSERT Affaire X: X sujet 'pascool'") cu.execute("INSERT Societe X: X nom 'chouette'") cu.execute("SET A concerne S WHERE A sujet 'pascool', S nom 'chouette'") @@ -210,15 +212,16 @@ cnx.commit() # to actually get Unauthorized exception, try to insert a relation were we can read both entities rset = cu.execute('Personne P') - self.assertEquals(len(rset), 1) + self.assertEqual(len(rset), 1) ent = rset.get_entity(0, 0) session.set_pool() # necessary - self.assertRaises(Unauthorized, - ent.e_schema.check_perm, session, 'update', eid=ent.eid) + self.assertRaises(Unauthorized, ent.cw_check_perm, 'update') self.assertRaises(Unauthorized, cu.execute, "SET P travaille S WHERE P is Personne, S is Societe") + self.assertRaises(QueryError, cnx.commit) # can't commit anymore + cnx.rollback() # test nothing has actually been inserted: - self.assertEquals(cu.execute('Any P,S WHERE P travaille S,P is Personne, S is Societe').rowcount, 0) + self.assertEqual(cu.execute('Any P,S WHERE P travaille S,P is Personne, S is Societe').rowcount, 0) cu.execute("INSERT Societe X: X nom 'chouette'") cu.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'") cnx.commit() @@ -240,6 +243,8 @@ cnx = self.login('iaminusersgrouponly') cu = cnx.cursor() self.assertRaises(Unauthorized, cu.execute, "DELETE A concerne S") + self.assertRaises(QueryError, cnx.commit) # can't commit anymore + cnx.rollback() # required after Unauthorized cu.execute("INSERT Societe X: X nom 'chouette'") cu.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'") cnx.commit() @@ -279,7 +284,7 @@ cnx = self.login('iaminusersgrouponly') cu = cnx.cursor() rset = cu.execute('Affaire X') - self.assertEquals(rset.rows, []) + self.assertEqual(rset.rows, []) self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid}) # cache test self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid}) @@ -288,12 +293,12 @@ cu.execute("SET A concerne S WHERE A is Affaire, S is Societe") cnx.commit() rset = cu.execute('Any X WHERE X eid %(x)s', {'x': aff2}) - self.assertEquals(rset.rows, [[aff2]]) + self.assertEqual(rset.rows, [[aff2]]) # more cache test w/ NOT eid rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': eid}) - self.assertEquals(rset.rows, [[aff2]]) + self.assertEqual(rset.rows, [[aff2]]) rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': aff2}) - self.assertEquals(rset.rows, []) + self.assertEqual(rset.rows, []) # test can't update an attribute of an entity that can't be readen self.assertRaises(Unauthorized, cu.execute, 'SET X sujet "hacked" WHERE X eid %(x)s', {'x': eid}) @@ -310,7 +315,7 @@ self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2})) # XXX would be nice if it worked rset = cu.execute("Affaire X WHERE X sujet 'cool'") - self.assertEquals(len(rset), 0) + self.assertEqual(len(rset), 0) finally: affschema.set_action_permissions('read', origperms) cnx.close() @@ -330,7 +335,7 @@ self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2})) self.failUnless(cu.execute('Any X WHERE X eid %(x)s', {'x':card1})) rset = cu.execute("Any X WHERE X has_text 'cool'") - self.assertEquals(sorted(eid for eid, in rset.rows), + self.assertEqual(sorted(eid for eid, in rset.rows), [card1, aff2]) def 
test_read_erqlexpr_has_text2(self): @@ -341,9 +346,9 @@ cnx = self.login('iaminusersgrouponly') cu = cnx.cursor() rset = cu.execute('Any N WHERE N has_text "bidule"') - self.assertEquals(len(rset.rows), 1, rset.rows) + self.assertEqual(len(rset.rows), 1, rset.rows) rset = cu.execute('Any N WITH N BEING (Any N WHERE N has_text "bidule")') - self.assertEquals(len(rset.rows), 1, rset.rows) + self.assertEqual(len(rset.rows), 1, rset.rows) def test_read_erqlexpr_optional_rel(self): self.execute("INSERT Personne X: X nom 'bidule'") @@ -353,7 +358,7 @@ cnx = self.login('anon') cu = cnx.cursor() rset = cu.execute('Any N,U WHERE N has_text "bidule", N owned_by U?') - self.assertEquals(len(rset.rows), 1, rset.rows) + self.assertEqual(len(rset.rows), 1, rset.rows) def test_read_erqlexpr_aggregat(self): self.execute("INSERT Affaire X: X sujet 'cool'")[0][0] @@ -361,22 +366,22 @@ cnx = self.login('iaminusersgrouponly') cu = cnx.cursor() rset = cu.execute('Any COUNT(X) WHERE X is Affaire') - self.assertEquals(rset.rows, [[0]]) + self.assertEqual(rset.rows, [[0]]) aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0] soc1 = cu.execute("INSERT Societe X: X nom 'chouette'")[0][0] cu.execute("SET A concerne S WHERE A is Affaire, S is Societe") cnx.commit() rset = cu.execute('Any COUNT(X) WHERE X is Affaire') - self.assertEquals(rset.rows, [[1]]) + self.assertEqual(rset.rows, [[1]]) rset = cu.execute('Any ETN, COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN') values = dict(rset) - self.assertEquals(values['Affaire'], 1) - self.assertEquals(values['Societe'], 2) + self.assertEqual(values['Affaire'], 1) + self.assertEqual(values['Societe'], 2) rset = cu.execute('Any ETN, COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN WITH X BEING ((Affaire X) UNION (Societe X))') - self.assertEquals(len(rset), 2) + self.assertEqual(len(rset), 2) values = dict(rset) - self.assertEquals(values['Affaire'], 1) - self.assertEquals(values['Societe'], 2) + self.assertEqual(values['Affaire'], 1) + self.assertEqual(values['Societe'], 2) def test_attribute_security(self): @@ -405,7 +410,7 @@ # Note.para attribute editable by managers or if the note is in "todo" state note = self.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) self.commit() - note.fire_transition('markasdone') + note.cw_adapt_to('IWorkflowable').fire_transition('markasdone') self.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid}) self.commit() cnx = self.login('iaminusersgrouponly') @@ -414,13 +419,13 @@ self.assertRaises(Unauthorized, cnx.commit) note2 = cu.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) cnx.commit() - note2.fire_transition('markasdone') + note2.cw_adapt_to('IWorkflowable').fire_transition('markasdone') cnx.commit() - self.assertEquals(len(cu.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', {'x': note2.eid})), + self.assertEqual(len(cu.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', {'x': note2.eid})), 0) cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) self.assertRaises(Unauthorized, cnx.commit) - note2.fire_transition('redoit') + note2.cw_adapt_to('IWorkflowable').fire_transition('redoit') cnx.commit() cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) cnx.commit() @@ -434,11 +439,11 @@ rset = cu.execute('CWUser X') self.failUnless(rset) x = rset.get_entity(0, 0) - self.assertEquals(x.login, None) + self.assertEqual(x.login, None) self.failUnless(x.creation_date) x = rset.get_entity(1, 0) x.complete() - self.assertEquals(x.login, None) 
+ self.assertEqual(x.login, None) self.failUnless(x.creation_date) cnx.rollback() @@ -455,11 +460,11 @@ cnx.commit() self.restore_connection() affaire = self.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0) - affaire.fire_transition('abort') + affaire.cw_adapt_to('IWorkflowable').fire_transition('abort') self.commit() - self.assertEquals(len(self.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01"')), + self.assertEqual(len(self.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01"')), 1) - self.assertEquals(len(self.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01",' + self.assertEqual(len(self.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01",' 'X owned_by U, U login "admin"')), 1) # TrInfo at the above state change cnx = self.login('iaminusersgrouponly') @@ -474,9 +479,9 @@ cu = cnx.cursor() # anonymous user can only read itself rset = cu.execute('Any L WHERE X owned_by U, U login L') - self.assertEquals(rset.rows, [['anon']]) + self.assertEqual(rset.rows, [['anon']]) rset = cu.execute('CWUser X') - self.assertEquals(rset.rows, [[anon.eid]]) + self.assertEqual(rset.rows, [[anon.eid]]) # anonymous user can read groups (necessary to check allowed transitions for instance) self.assert_(cu.execute('CWGroup X')) # should only be able to read the anonymous user, not another one @@ -489,7 +494,7 @@ # {'x': self.user.eid}) rset = cu.execute('CWUser X WHERE X eid %(x)s', {'x': anon.eid}) - self.assertEquals(rset.rows, [[anon.eid]]) + self.assertEqual(rset.rows, [[anon.eid]]) # but can't modify it cu.execute('SET X login "toto" WHERE X eid %(x)s', {'x': anon.eid}) self.assertRaises(Unauthorized, cnx.commit) @@ -517,14 +522,14 @@ cnx = self.login('anon') cu = cnx.cursor() anoneid = self.session.user.eid - self.assertEquals(cu.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,' + self.assertEqual(cu.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,' 'B bookmarked_by U, U eid %s' % anoneid).rows, [['index', '?vid=index']]) - self.assertEquals(cu.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,' + self.assertEqual(cu.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,' 'B bookmarked_by U, U eid %(x)s', {'x': anoneid}).rows, [['index', '?vid=index']]) # can read others bookmarks as well - self.assertEquals(cu.execute('Any B where B is Bookmark, NOT B bookmarked_by U').rows, + self.assertEqual(cu.execute('Any B where B is Bookmark, NOT B bookmarked_by U').rows, [[beid1]]) self.assertRaises(Unauthorized, cu.execute,'DELETE B bookmarked_by U') self.assertRaises(Unauthorized, @@ -536,7 +541,7 @@ cnx = self.login('anon') cu = cnx.cursor() names = [t for t, in cu.execute('Any N ORDERBY lower(N) WHERE X name N')] - self.assertEquals(names, sorted(names, key=lambda x: x.lower())) + self.assertEqual(names, sorted(names, key=lambda x: x.lower())) def test_in_state_without_update_perm(self): """check a user change in_state without having update permission on the @@ -557,14 +562,15 @@ cu = cnx.cursor() self.schema['Affaire'].set_action_permissions('read', ('users',)) aff = cu.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0) - aff.fire_transition('abort') + aff.cw_adapt_to('IWorkflowable').fire_transition('abort') cnx.commit() # though changing a user state (even logged user) is reserved to managers user = cnx.user(self.session) # XXX wether it should raise Unauthorized or ValidationError is not clear # the best would probably ValidationError if the transition doesn't exist # from the current 
state but Unauthorized if it exists but user can't pass it - self.assertRaises(ValidationError, user.fire_transition, 'deactivate') + self.assertRaises(ValidationError, + user.cw_adapt_to('IWorkflowable').fire_transition, 'deactivate') finally: # restore orig perms for action, perms in affaire_perms.iteritems(): @@ -572,18 +578,19 @@ def test_trinfo_security(self): aff = self.execute('INSERT Affaire X: X ref "ARCT01"').get_entity(0, 0) + iworkflowable = aff.cw_adapt_to('IWorkflowable') self.commit() - aff.fire_transition('abort') + iworkflowable.fire_transition('abort') self.commit() # can change tr info comment self.execute('SET TI comment %(c)s WHERE TI wf_info_for X, X ref "ARCT01"', {'c': u'bouh!'}) self.commit() - aff.clear_related_cache('wf_info_for', 'object') - trinfo = aff.latest_trinfo() - self.assertEquals(trinfo.comment, 'bouh!') + aff.cw_clear_relation_cache('wf_info_for', 'object') + trinfo = iworkflowable.latest_trinfo() + self.assertEqual(trinfo.comment, 'bouh!') # but not from_state/to_state - aff.clear_related_cache('wf_info_for', role='object') + aff.cw_clear_relation_cache('wf_info_for', role='object') self.assertRaises(Unauthorized, self.execute, 'SET TI from_state S WHERE TI eid %(ti)s, S name "ben non"', {'ti': trinfo.eid}) diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/unittest_session.py --- a/server/test/unittest_session.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/test/unittest_session.py Wed Nov 03 16:38:28 2010 +0100 @@ -43,7 +43,7 @@ class MakeDescriptionTC(TestCase): def test_known_values(self): solution = {'A': 'Int', 'B': 'CWUser'} - self.assertEquals(_make_description((Function('max', 'A'), Variable('B')), {}, solution), + self.assertEqual(_make_description((Function('max', 'A'), Variable('B')), {}, solution), ['Int','CWUser']) class InternalSessionTC(CubicWebTC): diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/unittest_sqlutils.py --- a/server/test/unittest_sqlutils.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/test/unittest_sqlutils.py Wed Nov 03 16:38:28 2010 +0100 @@ -36,13 +36,13 @@ def test_init(self): o = SQLAdapterMixIn(BASE_CONFIG) - self.assertEquals(o.dbhelper.dbencoding, 'UTF-8') + self.assertEqual(o.dbhelper.dbencoding, 'UTF-8') def test_init_encoding(self): config = BASE_CONFIG.copy() config['db-encoding'] = 'ISO-8859-1' o = SQLAdapterMixIn(config) - self.assertEquals(o.dbhelper.dbencoding, 'ISO-8859-1') + self.assertEqual(o.dbhelper.dbencoding, 'ISO-8859-1') if __name__ == '__main__': unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/unittest_storage.py --- a/server/test/unittest_storage.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/test/unittest_storage.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,13 +15,11 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""unit tests for module cubicweb.server.sources.storages - -""" +"""unit tests for module cubicweb.server.sources.storages""" from __future__ import with_statement -from logilab.common.testlib import unittest_main, tag +from logilab.common.testlib import unittest_main, tag, Tags from cubicweb.devtools.testlib import CubicWebTC import os.path as osp @@ -29,13 +27,13 @@ import tempfile from cubicweb import Binary, QueryError -from cubicweb.selectors import implements +from cubicweb.selectors import is_instance from cubicweb.server.sources import storages from cubicweb.server.hook import Hook, Operation class DummyBeforeHook(Hook): __regid__ = 'dummy-before-hook' - __select__ = Hook.__select__ & implements('File') + __select__ = Hook.__select__ & is_instance('File') events = ('before_add_entity',) def __call__(self): @@ -44,7 +42,7 @@ class DummyAfterHook(Hook): __regid__ = 'dummy-after-hook' - __select__ = Hook.__select__ & implements('File') + __select__ = Hook.__select__ & is_instance('File') events = ('after_add_entity',) def __call__(self): @@ -54,6 +52,8 @@ class StorageTC(CubicWebTC): + tags = CubicWebTC.tags | Tags('Storage', 'BFSS') + def setup_database(self): self.tempdir = tempfile.mkdtemp() bfs_storage = storages.BytesFileSystemStorage(self.tempdir) @@ -80,34 +80,34 @@ expected_filepath = osp.join(self.tempdir, '%s_data_%s' % (f1.eid, f1.data_name)) self.failUnless(osp.isfile(expected_filepath)) - self.assertEquals(file(expected_filepath).read(), 'the-data') + self.assertEqual(file(expected_filepath).read(), 'the-data') self.rollback() self.failIf(osp.isfile(expected_filepath)) f1 = self.create_file() self.commit() - self.assertEquals(file(expected_filepath).read(), 'the-data') + self.assertEqual(file(expected_filepath).read(), 'the-data') f1.set_attributes(data=Binary('the new data')) self.rollback() - self.assertEquals(file(expected_filepath).read(), 'the-data') - f1.delete() + self.assertEqual(file(expected_filepath).read(), 'the-data') + f1.cw_delete() self.failUnless(osp.isfile(expected_filepath)) self.rollback() self.failUnless(osp.isfile(expected_filepath)) - f1.delete() + f1.cw_delete() self.commit() self.failIf(osp.isfile(expected_filepath)) def test_bfss_sqlite_fspath(self): f1 = self.create_file() expected_filepath = osp.join(self.tempdir, '%s_data_%s' % (f1.eid, f1.data_name)) - self.assertEquals(self.fspath(f1), expected_filepath) + self.assertEqual(self.fspath(f1), expected_filepath) def test_bfss_fs_importing_doesnt_touch_path(self): self.session.transaction_data['fs_importing'] = True filepath = osp.abspath(__file__) f1 = self.session.create_entity('File', data=Binary(filepath), data_format=u'text/plain', data_name=u'foo') - self.assertEquals(self.fspath(f1), filepath) + self.assertEqual(self.fspath(f1), filepath) def test_source_storage_transparency(self): with self.temporary_appobjects(DummyBeforeHook, DummyAfterHook): @@ -116,11 +116,11 @@ def test_source_mapped_attribute_error_cases(self): ex = self.assertRaises(QueryError, self.execute, 'Any X WHERE X data ~= "hop", X is File') - self.assertEquals(str(ex), 'can\'t use File.data (X data ILIKE "hop") in restriction') + self.assertEqual(str(ex), 'can\'t use File.data (X data ILIKE "hop") in restriction') ex = self.assertRaises(QueryError, self.execute, 'Any X, Y WHERE X data D, Y data D, ' 'NOT X identity Y, X is File, Y is File') - self.assertEquals(str(ex), "can't use D as a restriction variable") + self.assertEqual(str(ex), "can't use D as a restriction variable") # query returning mix of mapped / regular 
attributes (only file.data # mapped, not image.data for instance) ex = self.assertRaises(QueryError, self.execute, @@ -129,15 +129,21 @@ ' UNION ' ' (Any D WHERE X data D, X is File)' ')') - self.assertEquals(str(ex), 'query fetch some source mapped attribute, some not') + self.assertEqual(str(ex), 'query fetch some source mapped attribute, some not') ex = self.assertRaises(QueryError, self.execute, '(Any D WHERE X data D, X is File)' ' UNION ' - '(Any D WHERE X data D, X is Image)') - self.assertEquals(str(ex), 'query fetch some source mapped attribute, some not') - ex = self.assertRaises(QueryError, - self.execute, 'Any D WHERE X data D') - self.assertEquals(str(ex), 'query fetch some source mapped attribute, some not') + '(Any D WHERE X title D, X is Bookmark)') + self.assertEqual(str(ex), 'query fetch some source mapped attribute, some not') + + storages.set_attribute_storage(self.repo, 'State', 'name', + storages.BytesFileSystemStorage(self.tempdir)) + try: + ex = self.assertRaises(QueryError, + self.execute, 'Any D WHERE X name D, X is IN (State, Transition)') + self.assertEqual(str(ex), 'query fetch some source mapped attribute, some not') + finally: + storages.unset_attribute_storage(self.repo, 'State', 'name') def test_source_mapped_attribute_advanced(self): f1 = self.create_file() @@ -146,30 +152,30 @@ ' UNION ' ' (Any D, X WHERE X eid %(x)s, X data D)' ')', {'x': f1.eid}) - self.assertEquals(len(rset), 2) - self.assertEquals(rset[0][0], f1.eid) - self.assertEquals(rset[1][0], f1.eid) - self.assertEquals(rset[0][1].getvalue(), 'the-data') - self.assertEquals(rset[1][1].getvalue(), 'the-data') + self.assertEqual(len(rset), 2) + self.assertEqual(rset[0][0], f1.eid) + self.assertEqual(rset[1][0], f1.eid) + self.assertEqual(rset[0][1].getvalue(), 'the-data') + self.assertEqual(rset[1][1].getvalue(), 'the-data') rset = self.execute('Any X,LENGTH(D) WHERE X eid %(x)s, X data D', {'x': f1.eid}) - self.assertEquals(len(rset), 1) - self.assertEquals(rset[0][0], f1.eid) - self.assertEquals(rset[0][1], len('the-data')) + self.assertEqual(len(rset), 1) + self.assertEqual(rset[0][0], f1.eid) + self.assertEqual(rset[0][1], len('the-data')) rset = self.execute('Any X,LENGTH(D) WITH D,X BEING (' ' (Any D, X WHERE X eid %(x)s, X data D)' ' UNION ' ' (Any D, X WHERE X eid %(x)s, X data D)' ')', {'x': f1.eid}) - self.assertEquals(len(rset), 2) - self.assertEquals(rset[0][0], f1.eid) - self.assertEquals(rset[1][0], f1.eid) - self.assertEquals(rset[0][1], len('the-data')) - self.assertEquals(rset[1][1], len('the-data')) + self.assertEqual(len(rset), 2) + self.assertEqual(rset[0][0], f1.eid) + self.assertEqual(rset[1][0], f1.eid) + self.assertEqual(rset[0][1], len('the-data')) + self.assertEqual(rset[1][1], len('the-data')) ex = self.assertRaises(QueryError, self.execute, 'Any X,UPPER(D) WHERE X eid %(x)s, X data D', {'x': f1.eid}) - self.assertEquals(str(ex), 'UPPER can not be called on mapped attribute') + self.assertEqual(str(ex), 'UPPER can not be called on mapped attribute') def test_bfss_fs_importing_transparency(self): @@ -177,10 +183,10 @@ filepath = osp.abspath(__file__) f1 = self.session.create_entity('File', data=Binary(filepath), data_format=u'text/plain', data_name=u'foo') - self.assertEquals(f1.data.getvalue(), file(filepath).read(), + self.assertEqual(f1.data.getvalue(), file(filepath).read(), 'files content differ') - @tag('Storage', 'BFSS', 'update') + @tag('update') def test_bfss_update_with_existing_data(self): # use self.session to use server-side cache f1 = 
self.session.create_entity('File', data=Binary('some data'), @@ -189,12 +195,12 @@ # update f1's local dict. We want the pure rql version to work self.execute('SET F data %(d)s WHERE F eid %(f)s', {'d': Binary('some other data'), 'f': f1.eid}) - self.assertEquals(f1.data.getvalue(), 'some other data') + self.assertEqual(f1.data.getvalue(), 'some other data') self.commit() f2 = self.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0) - self.assertEquals(f2.data.getvalue(), 'some other data') + self.assertEqual(f2.data.getvalue(), 'some other data') - @tag('Storage', 'BFSS', 'update', 'extension', 'commit') + @tag('update', 'extension', 'commit') def test_bfss_update_with_different_extension_commited(self): # use self.session to use server-side cache f1 = self.session.create_entity('File', data=Binary('some data'), @@ -204,7 +210,7 @@ self.commit() old_path = self.fspath(f1) self.failUnless(osp.isfile(old_path)) - self.assertEquals(osp.splitext(old_path)[1], '.txt') + self.assertEqual(osp.splitext(old_path)[1], '.txt') self.execute('SET F data %(d)s, F data_name %(dn)s, F data_format %(df)s WHERE F eid %(f)s', {'d': Binary('some other data'), 'f': f1.eid, 'dn': u'bar.jpg', 'df': u'image/jpeg'}) self.commit() @@ -214,9 +220,9 @@ new_path = self.fspath(f2) self.failIf(osp.isfile(old_path)) self.failUnless(osp.isfile(new_path)) - self.assertEquals(osp.splitext(new_path)[1], '.jpg') + self.assertEqual(osp.splitext(new_path)[1], '.jpg') - @tag('Storage', 'BFSS', 'update', 'extension', 'rollback') + @tag('update', 'extension', 'rollback') def test_bfss_update_with_different_extension_rollbacked(self): # use self.session to use server-side cache f1 = self.session.create_entity('File', data=Binary('some data'), @@ -227,7 +233,7 @@ old_path = self.fspath(f1) old_data = f1.data.getvalue() self.failUnless(osp.isfile(old_path)) - self.assertEquals(osp.splitext(old_path)[1], '.txt') + self.assertEqual(osp.splitext(old_path)[1], '.txt') self.execute('SET F data %(d)s, F data_name %(dn)s, F data_format %(df)s WHERE F eid %(f)s', {'d': Binary('some other data'), 'f': f1.eid, 'dn': u'bar.jpg', 'df': u'image/jpeg'}) self.rollback() @@ -237,10 +243,11 @@ new_path = self.fspath(f2) new_data = f2.data.getvalue() self.failUnless(osp.isfile(new_path)) - self.assertEquals(osp.splitext(new_path)[1], '.txt') - self.assertEquals(old_path, new_path) - self.assertEquals(old_data, new_data) + self.assertEqual(osp.splitext(new_path)[1], '.txt') + self.assertEqual(old_path, new_path) + self.assertEqual(old_data, new_data) + @tag('fs_importing', 'update') def test_bfss_update_with_fs_importing(self): # use self.session to use server-side cache f1 = self.session.create_entity('File', data=Binary('some data'), @@ -252,10 +259,39 @@ self.execute('SET F data %(d)s WHERE F eid %(f)s', {'d': Binary(new_fspath), 'f': f1.eid}) self.commit() - self.assertEquals(f1.data.getvalue(), 'the new data') - self.assertEquals(self.fspath(f1), new_fspath) + self.assertEqual(f1.data.getvalue(), 'the new data') + self.assertEqual(self.fspath(f1), new_fspath) self.failIf(osp.isfile(old_fspath)) + @tag('fsimport') + def test_clean(self): + fsimport = storages.fsimport + td = self.session.transaction_data + self.assertNotIn('fs_importing', td) + with fsimport(self.session): + self.assertIn('fs_importing', td) + self.assertTrue(td['fs_importing']) + self.assertNotIn('fs_importing', td) + + @tag('fsimport') + def test_true(self): + fsimport = storages.fsimport + td = self.session.transaction_data + td['fs_importing'] = True 
+ with fsimport(self.session): + self.assertIn('fs_importing', td) + self.assertTrue(td['fs_importing']) + self.assertTrue(td['fs_importing']) + + @tag('fsimport') + def test_False(self): + fsimport = storages.fsimport + td = self.session.transaction_data + td['fs_importing'] = False + with fsimport(self.session): + self.assertIn('fs_importing', td) + self.assertTrue(td['fs_importing']) + self.assertFalse(td['fs_importing']) if __name__ == '__main__': unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc server/test/unittest_undo.py --- a/server/test/unittest_undo.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/test/unittest_undo.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,6 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -""" - -""" from __future__ import with_statement from cubicweb import ValidationError @@ -57,36 +54,36 @@ self.cnx.undo_transaction, 'hop') txinfo = self.cnx.transaction_info(self.txuuid) self.failUnless(txinfo.datetime) - self.assertEquals(txinfo.user_eid, self.session.user.eid) - self.assertEquals(txinfo.user().login, 'admin') + self.assertEqual(txinfo.user_eid, self.session.user.eid) + self.assertEqual(txinfo.user().login, 'admin') actions = txinfo.actions_list() - self.assertEquals(len(actions), 2) + self.assertEqual(len(actions), 2) actions = txinfo.actions_list(public=False) - self.assertEquals(len(actions), 6) + self.assertEqual(len(actions), 6) a1 = actions[0] - self.assertEquals(a1.action, 'C') - self.assertEquals(a1.eid, self.toto.eid) - self.assertEquals(a1.etype,'CWUser') - self.assertEquals(a1.changes, None) - self.assertEquals(a1.public, True) - self.assertEquals(a1.order, 1) + self.assertEqual(a1.action, 'C') + self.assertEqual(a1.eid, self.toto.eid) + self.assertEqual(a1.etype,'CWUser') + self.assertEqual(a1.changes, None) + self.assertEqual(a1.public, True) + self.assertEqual(a1.order, 1) a4 = actions[3] - self.assertEquals(a4.action, 'A') - self.assertEquals(a4.rtype, 'in_group') - self.assertEquals(a4.eid_from, self.toto.eid) - self.assertEquals(a4.eid_to, self.toto.in_group[0].eid) - self.assertEquals(a4.order, 4) + self.assertEqual(a4.action, 'A') + self.assertEqual(a4.rtype, 'in_group') + self.assertEqual(a4.eid_from, self.toto.eid) + self.assertEqual(a4.eid_to, self.toto.in_group[0].eid) + self.assertEqual(a4.order, 4) for i, rtype in ((1, 'owned_by'), (2, 'owned_by'), (4, 'in_state'), (5, 'created_by')): a = actions[i] - self.assertEquals(a.action, 'A') - self.assertEquals(a.eid_from, self.toto.eid) - self.assertEquals(a.rtype, rtype) - self.assertEquals(a.order, i+1) + self.assertEqual(a.action, 'A') + self.assertEqual(a.eid_from, self.toto.eid) + self.assertEqual(a.rtype, rtype) + self.assertEqual(a.order, i+1) # test undoable_transactions txs = self.cnx.undoable_transactions() - self.assertEquals(len(txs), 1) - self.assertEquals(txs[0].uuid, self.txuuid) + self.assertEqual(len(txs), 1) + self.assertEqual(txs[0].uuid, self.txuuid) # test transaction_info / undoable_transactions security cnx = self.login('anon') self.assertRaises(NoSuchTransaction, @@ -96,7 +93,7 @@ self.assertRaises(NoSuchTransaction, cnx.undo_transaction, self.txuuid) txs = cnx.undoable_transactions() - self.assertEquals(len(txs), 0) + self.assertEqual(len(txs), 0) def test_undoable_transactions(self): toto = self.toto @@ -104,35 +101,35 @@ address=u'toto@logilab.org', reverse_use_email=toto) txuuid1 = self.commit() - toto.delete() + toto.cw_delete() txuuid2 = self.commit() undoable_transactions = 
self.cnx.undoable_transactions txs = undoable_transactions(action='D') - self.assertEquals(len(txs), 1, txs) - self.assertEquals(txs[0].uuid, txuuid2) + self.assertEqual(len(txs), 1, txs) + self.assertEqual(txs[0].uuid, txuuid2) txs = undoable_transactions(action='C') - self.assertEquals(len(txs), 2, txs) - self.assertEquals(txs[0].uuid, txuuid1) - self.assertEquals(txs[1].uuid, self.txuuid) + self.assertEqual(len(txs), 2, txs) + self.assertEqual(txs[0].uuid, txuuid1) + self.assertEqual(txs[1].uuid, self.txuuid) txs = undoable_transactions(eid=toto.eid) - self.assertEquals(len(txs), 3) - self.assertEquals(txs[0].uuid, txuuid2) - self.assertEquals(txs[1].uuid, txuuid1) - self.assertEquals(txs[2].uuid, self.txuuid) + self.assertEqual(len(txs), 3) + self.assertEqual(txs[0].uuid, txuuid2) + self.assertEqual(txs[1].uuid, txuuid1) + self.assertEqual(txs[2].uuid, self.txuuid) txs = undoable_transactions(etype='CWUser') - self.assertEquals(len(txs), 2) + self.assertEqual(len(txs), 2) txs = undoable_transactions(etype='CWUser', action='C') - self.assertEquals(len(txs), 1) - self.assertEquals(txs[0].uuid, self.txuuid) + self.assertEqual(len(txs), 1) + self.assertEqual(txs[0].uuid, self.txuuid) txs = undoable_transactions(etype='EmailAddress', action='D') - self.assertEquals(len(txs), 0) + self.assertEqual(len(txs), 0) txs = undoable_transactions(etype='EmailAddress', action='D', public=False) - self.assertEquals(len(txs), 1) - self.assertEquals(txs[0].uuid, txuuid2) + self.assertEqual(len(txs), 1) + self.assertEqual(txs[0].uuid, txuuid2) txs = undoable_transactions(eid=toto.eid, action='R', public=False) - self.assertEquals(len(txs), 1) - self.assertEquals(txs[0].uuid, txuuid2) + self.assertEqual(len(txs), 1) + self.assertEqual(txs[0].uuid, txuuid2) def test_undo_deletion_base(self): toto = self.toto @@ -146,34 +143,34 @@ for_user=toto) self.commit() txs = self.cnx.undoable_transactions() - self.assertEquals(len(txs), 2) - toto.delete() + self.assertEqual(len(txs), 2) + toto.cw_delete() txuuid = self.commit() actions = self.cnx.transaction_info(txuuid).actions_list() - self.assertEquals(len(actions), 1) + self.assertEqual(len(actions), 1) toto.clear_all_caches() e.clear_all_caches() errors = self.cnx.undo_transaction(txuuid) undotxuuid = self.commit() - self.assertEquals(undotxuuid, None) # undo not undoable - self.assertEquals(errors, []) + self.assertEqual(undotxuuid, None) # undo not undoable + self.assertEqual(errors, []) self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': toto.eid})) self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': e.eid})) self.failUnless(self.execute('Any X WHERE X has_text "toto@logilab"')) - self.assertEquals(toto.state, 'activated') - self.assertEquals(toto.get_email(), 'toto@logilab.org') - self.assertEquals([(p.pkey, p.value) for p in toto.reverse_for_user], + self.assertEqual(toto.cw_adapt_to('IWorkflowable').state, 'activated') + self.assertEqual(toto.cw_adapt_to('IEmailable').get_email(), 'toto@logilab.org') + self.assertEqual([(p.pkey, p.value) for p in toto.reverse_for_user], [('ui.default-text-format', 'text/rest')]) - self.assertEquals([g.name for g in toto.in_group], + self.assertEqual([g.name for g in toto.in_group], ['users']) - self.assertEquals([et.name for et in toto.related('is', entities=True)], + self.assertEqual([et.name for et in toto.related('is', entities=True)], ['CWUser']) - self.assertEquals([et.name for et in toto.is_instance_of], + self.assertEqual([et.name for et in toto.is_instance_of], ['CWUser']) # undoing shouldn't be 
visble in undoable transaction, and the undoed # transaction should be removed txs = self.cnx.undoable_transactions() - self.assertEquals(len(txs), 2) + self.assertEqual(len(txs), 2) self.assertRaises(NoSuchTransaction, self.cnx.transaction_info, txuuid) self.check_transaction_deleted(txuuid) @@ -186,7 +183,7 @@ c = session.create_entity('Card', title=u'hop', content=u'hop') p = session.create_entity('Personne', nom=u'louis', fiche=c) self.commit() - c.delete() + c.cw_delete() txuuid = self.commit() c2 = session.create_entity('Card', title=u'hip', content=u'hip') p.set_relations(fiche=c2) @@ -194,9 +191,9 @@ errors = self.cnx.undo_transaction(txuuid) self.commit() p.clear_all_caches() - self.assertEquals(p.fiche[0].eid, c2.eid) - self.assertEquals(len(errors), 1) - self.assertEquals(errors[0], + self.assertEqual(p.fiche[0].eid, c2.eid) + self.assertEqual(len(errors), 1) + self.assertEqual(errors[0], "Can't restore object relation fiche to entity " "%s which is already linked using this relation." % p.eid) @@ -207,17 +204,17 @@ session.execute('DELETE U in_group G WHERE U eid %(x)s', {'x': self.toto.eid}) self.toto.set_relations(in_group=g) self.commit() - self.toto.delete() + self.toto.cw_delete() txuuid = self.commit() - g.delete() + g.cw_delete() self.commit() errors = self.cnx.undo_transaction(txuuid) - self.assertEquals(errors, + self.assertEqual(errors, [u"Can't restore relation in_group, object entity " "%s doesn't exist anymore." % g.eid]) ex = self.assertRaises(ValidationError, self.commit) - self.assertEquals(ex.entity, self.toto.eid) - self.assertEquals(ex.errors, + self.assertEqual(ex.entity, self.toto.eid) + self.assertEqual(ex.errors, {'in_group-subject': u'at least one relation in_group is ' 'required on CWUser (%s)' % self.toto.eid}) @@ -257,8 +254,8 @@ self.commit() ex = self.assertRaises(ValidationError, self.cnx.undo_transaction, txuuid) - self.assertEquals(ex.entity, tutu.eid) - self.assertEquals(ex.errors, + self.assertEqual(ex.entity, tutu.eid) + self.assertEqual(ex.errors, {None: 'some later transaction(s) touch entity, undo them first'}) def test_undo_creation_integrity_2(self): @@ -270,15 +267,15 @@ self.commit() ex = self.assertRaises(ValidationError, self.cnx.undo_transaction, txuuid) - self.assertEquals(ex.entity, g.eid) - self.assertEquals(ex.errors, + self.assertEqual(ex.entity, g.eid) + self.assertEqual(ex.errors, {None: 'some later transaction(s) touch entity, undo them first'}) - # self.assertEquals(errors, + # self.assertEqual(errors, # [u"Can't restore relation in_group, object entity " # "%s doesn't exist anymore." % g.eid]) # ex = self.assertRaises(ValidationError, self.commit) - # self.assertEquals(ex.entity, self.toto.eid) - # self.assertEquals(ex.errors, + # self.assertEqual(ex.entity, self.toto.eid) + # self.assertEqual(ex.errors, # {'in_group-subject': u'at least one relation in_group is ' # 'required on CWUser (%s)' % self.toto.eid}) diff -r f4d1d5d9ccbb -r 90f2f20367bc server/utils.py --- a/server/utils.py Tue Jul 27 12:36:03 2010 +0200 +++ b/server/utils.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""Some utilities for the CubicWeb server. - -""" +"""Some utilities for the CubicWeb server.""" __docformat__ = "restructuredtext en" import sys @@ -62,7 +60,7 @@ """recursive looping function""" if seqin: # any more sequences to process? 
for item in seqin[0]: - newcomb = comb + [item] # add next item to current combination + newcomb = comb + [item] # add next item to current combination # call rloop w/ remaining seqs, newcomb for item in rloop(seqin[1:], newcomb): yield item # seqs and newcomb @@ -119,6 +117,14 @@ sconfig.input_config(inputlevel=inputlevel) return sconfig +_MARKER=object() +def func_name(func): + name = getattr(func, '__name__', _MARKER) + if name is _MARKER: + name = getattr(func, 'func_name', _MARKER) + if name is _MARKER: + name = repr(func) + return name class LoopTask(object): """threaded task restarting itself once executed""" @@ -126,7 +132,7 @@ if interval <= 0: raise ValueError('Loop task interval must be > 0 ' '(current value: %f for %s)' % \ - (interval, func.__name__)) + (interval, func_name(func))) self.interval = interval def auto_restart_func(self=self, func=func, args=args): try: @@ -134,7 +140,7 @@ finally: self.start() self.func = auto_restart_func - self.name = func.__name__ + self.name = func_name(func) def __str__(self): return '%s (%s seconds)' % (self.name, self.interval) @@ -164,7 +170,7 @@ self.running_threads.remove(self) Thread.__init__(self, target=auto_remove_func) self.running_threads = running_threads - self._name = target.__name__ + self._name = func_name(target) def start(self): self.running_threads.append(self) diff -r f4d1d5d9ccbb -r 90f2f20367bc setup.py --- a/setup.py Tue Jul 27 12:36:03 2010 +0200 +++ b/setup.py Wed Nov 03 16:38:28 2010 +0100 @@ -36,6 +36,7 @@ from distutils.core import setup from distutils.command import install_lib USE_SETUPTOOLS = False +from distutils.command import install_data # import required features from __pkginfo__ import modname, version, license, description, web, \ @@ -47,7 +48,7 @@ import __pkginfo__ if USE_SETUPTOOLS: requires = {} - for entry in ("__depends__", "__recommends__"): + for entry in ("__depends__",): # "__recommends__"): requires.update(getattr(__pkginfo__, entry, {})) install_requires = [("%s %s" % (d, v and v or "")).strip() for d, v in requires.iteritems()] @@ -60,7 +61,7 @@ data_files = getattr(__pkginfo__, 'data_files', None) subpackage_of = getattr(__pkginfo__, 'subpackage_of', None) ext_modules = getattr(__pkginfo__, 'ext_modules', None) - +package_data = getattr(__pkginfo__, 'package_data', {}) BASE_BLACKLIST = ('CVS', 'debian', 'dist', 'build', '__buildlog') IGNORED_EXTENSIONS = ('.pyc', '.pyo', '.elc') @@ -163,6 +164,45 @@ dest = join(self.install_dir, base, directory) export(directory, dest, verbose=False) +# write required share/cubicweb/cubes/__init__.py +class MyInstallData(install_data.install_data): + """A class That manages data files installation""" + def run(self): + """overridden from install_data class""" + install_data.install_data.run(self) + path = join(self.install_dir, 'share', 'cubicweb', 'cubes', '__init__.py') + ini = open(path, 'w') + ini.write('# Cubicweb cubes directory\n') + ini.close() + +# re-enable copying data files in sys.prefix +if USE_SETUPTOOLS: + # overwrite MyInstallData to use sys.prefix instead of the egg directory + MyInstallMoreData = MyInstallData + class MyInstallData(MyInstallMoreData): + """A class that manages data files installation""" + def run(self): + _old_install_dir = self.install_dir + if self.install_dir.endswith('egg'): + self.install_dir = sys.prefix + MyInstallMoreData.run(self) + self.install_dir = _old_install_dir + try: + import setuptools.command.easy_install # only if easy_install avaible + # monkey patch: Crack SandboxViolation verification + from 
setuptools.sandbox import DirectorySandbox as DS + old_ok = DS._ok + def _ok(self, path): + """Return True if ``path`` can be written during installation.""" + out = old_ok(self, path) + realpath = os.path.normcase(os.path.realpath(path)) + if realpath.startswith(sys.prefix): + out = True + return out + DS._ok = _ok + except ImportError: + pass + def install(**kwargs): """setup entry point""" if USE_SETUPTOOLS: @@ -182,13 +222,16 @@ packages = [modname] + get_packages(os.getcwd(), modname) if USE_SETUPTOOLS: kwargs['install_requires'] = install_requires + kwargs['zip_safe'] = False kwargs['packages'] = packages + kwargs['package_data'] = package_data return setup(name=distname, version=version, license=license, url=web, description=description, long_description=long_description, author=author, author_email=author_email, scripts=ensure_scripts(scripts), data_files=data_files, ext_modules=ext_modules, - cmdclass={'install_lib': MyInstallLib}, + cmdclass={'install_lib': MyInstallLib, + 'install_data': MyInstallData}, **kwargs ) diff -r f4d1d5d9ccbb -r 90f2f20367bc skeleton/MANIFEST.in --- a/skeleton/MANIFEST.in Tue Jul 27 12:36:03 2010 +0200 +++ b/skeleton/MANIFEST.in Wed Nov 03 16:38:28 2010 +0100 @@ -1,5 +1,5 @@ include *.py include */*.py -recursive-include data external_resources *.gif *.png *.css *.ico *.js -recursive-include i18n *.pot *.po +recursive-include data *.gif *.png *.ico *.css *.js +recursive-include i18n *.po recursive-include wdoc * diff -r f4d1d5d9ccbb -r 90f2f20367bc skeleton/data/external_resources.tmpl --- a/skeleton/data/external_resources.tmpl Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,11 +0,0 @@ -# -*- shell-script -*- -############################################################################### -# -# put here information about external resources used by your components, -# or to overides existing external resources configuration -# -############################################################################### - -# CSS stylesheets to include in HTML headers -# uncomment the line below to use template specific stylesheet -# STYLESHEETS = DATADIR/cubes.%(cubename)s.css diff -r f4d1d5d9ccbb -r 90f2f20367bc skeleton/setup.py --- a/skeleton/setup.py Tue Jul 27 12:36:03 2010 +0200 +++ b/skeleton/setup.py Wed Nov 03 16:38:28 2010 +0100 @@ -1,5 +1,5 @@ #!/usr/bin/env python -# pylint: disable-msg=W0404,W0622,W0704,W0613,W0152 +# pylint: disable=W0404,W0622,W0704,W0613 # copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr diff -r f4d1d5d9ccbb -r 90f2f20367bc skeleton/test/test_CUBENAME.py --- a/skeleton/test/test_CUBENAME.py Tue Jul 27 12:36:03 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,35 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""template automatic tests - -""" - -from logilab.common.testlib import TestCase, unittest_main - -class DefaultTC(TestCase): - def test_something(self): - self.skip('this cube has no test') - -## uncomment the import if you want to activate automatic test for your -## template - -# from cubicweb.devtools.testlib import AutomaticWebTest - - -if __name__ == '__main__': - unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc skeleton/test/test_CUBENAME.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/test/test_CUBENAME.py.tmpl Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,37 @@ +# copyright %(year)s %(author)s, all rights reserved. +# contact %(author-web-site)s -- mailto:%(author-email)s +# +%(long-license)s +"""%(distname)s automatic tests + + +uncomment code below if you want to activate automatic test for your cube: + +.. sourcecode:: python + + from cubicweb.devtools.testlib import AutomaticWebTest + + class AutomaticWebTest(AutomaticWebTest): + '''provides `to_test_etypes` and/or `list_startup_views` implementation + to limit test scope + ''' + + def to_test_etypes(self): + '''only test views for entities of the returned types''' + return set(('My', 'Cube', 'Entity', 'Types')) + + def list_startup_views(self): + '''only test startup views of the returned identifiers''' + return ('some', 'startup', 'views') +""" + +from cubicweb.devtools import testlib + +class DefaultTC(testlib.CubicWebTC): + def test_something(self): + self.skip('this cube has no test') + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc skeleton/uiprops.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/uiprops.py.tmpl Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,15 @@ +############################################################################### +# +# Put here information about external resources / styles used by your cube, +# or to overides existing UI properties. +# +# Existing properties are available through the `sheet` dictionary available +# in the global namespace. You also have access to a `data` function which +# will return proper url for resources in the 'data' directory. 
+# +# /!\ this file should not be imported /!\ +############################################################################### + +# CSS stylesheets to include in HTML headers +# uncomment the line below to use template specific stylesheet +# STYLESHEETS = sheet['STYLESHEETS'] + [data('cubes.%(cubename)s.css')] diff -r f4d1d5d9ccbb -r 90f2f20367bc sobjects/notification.py --- a/sobjects/notification.py Tue Jul 27 12:36:03 2010 +0200 +++ b/sobjects/notification.py Wed Nov 03 16:38:28 2010 +0100 @@ -46,7 +46,8 @@ mode = self._cw.vreg.config['default-recipients-mode'] if mode == 'users': execute = self._cw.execute - dests = [(u.get_email(), u.property_value('ui.language')) + dests = [(u.cw_adapt_to('IEmailable').get_email(), + u.property_value('ui.language')) for u in execute(self.user_rql, build_descr=True).entities()] elif mode == 'default-dest-addrs': lang = self._cw.vreg.property_value('ui.language') diff -r f4d1d5d9ccbb -r 90f2f20367bc sobjects/test/data/sobjects/__init__.py --- a/sobjects/test/data/sobjects/__init__.py Tue Jul 27 12:36:03 2010 +0200 +++ b/sobjects/test/data/sobjects/__init__.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,11 +15,9 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -""" -""" -from cubicweb.selectors import implements +from cubicweb.selectors import is_instance from cubicweb.sobjects.notification import StatusChangeMixIn, NotificationView class UserStatusChangeView(StatusChangeMixIn, NotificationView): - __select__ = NotificationView.__select__ & implements('CWUser') + __select__ = NotificationView.__select__ & is_instance('CWUser') diff -r f4d1d5d9ccbb -r 90f2f20367bc sobjects/test/unittest_email.py --- a/sobjects/test/unittest_email.py Tue Jul 27 12:36:03 2010 +0200 +++ b/sobjects/test/unittest_email.py Wed Nov 03 16:38:28 2010 +0100 @@ -26,23 +26,23 @@ def test_use_email_set_primary_email(self): self.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U use_email X WHERE U login "admin"') - self.assertEquals(self.execute('Any A WHERE U primary_email X, U login "admin", X address A').rows, + self.assertEqual(self.execute('Any A WHERE U primary_email X, U login "admin", X address A').rows, []) self.commit() - self.assertEquals(self.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0], + self.assertEqual(self.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0], 'admin@logilab.fr') # having another email should'nt change anything self.execute('INSERT EmailAddress X: X address "a@logilab.fr", U use_email X WHERE U login "admin"') self.commit() - self.assertEquals(self.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0], + self.assertEqual(self.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0], 'admin@logilab.fr') def test_primary_email_set_use_email(self): self.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U primary_email X WHERE U login "admin"') - self.assertEquals(self.execute('Any A WHERE U use_email X, U login "admin", X address A').rows, + self.assertEqual(self.execute('Any A WHERE U use_email X, U login "admin", X address A').rows, []) self.commit() - self.assertEquals(self.execute('Any A WHERE U use_email X, U login "admin", X address A')[0][0], + self.assertEqual(self.execute('Any A WHERE U use_email X, U login "admin", X address A')[0][0], 'admin@logilab.fr') def test_cardinality_check(self): diff -r f4d1d5d9ccbb -r 90f2f20367bc 
sobjects/test/unittest_notification.py --- a/sobjects/test/unittest_notification.py Tue Jul 27 12:36:03 2010 +0200 +++ b/sobjects/test/unittest_notification.py Wed Nov 03 16:38:28 2010 +0100 @@ -72,12 +72,12 @@ finder = self.vreg['components'].select('recipients_finder', self.request(), rset=urset) self.set_option('default-recipients-mode', 'none') - self.assertEquals(finder.recipients(), []) + self.assertEqual(finder.recipients(), []) self.set_option('default-recipients-mode', 'users') - self.assertEquals(finder.recipients(), [(u'admin@logilab.fr', 'fr')]) + self.assertEqual(finder.recipients(), [(u'admin@logilab.fr', 'fr')]) self.set_option('default-recipients-mode', 'default-dest-addrs') self.set_option('default-dest-addrs', 'abcd@logilab.fr, efgh@logilab.fr') - self.assertEquals(finder.recipients(), [('abcd@logilab.fr', 'en'), ('efgh@logilab.fr', 'en')]) + self.assertEqual(finder.recipients(), [('abcd@logilab.fr', 'en'), ('efgh@logilab.fr', 'en')]) class StatusChangeViewsTC(CubicWebTC): @@ -85,12 +85,12 @@ def test_status_change_view(self): req = self.request() u = self.create_user('toto', req=req) - u.fire_transition('deactivate', comment=u'yeah') + u.cw_adapt_to('IWorkflowable').fire_transition('deactivate', comment=u'yeah') self.failIf(MAILBOX) self.commit() - self.assertEquals(len(MAILBOX), 1) + self.assertEqual(len(MAILBOX), 1) email = MAILBOX[0] - self.assertEquals(email.content, + self.assertEqual(email.content, ''' admin changed status from to for entity 'toto' @@ -99,7 +99,7 @@ url: http://testing.fr/cubicweb/cwuser/toto ''') - self.assertEquals(email.subject, 'status changed cwuser #%s (admin)' % u.eid) + self.assertEqual(email.subject, 'status changed cwuser #%s (admin)' % u.eid) if __name__ == '__main__': unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc sobjects/test/unittest_supervising.py --- a/sobjects/test/unittest_supervising.py Tue Jul 27 12:36:03 2010 +0200 +++ b/sobjects/test/unittest_supervising.py Wed Nov 03 16:38:28 2010 +0100 @@ -52,16 +52,16 @@ session = self.session sentops = [op for op in session.pending_operations if isinstance(op, SupervisionMailOp)] - self.assertEquals(len(sentops), 1) + self.assertEqual(len(sentops), 1) # check view content op = sentops[0] view = sentops[0]._get_view() - self.assertEquals(view.recipients(), ['test@logilab.fr']) - self.assertEquals(view.subject(), '[data supervision] changes summary') + self.assertEqual(view.recipients(), ['test@logilab.fr']) + self.assertEqual(view.subject(), '[data supervision] changes summary') data = view.render(changes=session.transaction_data.get('pendingchanges')).strip() data = re.sub('#\d+', '#EID', data) data = re.sub('/\d+', '/EID', data) - self.assertTextEquals('''user admin has made the following change(s): + self.assertMultiLineEqual('''user admin has made the following change(s): * added cwuser #EID (toto) http://testing.fr/cubicweb/cwuser/toto @@ -79,22 +79,22 @@ data) # check prepared email op._prepare_email() - self.assertEquals(len(op.to_send), 1) + self.assertEqual(len(op.to_send), 1) self.assert_(op.to_send[0][0]) - self.assertEquals(op.to_send[0][1], ['test@logilab.fr']) + self.assertEqual(op.to_send[0][1], ['test@logilab.fr']) self.commit() # some other changes ####### - user.fire_transition('deactivate') + user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') sentops = [op for op in session.pending_operations if isinstance(op, SupervisionMailOp)] - self.assertEquals(len(sentops), 1) + self.assertEqual(len(sentops), 1) # check view content op = sentops[0] view = 
sentops[0]._get_view() data = view.render(changes=session.transaction_data.get('pendingchanges')).strip() data = re.sub('#\d+', '#EID', data) data = re.sub('/\d+', '/EID', data) - self.assertTextEquals('''user admin has made the following change(s): + self.assertMultiLineEqual('''user admin has made the following change(s): * changed state of cwuser #EID (toto) from state activated to state deactivated diff -r f4d1d5d9ccbb -r 90f2f20367bc sobjects/textparsers.py --- a/sobjects/textparsers.py Tue Jul 27 12:36:03 2010 +0200 +++ b/sobjects/textparsers.py Wed Nov 03 16:38:28 2010 +0100 @@ -74,10 +74,14 @@ if not hasattr(entity, 'in_state'): self.error('bad change state instruction for eid %s', eid) continue - tr = entity.current_workflow and entity.current_workflow.transition_by_name(trname) + iworkflowable = entity.cw_adapt_to('IWorkflowable') + if iworkflowable.current_workflow: + tr = iworkflowable.current_workflow.transition_by_name(trname) + else: + tr = None if tr and tr.may_be_fired(entity.eid): try: - trinfo = entity.fire_transition(tr) + trinfo = iworkflowable.fire_transition(tr) caller.fire_event('state-changed', {'trinfo': trinfo, 'entity': entity}) except: diff -r f4d1d5d9ccbb -r 90f2f20367bc tags.py --- a/tags.py Tue Jul 27 12:36:03 2010 +0200 +++ b/tags.py Wed Nov 03 16:38:28 2010 +0100 @@ -48,6 +48,7 @@ tr = tag('tr') th = tag('th') td = tag('td') +iframe = tag('iframe') def select(name, id=None, multiple=False, options=[], **attrs): if multiple: diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/cubes/comment/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/cubes/comment/__init__.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,17 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/cubes/comment/__pkginfo__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/cubes/comment/__pkginfo__.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,25 @@ +# pylint: disable-msg=W0622 +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""cubicweb-comment packaging information""" + +distname = "cubicweb-comment" +modname = distname.split('-', 1)[1] + +numversion = (1, 4, 3) +version = '.'.join(str(num) for num in numversion) diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/cubes/email/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/cubes/email/__init__.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,17 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/cubes/email/__pkginfo__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/cubes/email/__pkginfo__.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,30 @@ +# pylint: disable-msg=W0622 +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""cubicweb-email packaging information""" + +distname = "cubicweb-email" +modname = distname.split('-', 1)[1] + +numversion = (1, 4, 3) +version = '.'.join(str(num) for num in numversion) + + +__depends__ = {'cubicweb': None, + 'cubicweb-file': None} +__recommends__ = {'cubicweb-comment': None} diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/cubes/email/entities.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/cubes/email/entities.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,1 @@ +"test" diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/cubes/email/hooks.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/cubes/email/hooks.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,1 @@ +"test" diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/cubes/email/views/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/cubes/email/views/__init__.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,1 @@ +"test" diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/cubes/file/__pkginfo__.py --- a/test/data/cubes/file/__pkginfo__.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/data/cubes/file/__pkginfo__.py Wed Nov 03 16:38:28 2010 +0100 @@ -16,9 +16,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""cubicweb-file packaging information - -""" +"""cubicweb-file packaging information""" distname = "cubicweb-file" modname = distname.split('-', 1)[1] diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/cubes/file/entities/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/cubes/file/entities/__init__.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,1 @@ +"test" diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/cubes/file/hooks/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/cubes/file/hooks/__init__.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,1 @@ +"test" diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/cubes/file/views.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/cubes/file/views.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,1 @@ +"test" diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/cubes/forge/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/cubes/forge/__init__.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,17 @@ +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/cubes/forge/__pkginfo__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/cubes/forge/__pkginfo__.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,32 @@ +# pylint: disable-msg=W0622 +# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""cubicweb-forge packaging information""" + +distname = "cubicweb-forge" +modname = distname.split('-', 1)[1] + +numversion = (1, 4, 3) +version = '.'.join(str(num) for num in numversion) + + +__depends__ = {'cubicweb': None, + 'cubicweb-file': None, + 'cubicweb-email': None, + 'cubicweb-comment': None, + } diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/rewrite/bootstrap_cubes --- a/test/data/rewrite/bootstrap_cubes Tue Jul 27 12:36:03 2010 +0200 +++ b/test/data/rewrite/bootstrap_cubes Wed Nov 03 16:38:28 2010 +0100 @@ -1,1 +1,1 @@ -card, person +card diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/rewrite/schema.py --- a/test/data/rewrite/schema.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/data/rewrite/schema.py Wed Nov 03 16:38:28 2010 +0100 @@ -49,7 +49,7 @@ class require_permission(RelationDefinition): - subject = ('Card', 'Note', 'Person') + subject = ('Card', 'Note') object = 'CWPermission' diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/scripts/script1.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/scripts/script1.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,3 @@ +assert 'data/scripts/script1.py' == __file__ +assert '__main__' == __name__ +assert [] == __args__, __args__ diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/scripts/script2.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/scripts/script2.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,3 @@ +assert 'data/scripts/script2.py' == __file__ +assert '__main__' == __name__ +assert ['-v'] == __args__, __args__ diff -r f4d1d5d9ccbb -r 90f2f20367bc test/data/scripts/script3.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/scripts/script3.py Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,3 @@ +assert 'data/scripts/script3.py' == __file__ +assert '__main__' == __name__ +assert ['-vd', '-f', 'FILE.TXT'] == __args__, __args__ diff -r f4d1d5d9ccbb -r 90f2f20367bc test/unittest_cwconfig.py --- a/test/unittest_cwconfig.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/unittest_cwconfig.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-""" +"""cubicweb.cwconfig unit tests""" -""" import sys import os import tempfile @@ -51,34 +50,38 @@ ApptestConfiguration.CUBES_PATH = [] def test_reorder_cubes(self): - # jpl depends on email and file and comment + self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] + self.config.adjust_sys_path() + # forge depends on email and file and comment # email depends on file - self.assertEquals(self.config.reorder_cubes(['file', 'email', 'forge']), + self.assertEqual(self.config.reorder_cubes(['file', 'email', 'forge']), ('forge', 'email', 'file')) - self.assertEquals(self.config.reorder_cubes(['email', 'file', 'forge']), + self.assertEqual(self.config.reorder_cubes(['email', 'file', 'forge']), ('forge', 'email', 'file')) - self.assertEquals(self.config.reorder_cubes(['email', 'forge', 'file']), + self.assertEqual(self.config.reorder_cubes(['email', 'forge', 'file']), ('forge', 'email', 'file')) - self.assertEquals(self.config.reorder_cubes(['file', 'forge', 'email']), + self.assertEqual(self.config.reorder_cubes(['file', 'forge', 'email']), ('forge', 'email', 'file')) - self.assertEquals(self.config.reorder_cubes(['forge', 'file', 'email']), + self.assertEqual(self.config.reorder_cubes(['forge', 'file', 'email']), ('forge', 'email', 'file')) - self.assertEquals(self.config.reorder_cubes(('forge', 'email', 'file')), + self.assertEqual(self.config.reorder_cubes(('forge', 'email', 'file')), ('forge', 'email', 'file')) def test_reorder_cubes_recommends(self): + self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] + self.config.adjust_sys_path() from cubes.comment import __pkginfo__ as comment_pkginfo comment_pkginfo.__recommends_cubes__ = {'file': None} try: # email recommends comment # comment recommends file - self.assertEquals(self.config.reorder_cubes(('forge', 'email', 'file', 'comment')), + self.assertEqual(self.config.reorder_cubes(('forge', 'email', 'file', 'comment')), ('forge', 'email', 'comment', 'file')) - self.assertEquals(self.config.reorder_cubes(('forge', 'email', 'comment', 'file')), + self.assertEqual(self.config.reorder_cubes(('forge', 'email', 'comment', 'file')), ('forge', 'email', 'comment', 'file')) - self.assertEquals(self.config.reorder_cubes(('forge', 'comment', 'email', 'file')), + self.assertEqual(self.config.reorder_cubes(('forge', 'comment', 'email', 'file')), ('forge', 'email', 'comment', 'file')) - self.assertEquals(self.config.reorder_cubes(('comment', 'forge', 'email', 'file')), + self.assertEqual(self.config.reorder_cubes(('comment', 'forge', 'email', 'file')), ('forge', 'email', 'comment', 'file')) finally: comment_pkginfo.__recommends_cubes__ = {} @@ -87,48 +90,53 @@ # def test_vc_config(self): # vcconf = self.config.vc_config() # self.assertIsInstance(vcconf['EEMAIL'], Version) -# self.assertEquals(vcconf['EEMAIL'], (0, 3, 1)) -# self.assertEquals(vcconf['CW'], (2, 31, 2)) +# self.assertEqual(vcconf['EEMAIL'], (0, 3, 1)) +# self.assertEqual(vcconf['CW'], (2, 31, 2)) # self.assertRaises(KeyError, vcconf.__getitem__, 'CW_VERSION') # self.assertRaises(KeyError, vcconf.__getitem__, 'CRM') def test_expand_cubes(self): - self.assertEquals(self.config.expand_cubes(('email', 'blog')), - ['email', 'blog', 'file']) + self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] + self.config.adjust_sys_path() + self.assertEqual(self.config.expand_cubes(('email', 'comment')), + ['email', 'comment', 'file']) def test_vregistry_path(self): - self.assertEquals([unabsolutize(p) for p in self.config.vregistry_path()], + self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] + 
self.config.adjust_sys_path() + self.assertEqual([unabsolutize(p) for p in self.config.vregistry_path()], ['entities', 'web/views', 'sobjects', 'hooks', - 'file/entities.py', 'file/views', 'file/hooks.py', + 'file/entities', 'file/views.py', 'file/hooks', 'email/entities.py', 'email/views', 'email/hooks.py', 'test/data/entities.py', 'test/data/views.py']) def test_cubes_path(self): # make sure we don't import the email cube, but the stdlib email package import email - self.assertNotEquals(dirname(email.__file__), self.config.CUBES_DIR) + self.assertNotEqual(dirname(email.__file__), self.config.CUBES_DIR) self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] - self.assertEquals(self.config.cubes_search_path(), + self.assertEqual(self.config.cubes_search_path(), [CUSTOM_CUBES_DIR, self.config.CUBES_DIR]) self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR, self.config.CUBES_DIR, 'unexistant'] # filter out unexistant and duplicates - self.assertEquals(self.config.cubes_search_path(), + self.assertEqual(self.config.cubes_search_path(), [CUSTOM_CUBES_DIR, self.config.CUBES_DIR]) self.failUnless('mycube' in self.config.available_cubes()) # test cubes python path self.config.adjust_sys_path() import cubes - self.assertEquals(cubes.__path__, self.config.cubes_search_path()) + self.assertEqual(cubes.__path__, self.config.cubes_search_path()) # this import should succeed once path is adjusted from cubes import mycube - self.assertEquals(mycube.__path__, [join(CUSTOM_CUBES_DIR, 'mycube')]) + self.assertEqual(mycube.__path__, [join(CUSTOM_CUBES_DIR, 'mycube')]) # file cube should be overriden by the one found in data/cubes sys.modules.pop('cubes.file', None) del cubes.file from cubes import file - self.assertEquals(file.__path__, [join(CUSTOM_CUBES_DIR, 'file')]) + self.assertEqual(file.__path__, [join(CUSTOM_CUBES_DIR, 'file')]) + class FindPrefixTC(TestCase): def make_dirs(self, *args): @@ -149,35 +157,35 @@ def test_samedir(self): prefix = tempfile.tempdir self.make_dirs('share', 'cubicweb') - self.assertEquals(_find_prefix(prefix), prefix) + self.assertEqual(_find_prefix(prefix), prefix) @with_tempdir def test_samedir_filepath(self): prefix = tempfile.tempdir self.make_dirs('share', 'cubicweb') file_path = self.make_file('bob.py') - self.assertEquals(_find_prefix(file_path), prefix) + self.assertEqual(_find_prefix(file_path), prefix) @with_tempdir def test_dir_inside_prefix(self): prefix = tempfile.tempdir self.make_dirs('share', 'cubicweb') dir_path = self.make_dirs('bob') - self.assertEquals(_find_prefix(dir_path), prefix) + self.assertEqual(_find_prefix(dir_path), prefix) @with_tempdir def test_file_in_dir_inside_prefix(self): prefix = tempfile.tempdir self.make_dirs('share', 'cubicweb') file_path = self.make_file('bob', 'toto.py') - self.assertEquals(_find_prefix(file_path), prefix) + self.assertEqual(_find_prefix(file_path), prefix) @with_tempdir def test_file_in_deeper_dir_inside_prefix(self): prefix = tempfile.tempdir self.make_dirs('share', 'cubicweb') file_path = self.make_file('bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py') - self.assertEquals(_find_prefix(file_path), prefix) + self.assertEqual(_find_prefix(file_path), prefix) @with_tempdir def test_multiple_candidate_prefix(self): @@ -185,7 +193,7 @@ prefix = self.make_dirs('bob') self.make_dirs('bob', 'share', 'cubicweb') file_path = self.make_file('bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py') - self.assertEquals(_find_prefix(file_path), prefix) + self.assertEqual(_find_prefix(file_path), prefix) @with_tempdir def 
test_sister_candidate_prefix(self): @@ -193,7 +201,7 @@ self.make_dirs('share', 'cubicweb') self.make_dirs('bob', 'share', 'cubicweb') file_path = self.make_file('bell', 'toto.py') - self.assertEquals(_find_prefix(file_path), prefix) + self.assertEqual(_find_prefix(file_path), prefix) @with_tempdir def test_multiple_parent_candidate_prefix(self): @@ -201,7 +209,7 @@ prefix = self.make_dirs('share', 'cubicweb', 'bob') self.make_dirs('share', 'cubicweb', 'bob', 'share', 'cubicweb') file_path = self.make_file('share', 'cubicweb', 'bob', 'pyves', 'alain', 'adim', 'syt', 'toto.py') - self.assertEquals(_find_prefix(file_path), prefix) + self.assertEqual(_find_prefix(file_path), prefix) @with_tempdir def test_upper_candidate_prefix(self): @@ -209,12 +217,12 @@ self.make_dirs('share', 'cubicweb') self.make_dirs('bell','bob', 'share', 'cubicweb') file_path = self.make_file('bell', 'toto.py') - self.assertEquals(_find_prefix(file_path), prefix) + self.assertEqual(_find_prefix(file_path), prefix) @with_tempdir def test_no_prefix(self): prefix = tempfile.tempdir - self.assertEquals(_find_prefix(prefix), sys.prefix) + self.assertEqual(_find_prefix(prefix), sys.prefix) if __name__ == '__main__': unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc test/unittest_cwctl.py --- a/test/unittest_cwctl.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/unittest_cwctl.py Wed Nov 03 16:38:28 2010 +0100 @@ -24,8 +24,12 @@ from logilab.common.testlib import TestCase, unittest_main from cubicweb.cwconfig import CubicWebConfiguration +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.server.migractions import ServerMigrationHelper + CubicWebConfiguration.load_cwctl_plugins() # XXX necessary? + class CubicWebCtlTC(TestCase): def setUp(self): self.stream = StringIO() @@ -35,7 +39,27 @@ def test_list(self): from cubicweb.cwctl import ListCommand - ListCommand().run([]) + ListCommand(None).run([]) + + +class CubicWebShellTC(CubicWebTC): + + def test_process_script_args_context(self): + repo = self.cnx._repo + mih = ServerMigrationHelper(None, repo=repo, cnx=self.cnx, + interactive=False, + # hack so it don't try to load fs schema + schema=1) + scripts = {'script1.py': list(), + 'script2.py': ['-v'], + 'script3.py': ['-vd', '-f', 'FILE.TXT'], + } + mih.cmd_process_script('data/scripts/script1.py', funcname=None) + for script, args in scripts.items(): + scriptname = os.path.join('data/scripts/', script) + self.assert_(os.path.exists(scriptname)) + mih.cmd_process_script(scriptname, None, scriptargs=args) + if __name__ == '__main__': unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc test/unittest_dbapi.py --- a/test/unittest_dbapi.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/unittest_dbapi.py Wed Nov 03 16:38:28 2010 +0100 @@ -29,40 +29,40 @@ def test_public_repo_api(self): cnx = self.login('anon') - self.assertEquals(cnx.get_schema(), self.repo.schema) - self.assertEquals(cnx.source_defs(), {'system': {'adapter': 'native', 'uri': 'system'}}) + self.assertEqual(cnx.get_schema(), self.repo.schema) + self.assertEqual(cnx.source_defs(), {'system': {'adapter': 'native', 'uri': 'system'}}) self.restore_connection() # proper way to close cnx self.assertRaises(ProgrammingError, cnx.get_schema) self.assertRaises(ProgrammingError, cnx.source_defs) def test_db_api(self): cnx = self.login('anon') - self.assertEquals(cnx.rollback(), None) - self.assertEquals(cnx.commit(), None) + self.assertEqual(cnx.rollback(), None) + self.assertEqual(cnx.commit(), None) self.restore_connection() # proper way to close cnx - 
#self.assertEquals(cnx.close(), None) + #self.assertEqual(cnx.close(), None) self.assertRaises(ProgrammingError, cnx.rollback) self.assertRaises(ProgrammingError, cnx.commit) self.assertRaises(ProgrammingError, cnx.close) def test_api(self): cnx = self.login('anon') - self.assertEquals(cnx.user(None).login, 'anon') - self.assertEquals(cnx.describe(1), (u'CWGroup', u'system', None)) + self.assertEqual(cnx.user(None).login, 'anon') + self.assertEqual(cnx.describe(1), (u'CWGroup', u'system', None)) self.restore_connection() # proper way to close cnx self.assertRaises(ProgrammingError, cnx.user, None) self.assertRaises(ProgrammingError, cnx.describe, 1) def test_shared_data_api(self): cnx = self.login('anon') - self.assertEquals(cnx.get_shared_data('data'), None) + self.assertEqual(cnx.get_shared_data('data'), None) cnx.set_shared_data('data', 4) - self.assertEquals(cnx.get_shared_data('data'), 4) + self.assertEqual(cnx.get_shared_data('data'), 4) cnx.get_shared_data('data', pop=True) cnx.get_shared_data('whatever', pop=True) - self.assertEquals(cnx.get_shared_data('data'), None) + self.assertEqual(cnx.get_shared_data('data'), None) cnx.set_shared_data('data', 4) - self.assertEquals(cnx.get_shared_data('data'), 4) + self.assertEqual(cnx.get_shared_data('data'), 4) self.restore_connection() # proper way to close cnx self.assertRaises(ProgrammingError, cnx.check) self.assertRaises(ProgrammingError, cnx.set_shared_data, 'data', 0) diff -r f4d1d5d9ccbb -r 90f2f20367bc test/unittest_entity.py --- a/test/unittest_entity.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/unittest_entity.py Wed Nov 03 16:38:28 2010 +0100 @@ -42,14 +42,14 @@ def test_has_eid(self): e = self.vreg['etypes'].etype_class('CWUser')(self.request()) - self.assertEquals(e.eid, None) - self.assertEquals(e.has_eid(), False) + self.assertEqual(e.eid, None) + self.assertEqual(e.has_eid(), False) e.eid = 'X' - self.assertEquals(e.has_eid(), False) + self.assertEqual(e.has_eid(), False) e.eid = 0 - self.assertEquals(e.has_eid(), True) + self.assertEqual(e.has_eid(), True) e.eid = 2 - self.assertEquals(e.has_eid(), True) + self.assertEqual(e.has_eid(), True) def test_copy(self): req = self.request() @@ -61,11 +61,11 @@ self.execute('SET TAG tags X WHERE X eid %(x)s', {'x': oe.eid}) e = req.create_entity('Note', type=u'z') e.copy_relations(oe.eid) - self.assertEquals(len(e.ecrit_par), 1) - self.assertEquals(e.ecrit_par[0].eid, p.eid) - self.assertEquals(len(e.reverse_tags), 1) + self.assertEqual(len(e.ecrit_par), 1) + self.assertEqual(e.ecrit_par[0].eid, p.eid) + self.assertEqual(len(e.reverse_tags), 1) # check meta-relations are not copied, set on commit - self.assertEquals(len(e.created_by), 0) + self.assertEqual(len(e.created_by), 0) def test_copy_with_nonmeta_composite_inlined(self): req = self.request() @@ -83,8 +83,8 @@ user = self.user() adeleid = self.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0] e = self.execute('Any X WHERE X eid %(x)s', {'x': user.eid}).get_entity(0, 0) - self.assertEquals(e.use_email[0].address, "toto@logilab.org") - self.assertEquals(e.use_email[0].eid, adeleid) + self.assertEqual(e.use_email[0].address, "toto@logilab.org") + self.assertEqual(e.use_email[0].eid, adeleid) usereid = self.execute('INSERT CWUser X: X login "toto", X upassword "toto", X in_group G ' 'WHERE G name "users"')[0][0] e = self.execute('Any X WHERE X eid %(x)s', {'x': usereid}).get_entity(0, 0) @@ -97,27 +97,27 @@ user = self.execute('INSERT CWUser X: X login "toto", X upassword 
%(pwd)s, X in_group G WHERE G name "users"', {'pwd': 'toto'}).get_entity(0, 0) self.commit() - user.fire_transition('deactivate') + user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') self.commit() eid2 = self.execute('INSERT CWUser X: X login "tutu", X upassword %(pwd)s', {'pwd': 'toto'})[0][0] e = self.execute('Any X WHERE X eid %(x)s', {'x': eid2}).get_entity(0, 0) e.copy_relations(user.eid) self.commit() - e.clear_related_cache('in_state', 'subject') - self.assertEquals(e.state, 'activated') + e.cw_clear_relation_cache('in_state', 'subject') + self.assertEqual(e.cw_adapt_to('IWorkflowable').state, 'activated') def test_related_cache_both(self): user = self.execute('Any X WHERE X eid %(x)s', {'x':self.user().eid}).get_entity(0, 0) adeleid = self.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0] self.commit() - self.assertEquals(user._related_cache, {}) + self.assertEqual(user._cw_related_cache, {}) email = user.primary_email[0] - self.assertEquals(sorted(user._related_cache), ['primary_email_subject']) - self.assertEquals(email._related_cache.keys(), ['primary_email_object']) + self.assertEqual(sorted(user._cw_related_cache), ['primary_email_subject']) + self.assertEqual(email._cw_related_cache.keys(), ['primary_email_object']) groups = user.in_group - self.assertEquals(sorted(user._related_cache), ['in_group_subject', 'primary_email_subject']) + self.assertEqual(sorted(user._cw_related_cache), ['in_group_subject', 'primary_email_subject']) for group in groups: - self.failIf('in_group_subject' in group._related_cache, group._related_cache.keys()) + self.failIf('in_group_subject' in group._cw_related_cache, group._cw_related_cache.keys()) def test_related_limit(self): req = self.request() @@ -125,8 +125,8 @@ for tag in u'abcd': req.create_entity('Tag', name=tag) self.execute('SET X tags Y WHERE X is Tag, Y is Personne') - self.assertEquals(len(p.related('tags', 'object', limit=2)), 2) - self.assertEquals(len(p.related('tags', 'object')), 4) + self.assertEqual(len(p.related('tags', 'object', limit=2)), 2) + self.assertEqual(len(p.related('tags', 'object')), 4) def test_fetch_rql(self): @@ -141,7 +141,7 @@ peschema.subjrels['evaluee'].rdef(peschema, Note.e_schema).cardinality = '1*' seschema.subjrels['evaluee'].rdef(seschema, Note.e_schema).cardinality = '1*' # testing basic fetch_attrs attribute - self.assertEquals(Personne.fetch_rql(user), + self.assertEqual(Personne.fetch_rql(user), 'Any X,AA,AB,AC ORDERBY AA ASC ' 'WHERE X is Personne, X nom AA, X prenom AB, X modification_date AC') pfetch_attrs = Personne.fetch_attrs @@ -149,39 +149,39 @@ try: # testing unknown attributes Personne.fetch_attrs = ('bloug', 'beep') - self.assertEquals(Personne.fetch_rql(user), 'Any X WHERE X is Personne') + self.assertEqual(Personne.fetch_rql(user), 'Any X WHERE X is Personne') # testing one non final relation Personne.fetch_attrs = ('nom', 'prenom', 'travaille') - self.assertEquals(Personne.fetch_rql(user), + self.assertEqual(Personne.fetch_rql(user), 'Any X,AA,AB,AC,AD ORDERBY AA ASC ' 'WHERE X is Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD') # testing two non final relations Personne.fetch_attrs = ('nom', 'prenom', 'travaille', 'evaluee') - self.assertEquals(Personne.fetch_rql(user), + self.assertEqual(Personne.fetch_rql(user), 'Any X,AA,AB,AC,AD,AE,AF ORDERBY AA ASC,AF DESC ' 'WHERE X is Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD, ' 'X evaluee AE?, AE modification_date AF') # testing one non final 
relation with recursion Personne.fetch_attrs = ('nom', 'prenom', 'travaille') Societe.fetch_attrs = ('nom', 'evaluee') - self.assertEquals(Personne.fetch_rql(user), + self.assertEqual(Personne.fetch_rql(user), 'Any X,AA,AB,AC,AD,AE,AF ORDERBY AA ASC,AF DESC ' 'WHERE X is Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD, ' 'AC evaluee AE?, AE modification_date AF' ) # testing symmetric relation Personne.fetch_attrs = ('nom', 'connait') - self.assertEquals(Personne.fetch_rql(user), 'Any X,AA,AB ORDERBY AA ASC ' + self.assertEqual(Personne.fetch_rql(user), 'Any X,AA,AB ORDERBY AA ASC ' 'WHERE X is Personne, X nom AA, X connait AB?') # testing optional relation peschema.subjrels['travaille'].rdef(peschema, seschema).cardinality = '?*' Personne.fetch_attrs = ('nom', 'prenom', 'travaille') Societe.fetch_attrs = ('nom',) - self.assertEquals(Personne.fetch_rql(user), + self.assertEqual(Personne.fetch_rql(user), 'Any X,AA,AB,AC,AD ORDERBY AA ASC WHERE X is Personne, X nom AA, X prenom AB, X travaille AC?, AC nom AD') # testing relation with cardinality > 1 peschema.subjrels['travaille'].rdef(peschema, seschema).cardinality = '**' - self.assertEquals(Personne.fetch_rql(user), + self.assertEqual(Personne.fetch_rql(user), 'Any X,AA,AB ORDERBY AA ASC WHERE X is Personne, X nom AA, X prenom AB') # XXX test unauthorized attribute finally: @@ -197,20 +197,20 @@ Note.fetch_attrs, Note.fetch_order = fetch_config(('type',)) SubNote.fetch_attrs, SubNote.fetch_order = fetch_config(('type',)) p = self.request().create_entity('Personne', nom=u'pouet') - self.assertEquals(p.related_rql('evaluee'), + self.assertEqual(p.cw_related_rql('evaluee'), 'Any X,AA,AB ORDERBY AA ASC WHERE E eid %(x)s, E evaluee X, ' 'X type AA, X modification_date AB') Personne.fetch_attrs, Personne.fetch_order = fetch_config(('nom', )) # XXX - self.assertEquals(p.related_rql('evaluee'), + self.assertEqual(p.cw_related_rql('evaluee'), 'Any X,AA ORDERBY AA DESC ' 'WHERE E eid %(x)s, E evaluee X, X modification_date AA') tag = self.vreg['etypes'].etype_class('Tag')(self.request()) - self.assertEquals(tag.related_rql('tags', 'subject'), + self.assertEqual(tag.cw_related_rql('tags', 'subject'), 'Any X,AA ORDERBY AA DESC ' 'WHERE E eid %(x)s, E tags X, X modification_date AA') - self.assertEquals(tag.related_rql('tags', 'subject', ('Personne',)), + self.assertEqual(tag.cw_related_rql('tags', 'subject', ('Personne',)), 'Any X,AA,AB ORDERBY AA ASC ' 'WHERE E eid %(x)s, E tags X, X is IN (Personne), X nom AA, ' 'X modification_date AB') @@ -219,48 +219,48 @@ tag = self.vreg['etypes'].etype_class('Tag')(self.request()) for ttype in self.schema['tags'].objects(): self.vreg['etypes'].etype_class(ttype).fetch_attrs = ('modification_date',) - self.assertEquals(tag.related_rql('tags', 'subject'), + self.assertEqual(tag.cw_related_rql('tags', 'subject'), 'Any X,AA ORDERBY AA DESC ' 'WHERE E eid %(x)s, E tags X, X modification_date AA') def test_unrelated_rql_security_1(self): user = self.request().user - rql = user.unrelated_rql('use_email', 'EmailAddress', 'subject')[0] - self.assertEquals(rql, 'Any O,AA,AB,AC ORDERBY AC DESC ' + rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] + self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC ' 'WHERE NOT S use_email O, S eid %(x)s, O is EmailAddress, O address AA, O alias AB, O modification_date AC') self.create_user('toto') self.login('toto') user = self.request().user - rql = user.unrelated_rql('use_email', 'EmailAddress', 'subject')[0] - self.assertEquals(rql, 'Any O,AA,AB,AC 
ORDERBY AC DESC ' + rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] + self.assertEqual(rql, 'Any O,AA,AB,AC ORDERBY AC DESC ' 'WHERE NOT S use_email O, S eid %(x)s, O is EmailAddress, O address AA, O alias AB, O modification_date AC') user = self.execute('Any X WHERE X login "admin"').get_entity(0, 0) - self.assertRaises(Unauthorized, user.unrelated_rql, 'use_email', 'EmailAddress', 'subject') + self.assertRaises(Unauthorized, user.cw_unrelated_rql, 'use_email', 'EmailAddress', 'subject') self.login('anon') user = self.request().user - self.assertRaises(Unauthorized, user.unrelated_rql, 'use_email', 'EmailAddress', 'subject') + self.assertRaises(Unauthorized, user.cw_unrelated_rql, 'use_email', 'EmailAddress', 'subject') def test_unrelated_rql_security_2(self): email = self.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0) - rql = email.unrelated_rql('use_email', 'CWUser', 'object')[0] - self.assertEquals(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ASC ' + rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] + self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ASC ' 'WHERE NOT S use_email O, O eid %(x)s, S is CWUser, S login AA, S firstname AB, S surname AC, S modification_date AD') - #rql = email.unrelated_rql('use_email', 'Person', 'object')[0] - #self.assertEquals(rql, '') + #rql = email.cw_unrelated_rql('use_email', 'Person', 'object')[0] + #self.assertEqual(rql, '') self.login('anon') email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) - rql = email.unrelated_rql('use_email', 'CWUser', 'object')[0] - self.assertEquals(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ' + rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] + self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ' 'WHERE NOT EXISTS(S use_email O), O eid %(x)s, S is CWUser, S login AA, S firstname AB, S surname AC, S modification_date AD, ' 'A eid %(B)s, EXISTS(S identity A, NOT A in_group C, C name "guests", C is CWGroup)') - #rql = email.unrelated_rql('use_email', 'Person', 'object')[0] - #self.assertEquals(rql, '') + #rql = email.cw_unrelated_rql('use_email', 'Person', 'object')[0] + #self.assertEqual(rql, '') def test_unrelated_rql_security_nonexistant(self): self.login('anon') email = self.vreg['etypes'].etype_class('EmailAddress')(self.request()) - rql = email.unrelated_rql('use_email', 'CWUser', 'object')[0] - self.assertEquals(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ' + rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] + self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ' 'WHERE S is CWUser, S login AA, S firstname AB, S surname AC, S modification_date AD, ' 'A eid %(B)s, EXISTS(S identity A, NOT A in_group C, C name "guests", C is CWGroup)') @@ -282,71 +282,71 @@ e = req.create_entity('Tag', name=u'x') req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') req.create_entity('Personne', nom=u'thenault', prenom=u'sylvain') - self.assertEquals(len(e.unrelated('tags', 'Personne', 'subject', limit=1)), + self.assertEqual(len(e.unrelated('tags', 'Personne', 'subject', limit=1)), 1) def test_unrelated_security(self): email = self.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0) rset = email.unrelated('use_email', 'CWUser', 'object') - self.assertEquals([x.login for x in rset.entities()], [u'admin', u'anon']) + self.assertEqual([x.login for x in rset.entities()], [u'admin', u'anon']) user = self.request().user rset = user.unrelated('use_email', 'EmailAddress', 'subject') - self.assertEquals([x.address for x in 
rset.entities()], [u'hop']) + self.assertEqual([x.address for x in rset.entities()], [u'hop']) self.create_user('toto') self.login('toto') email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) rset = email.unrelated('use_email', 'CWUser', 'object') - self.assertEquals([x.login for x in rset.entities()], ['toto']) + self.assertEqual([x.login for x in rset.entities()], ['toto']) user = self.request().user rset = user.unrelated('use_email', 'EmailAddress', 'subject') - self.assertEquals([x.address for x in rset.entities()], ['hop']) + self.assertEqual([x.address for x in rset.entities()], ['hop']) user = self.execute('Any X WHERE X login "admin"').get_entity(0, 0) rset = user.unrelated('use_email', 'EmailAddress', 'subject') - self.assertEquals([x.address for x in rset.entities()], []) + self.assertEqual([x.address for x in rset.entities()], []) self.login('anon') email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) rset = email.unrelated('use_email', 'CWUser', 'object') - self.assertEquals([x.login for x in rset.entities()], []) + self.assertEqual([x.login for x in rset.entities()], []) user = self.request().user rset = user.unrelated('use_email', 'EmailAddress', 'subject') - self.assertEquals([x.address for x in rset.entities()], []) + self.assertEqual([x.address for x in rset.entities()], []) def test_unrelated_new_entity(self): e = self.vreg['etypes'].etype_class('CWUser')(self.request()) unrelated = [r[0] for r in e.unrelated('in_group', 'CWGroup', 'subject')] # should be default groups but owners, i.e. managers, users, guests - self.assertEquals(len(unrelated), 3) + self.assertEqual(len(unrelated), 3) def test_printable_value_string(self): e = self.request().create_entity('Card', title=u'rest test', content=u'du :eid:`1:*ReST*`', content_format=u'text/rest') - self.assertEquals(e.printable_value('content'), + self.assertEqual(e.printable_value('content'), '

        du *ReST*

        \n') e['content'] = 'du html users' e['content_format'] = 'text/html' - self.assertEquals(e.printable_value('content'), + self.assertEqual(e.printable_value('content'), 'du html users') e['content'] = 'du *texte*' e['content_format'] = 'text/plain' - self.assertEquals(e.printable_value('content'), - '

        \ndu *texte*\n

        ') + self.assertEqual(e.printable_value('content'), + '

        \ndu *texte*
        \n

        ') e['title'] = 'zou' e['content'] = '''\ a title ======= du :eid:`1:*ReST*`''' e['content_format'] = 'text/rest' - self.assertEquals(e.printable_value('content', format='text/plain'), + self.assertEqual(e.printable_value('content', format='text/plain'), e['content']) e['content'] = u'yo (zou éà ;)' e['content_format'] = 'text/html' - self.assertEquals(e.printable_value('content', format='text/plain').strip(), + self.assertEqual(e.printable_value('content', format='text/plain').strip(), u'**yo (zou éà ;)**') if HAS_TAL: e['content'] = '

        titre

        ' e['content_format'] = 'text/cubicweb-page-template' - self.assertEquals(e.printable_value('content'), + self.assertEqual(e.printable_value('content'), '

        zou

        ') @@ -358,17 +358,17 @@ if mttransforms.HAS_PYGMENTS_TRANSFORMS: import pygments if tuple(int(i) for i in pygments.__version__.split('.')[:2]) >= (1, 3): - self.assertEquals(e.printable_value('data'), + self.assertEqual(e.printable_value('data'), '''
        lambda x: 1
         
        ''') else: - self.assertEquals(e.printable_value('data'), + self.assertEqual(e.printable_value('data'), '''
        lambda x: 1
         
        ''') else: - self.assertEquals(e.printable_value('data'), + self.assertEqual(e.printable_value('data'), '''
         lambda x: 1
         
        @@ -376,7 +376,7 @@ e = req.create_entity('File', data=Binary('*héhéhé*'), data_format=u'text/rest', data_encoding=u'utf-8', data_name=u'toto.txt') - self.assertEquals(e.printable_value('data'), + self.assertEqual(e.printable_value('data'), u'

        héhéhé

        \n') def test_printable_value_bad_html(self): @@ -385,42 +385,42 @@ e = req.create_entity('Card', title=u'bad html', content=u'
        R&D
        ', content_format=u'text/html') tidy = lambda x: x.replace('\n', '') - self.assertEquals(tidy(e.printable_value('content')), + self.assertEqual(tidy(e.printable_value('content')), '
        R&D
        ') e['content'] = u'yo !! R&D
        pas fermé' - self.assertEquals(tidy(e.printable_value('content')), + self.assertEqual(tidy(e.printable_value('content')), u'yo !! R&D
        pas fermé
        ') e['content'] = u'R&D' - self.assertEquals(tidy(e.printable_value('content')), u'R&D') + self.assertEqual(tidy(e.printable_value('content')), u'R&D') e['content'] = u'R&D;' - self.assertEquals(tidy(e.printable_value('content')), u'R&D;') + self.assertEqual(tidy(e.printable_value('content')), u'R&D;') e['content'] = u'yo !! R&D
        pas fermé' - self.assertEquals(tidy(e.printable_value('content')), + self.assertEqual(tidy(e.printable_value('content')), u'yo !! R&D
        pas fermé
        ') e['content'] = u'été
        été' - self.assertEquals(tidy(e.printable_value('content')), + self.assertEqual(tidy(e.printable_value('content')), u'été
        été
        ') e['content'] = u'C'est un exemple sérieux' - self.assertEquals(tidy(e.printable_value('content')), + self.assertEqual(tidy(e.printable_value('content')), u"C'est un exemple sérieux") # make sure valid xhtml is left untouched e['content'] = u'
        R&D
        ' - self.assertEquals(e.printable_value('content'), e['content']) + self.assertEqual(e.printable_value('content'), e['content']) e['content'] = u'
        été
        ' - self.assertEquals(e.printable_value('content'), e['content']) + self.assertEqual(e.printable_value('content'), e['content']) e['content'] = u'été' - self.assertEquals(e.printable_value('content'), e['content']) + self.assertEqual(e.printable_value('content'), e['content']) e['content'] = u'hop\r\nhop\nhip\rmomo' - self.assertEquals(e.printable_value('content'), u'hop\nhop\nhip\nmomo') + self.assertEqual(e.printable_value('content'), u'hop\nhop\nhip\nmomo') def test_printable_value_bad_html_ms(self): - self.skip('fix soup2xhtml to handle this test') + self.skipTest('fix soup2xhtml to handle this test') req = self.request() e = req.create_entity('Card', title=u'bad html', content=u'
        R&D
        ', content_format=u'text/html') tidy = lambda x: x.replace('\n', '') e['content'] = u'
        ms orifice produces weird html
        ' - self.assertEquals(tidy(e.printable_value('content')), + self.assertEqual(tidy(e.printable_value('content')), u'
        ms orifice produces weird html
        ') import tidy as tidymod # apt-get install python-tidy tidy = lambda x: str(tidymod.parseString(x.encode('utf-8'), @@ -429,7 +429,7 @@ 'show_body_only' : True, 'quote-nbsp' : False, 'char_encoding' : 'utf8'})).decode('utf-8').strip() - self.assertEquals(tidy(e.printable_value('content')), + self.assertEqual(tidy(e.printable_value('content')), u'
        ms orifice produces weird html
        ') @@ -442,8 +442,8 @@ e['data_format'] = 'text/html' e['data_encoding'] = 'ascii' e._cw.transaction_data = {} # XXX req should be a session - self.assertEquals(set(e.get_words()), - set(['an', 'html', 'file', 'du', 'html', 'some', 'data'])) + self.assertEqual(e.cw_adapt_to('IFTIndexable').get_words(), + {'C': [u'du', u'html', 'an', 'html', 'file', u'some', u'data']}) def test_nonregr_relation_cache(self): @@ -451,7 +451,7 @@ p1 = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') p2 = req.create_entity('Personne', nom=u'toto') self.execute('SET X evaluee Y WHERE X nom "di mascio", Y nom "toto"') - self.assertEquals(p1.evaluee[0].nom, "toto") + self.assertEqual(p1.evaluee[0].nom, "toto") self.failUnless(not p1.reverse_evaluee) def test_complete_relation(self): @@ -462,10 +462,10 @@ trinfo = self.execute('Any X WHERE X eid %(x)s', {'x': eid}).get_entity(0, 0) trinfo.complete() self.failUnless(isinstance(trinfo['creation_date'], datetime)) - self.failUnless(trinfo.relation_cached('from_state', 'subject')) - self.failUnless(trinfo.relation_cached('to_state', 'subject')) - self.failUnless(trinfo.relation_cached('wf_info_for', 'subject')) - self.assertEquals(trinfo.by_transition, ()) + self.failUnless(trinfo.cw_relation_cached('from_state', 'subject')) + self.failUnless(trinfo.cw_relation_cached('to_state', 'subject')) + self.failUnless(trinfo.cw_relation_cached('wf_info_for', 'subject')) + self.assertEqual(trinfo.by_transition, ()) def test_request_cache(self): req = self.request() @@ -477,49 +477,55 @@ def test_rest_path(self): req = self.request() note = req.create_entity('Note', type=u'z') - self.assertEquals(note.rest_path(), 'note/%s' % note.eid) + self.assertEqual(note.rest_path(), 'note/%s' % note.eid) # unique attr tag = req.create_entity('Tag', name=u'x') - self.assertEquals(tag.rest_path(), 'tag/x') + self.assertEqual(tag.rest_path(), 'tag/x') # test explicit rest_attr person = req.create_entity('Personne', prenom=u'john', nom=u'doe') - self.assertEquals(person.rest_path(), 'personne/doe') + self.assertEqual(person.rest_path(), 'personne/doe') # ambiguity test person2 = req.create_entity('Personne', prenom=u'remi', nom=u'doe') person.clear_all_caches() - self.assertEquals(person.rest_path(), 'personne/eid/%s' % person.eid) - self.assertEquals(person2.rest_path(), 'personne/eid/%s' % person2.eid) + self.assertEqual(person.rest_path(), 'personne/eid/%s' % person.eid) + self.assertEqual(person2.rest_path(), 'personne/eid/%s' % person2.eid) # unique attr with None value (wikiid in this case) card1 = req.create_entity('Card', title=u'hop') - self.assertEquals(card1.rest_path(), 'card/eid/%s' % card1.eid) - card2 = req.create_entity('Card', title=u'pod', wikiid=u'zob/i') - self.assertEquals(card2.rest_path(), 'card/zob%2Fi') + self.assertEqual(card1.rest_path(), 'card/eid/%s' % card1.eid) + # don't use rest if we have /, ? 
or & in the path (breaks mod_proxy) + card2 = req.create_entity('Card', title=u'pod', wikiid=u'zo/bi') + self.assertEqual(card2.rest_path(), 'card/eid/%d' % card2.eid) + card3 = req.create_entity('Card', title=u'pod', wikiid=u'zo&bi') + self.assertEqual(card3.rest_path(), 'card/eid/%d' % card3.eid) + card4 = req.create_entity('Card', title=u'pod', wikiid=u'zo?bi') + self.assertEqual(card4.rest_path(), 'card/eid/%d' % card4.eid) + def test_set_attributes(self): req = self.request() person = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien') - self.assertEquals(person.prenom, u'adrien') - self.assertEquals(person.nom, u'di mascio') + self.assertEqual(person.prenom, u'adrien') + self.assertEqual(person.nom, u'di mascio') person.set_attributes(prenom=u'sylvain', nom=u'thénault') person = self.execute('Personne P').get_entity(0, 0) # XXX retreival needed ? - self.assertEquals(person.prenom, u'sylvain') - self.assertEquals(person.nom, u'thénault') + self.assertEqual(person.prenom, u'sylvain') + self.assertEqual(person.nom, u'thénault') def test_metainformation_and_external_absolute_url(self): req = self.request() note = req.create_entity('Note', type=u'z') - metainf = note.metainformation() - self.assertEquals(metainf, {'source': {'adapter': 'native', 'uri': 'system'}, 'type': u'Note', 'extid': None}) - self.assertEquals(note.absolute_url(), 'http://testing.fr/cubicweb/note/%s' % note.eid) + metainf = note.cw_metainformation() + self.assertEqual(metainf, {'source': {'adapter': 'native', 'uri': 'system'}, 'type': u'Note', 'extid': None}) + self.assertEqual(note.absolute_url(), 'http://testing.fr/cubicweb/note/%s' % note.eid) metainf['source'] = metainf['source'].copy() metainf['source']['base-url'] = 'http://cubicweb2.com/' metainf['extid'] = 1234 - self.assertEquals(note.absolute_url(), 'http://cubicweb2.com/note/1234') + self.assertEqual(note.absolute_url(), 'http://cubicweb2.com/note/1234') def test_absolute_url_empty_field(self): req = self.request() card = req.create_entity('Card', wikiid=u'', title=u'test') - self.assertEquals(card.absolute_url(), + self.assertEqual(card.absolute_url(), 'http://testing.fr/cubicweb/card/eid/%s' % card.eid) def test_create_entity(self): @@ -531,10 +537,10 @@ p = req.create_entity('Personne', nom=u'di mascio', prenom=u'adrien', connait=p1, evaluee=[p1, p2], reverse_ecrit_par=note) - self.assertEquals(p.nom, 'di mascio') - self.assertEquals([c.nom for c in p.connait], ['fayolle']) - self.assertEquals(sorted([c.nom for c in p.evaluee]), ['campeas', 'fayolle']) - self.assertEquals([c.type for c in p.reverse_ecrit_par], ['z']) + self.assertEqual(p.nom, 'di mascio') + self.assertEqual([c.nom for c in p.connait], ['fayolle']) + self.assertEqual(sorted([c.nom for c in p.evaluee]), ['campeas', 'fayolle']) + self.assertEqual([c.type for c in p.reverse_ecrit_par], ['z']) diff -r f4d1d5d9ccbb -r 90f2f20367bc test/unittest_mail.py --- a/test/unittest_mail.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/unittest_mail.py Wed Nov 03 16:38:28 2010 +0100 @@ -51,7 +51,7 @@ mail = format_mail({'name': 'oim', 'email': 'oim@logilab.fr'}, ['test@logilab.fr'], u'un petit cöucou', u'bïjour', config=self.config) - self.assertLinesEquals(mail.as_string(), """\ + self.assertMultiLineEqual(mail.as_string(), """\ MIME-Version: 1.0 Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: base64 @@ -64,17 +64,17 @@ dW4gcGV0aXQgY8O2dWNvdQ== """) msg = message_from_string(mail.as_string()) - self.assertEquals(msg.get('subject'), u'bïjour') - 
self.assertEquals(msg.get('from'), u'oim ') - self.assertEquals(msg.get('to'), u'test@logilab.fr') - self.assertEquals(msg.get('reply-to'), u'oim , BimBam ') - self.assertEquals(msg.get_payload(decode=True), u'un petit cöucou') + self.assertEqual(msg.get('subject'), u'bïjour') + self.assertEqual(msg.get('from'), u'oim ') + self.assertEqual(msg.get('to'), u'test@logilab.fr') + self.assertEqual(msg.get('reply-to'), u'oim , BimBam ') + self.assertEqual(msg.get_payload(decode=True), u'un petit cöucou') def test_format_mail_euro(self): mail = format_mail({'name': u'oîm', 'email': u'oim@logilab.fr'}, ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €') - self.assertLinesEquals(mail.as_string(), """\ + self.assertMultiLineEqual(mail.as_string(), """\ MIME-Version: 1.0 Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: base64 @@ -86,11 +86,11 @@ dW4gcGV0aXQgY8O2dWNvdSDigqw= """) msg = message_from_string(mail.as_string()) - self.assertEquals(msg.get('subject'), u'bïjour €') - self.assertEquals(msg.get('from'), u'oîm ') - self.assertEquals(msg.get('to'), u'test@logilab.fr') - self.assertEquals(msg.get('reply-to'), u'oîm ') - self.assertEquals(msg.get_payload(decode=True), u'un petit cöucou €') + self.assertEqual(msg.get('subject'), u'bïjour €') + self.assertEqual(msg.get('from'), u'oîm ') + self.assertEqual(msg.get('to'), u'test@logilab.fr') + self.assertEqual(msg.get('reply-to'), u'oîm ') + self.assertEqual(msg.get_payload(decode=True), u'un petit cöucou €') def test_format_mail_from_reply_to(self): @@ -100,19 +100,19 @@ msg = format_mail({'name': u'', 'email': u''}, ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', config=self.config) - self.assertEquals(msg.get('from'), u'') - self.assertEquals(msg.get('reply-to'), None) + self.assertEqual(msg.get('from'), u'') + self.assertEqual(msg.get('reply-to'), None) msg = format_mail({'name': u'tutu', 'email': u'tutu@logilab.fr'}, ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', config=self.config) msg = message_from_string(msg.as_string()) - self.assertEquals(msg.get('from'), u'tutu ') - self.assertEquals(msg.get('reply-to'), u'tutu ') + self.assertEqual(msg.get('from'), u'tutu ') + self.assertEqual(msg.get('reply-to'), u'tutu ') msg = format_mail({'name': u'tutu', 'email': u'tutu@logilab.fr'}, ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €') msg = message_from_string(msg.as_string()) - self.assertEquals(msg.get('from'), u'tutu ') - self.assertEquals(msg.get('reply-to'), u'tutu ') + self.assertEqual(msg.get('from'), u'tutu ') + self.assertEqual(msg.get('reply-to'), u'tutu ') # set sender name and address as expected self.set_option('sender-name', 'cubicweb-test') self.set_option('sender-addr', 'cubicweb-test@logilab.fr') @@ -121,22 +121,22 @@ ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', config=self.config) msg = message_from_string(msg.as_string()) - self.assertEquals(msg.get('from'), u'cubicweb-test ') - self.assertEquals(msg.get('reply-to'), u'cubicweb-test ') + self.assertEqual(msg.get('from'), u'cubicweb-test ') + self.assertEqual(msg.get('reply-to'), u'cubicweb-test ') # anonymous notification: only email specified msg = format_mail({'email': u'tutu@logilab.fr'}, ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', config=self.config) msg = message_from_string(msg.as_string()) - self.assertEquals(msg.get('from'), u'cubicweb-test ') - self.assertEquals(msg.get('reply-to'), u'cubicweb-test , cubicweb-test ') + self.assertEqual(msg.get('from'), u'cubicweb-test ') + 
self.assertEqual(msg.get('reply-to'), u'cubicweb-test , cubicweb-test ') # anonymous notification: only name specified msg = format_mail({'name': u'tutu'}, ['test@logilab.fr'], u'un petit cöucou €', u'bïjour €', config=self.config) msg = message_from_string(msg.as_string()) - self.assertEquals(msg.get('from'), u'tutu ') - self.assertEquals(msg.get('reply-to'), u'tutu ') + self.assertEqual(msg.get('from'), u'tutu ') + self.assertEqual(msg.get('reply-to'), u'tutu ') diff -r f4d1d5d9ccbb -r 90f2f20367bc test/unittest_migration.py --- a/test/unittest_migration.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/unittest_migration.py Wed Nov 03 16:38:28 2010 +0100 @@ -52,26 +52,26 @@ self.config.__class__.cube_appobject_path = frozenset() def test_filter_scripts_base(self): - self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,3,0), (2,4,0)), + self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,3,0), (2,4,0)), []) - self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,5,0)), + self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,5,0)), [((2, 5, 0), SMIGRDIR+'2.5.0_Any.sql')]) - self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,6,0)), + self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,6,0)), [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')]) - self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,6,0)), + self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,4,0), (2,6,0)), [((2, 5, 0), SMIGRDIR+'2.5.0_Any.sql'), ((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')]) - self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,5,1)), + self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,5,1)), []) - self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,10,2)), + self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,5,0), (2,10,2)), [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql'), ((2, 10, 2), SMIGRDIR+'2.10.2_Any.sql')]) - self.assertListEquals(filter_scripts(self.config, SMIGRDIR, (2,5,1), (2,6,0)), + self.assertListEqual(filter_scripts(self.config, SMIGRDIR, (2,5,1), (2,6,0)), [((2, 6, 0), SMIGRDIR+'2.6.0_Any.sql')]) - self.assertListEquals(filter_scripts(self.config, TMIGRDIR, (0,0,2), (0,0,3)), + self.assertListEqual(filter_scripts(self.config, TMIGRDIR, (0,0,2), (0,0,3)), [((0, 0, 3), TMIGRDIR+'0.0.3_Any.py')]) - self.assertListEquals(filter_scripts(self.config, TMIGRDIR, (0,0,2), (0,0,4)), + self.assertListEqual(filter_scripts(self.config, TMIGRDIR, (0,0,2), (0,0,4)), [((0, 0, 3), TMIGRDIR+'0.0.3_Any.py'), ((0, 0, 4), TMIGRDIR+'0.0.4_Any.py')]) @@ -82,16 +82,16 @@ self.assertIsInstance(config.migration_handler(), MigrationHelper) config = self.config config.__class__.name = 'twisted' - self.assertListEquals(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)), + self.assertListEqual(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)), [((0, 1 ,0), TMIGRDIR+'0.1.0_common.py'), ((0, 1 ,0), TMIGRDIR+'0.1.0_web.py')]) config.__class__.name = 'repository' - self.assertListEquals(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)), + self.assertListEqual(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)), [((0, 1 ,0), TMIGRDIR+'0.1.0_Any.py'), ((0, 1 ,0), TMIGRDIR+'0.1.0_common.py'), ((0, 1 ,0), TMIGRDIR+'0.1.0_repository.py')]) config.__class__.name = 'all-in-one' - self.assertListEquals(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)), + self.assertListEqual(filter_scripts(config, TMIGRDIR, (0,0,4), (0,1,0)), [((0, 1 ,0), TMIGRDIR+'0.1.0_Any.py'), ((0, 
1 ,0), TMIGRDIR+'0.1.0_common.py'), ((0, 1 ,0), TMIGRDIR+'0.1.0_repository.py'), @@ -107,7 +107,7 @@ """make sure database can be created""" config = ApptestConfiguration('data') source = config.sources()['system'] - self.assertEquals(source['db-driver'], 'sqlite') + self.assertEqual(source['db-driver'], 'sqlite') cleanup_sqlite(source['db-name'], removetemplate=True) init_test_database(config=config) diff -r f4d1d5d9ccbb -r 90f2f20367bc test/unittest_req.py --- a/test/unittest_req.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/unittest_req.py Wed Nov 03 16:38:28 2010 +0100 @@ -17,17 +17,31 @@ # with CubicWeb. If not, see . from logilab.common.testlib import TestCase, unittest_main from cubicweb.req import RequestSessionBase +from cubicweb.devtools.testlib import CubicWebTC + class RebuildURLTC(TestCase): - def test(self): + def test_rebuild_url(self): rebuild_url = RequestSessionBase(None).rebuild_url - self.assertEquals(rebuild_url('http://logilab.fr?__message=pouet', __message='hop'), + self.assertEqual(rebuild_url('http://logilab.fr?__message=pouet', __message='hop'), + 'http://logilab.fr?__message=hop') + self.assertEqual(rebuild_url('http://logilab.fr', __message='hop'), 'http://logilab.fr?__message=hop') - self.assertEquals(rebuild_url('http://logilab.fr', __message='hop'), - 'http://logilab.fr?__message=hop') - self.assertEquals(rebuild_url('http://logilab.fr?vid=index', __message='hop'), + self.assertEqual(rebuild_url('http://logilab.fr?vid=index', __message='hop'), 'http://logilab.fr?__message=hop&vid=index') + def test_build_url(self): + req = RequestSessionBase(None) + req.from_controller = lambda : 'view' + req.relative_path = lambda includeparams=True: None + req.base_url = lambda : 'http://testing.fr/cubicweb/' + self.assertEqual(req.build_url(), u'http://testing.fr/cubicweb/view') + self.assertEqual(req.build_url(None), u'http://testing.fr/cubicweb/view') + self.assertEqual(req.build_url('one'), u'http://testing.fr/cubicweb/one') + self.assertEqual(req.build_url(param='ok'), u'http://testing.fr/cubicweb/view?param=ok') + self.assertRaises(AssertionError, req.build_url, 'one', 'two not allowed') + self.assertRaises(ValueError, req.build_url, 'view', test=None) + if __name__ == '__main__': unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc test/unittest_rqlrewrite.py --- a/test/unittest_rqlrewrite.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/unittest_rqlrewrite.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-""" -""" from logilab.common.testlib import unittest_main, TestCase from logilab.common.testlib import mock_object from yams import BadSchemaDefinition @@ -109,7 +107,7 @@ rqlst = parse('Any S WHERE S documented_by C, C eid %(u)s') rewrite(rqlst, {('C', 'X'): (card_constraint,), ('S', 'X'): affaire_constraints}, kwargs) - self.assertTextEquals(rqlst.as_string(), + self.assertMultiLineEqual(rqlst.as_string(), "Any S WHERE S documented_by C, C eid %(u)s, B eid %(D)s, " "EXISTS(C in_state A, B in_group E, F require_state A, " "F name 'read', F require_group E, A is State, E is CWGroup, F is CWPermission), " @@ -272,7 +270,7 @@ "EXISTS(U in_group B, B name 'managers', B is CWGroup), T is TrInfo") def test_unsupported_constraint_3(self): - self.skip('raise unauthorized for now') + self.skipTest('raise unauthorized for now') trinfo_constraint = ('X wf_info_for Y, Y require_permission P, P name "read"') rqlst = parse('Any T WHERE T wf_info_for X') rewrite(rqlst, {('T', 'X'): (trinfo_constraint, 'X in_group G, G name "managers"')}, {}) diff -r f4d1d5d9ccbb -r 90f2f20367bc test/unittest_rset.py --- a/test/unittest_rset.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/unittest_rset.py Wed Nov 03 16:38:28 2010 +0100 @@ -52,7 +52,7 @@ } for rql, relations in queries.items(): result = list(attr_desc_iterator(parse(rql).children[0])) - self.assertEquals((rql, result), (rql, relations)) + self.assertEqual((rql, result), (rql, relations)) def test_relations_description_indexed(self): """tests relations_description() function""" @@ -63,7 +63,7 @@ for rql, results in queries.items(): for var_index, relations in results.items(): result = list(attr_desc_iterator(parse(rql).children[0], var_index)) - self.assertEquals(result, relations) + self.assertEqual(result, relations) @@ -79,15 +79,15 @@ def compare_urls(self, url1, url2): info1 = urlsplit(url1) info2 = urlsplit(url2) - self.assertEquals(info1[:3], info2[:3]) + self.assertEqual(info1[:3], info2[:3]) if info1[3] != info2[3]: params1 = dict(pair.split('=') for pair in info1[3].split('&')) params2 = dict(pair.split('=') for pair in info1[3].split('&')) - self.assertDictEquals(params1, params2) + self.assertDictEqual(params1, params2) def test_pickle(self): del self.rset.req - self.assertEquals(len(pickle.dumps(self.rset)), 392) + self.assertEqual(len(pickle.dumps(self.rset)), 392) def test_build_url(self): req = self.request() @@ -105,9 +105,9 @@ def test_resultset_build(self): """test basic build of a ResultSet""" rs = ResultSet([1,2,3], 'CWGroup X', description=['CWGroup', 'CWGroup', 'CWGroup']) - self.assertEquals(rs.rowcount, 3) - self.assertEquals(rs.rows, [1,2,3]) - self.assertEquals(rs.description, ['CWGroup', 'CWGroup', 'CWGroup']) + self.assertEqual(rs.rowcount, 3) + self.assertEqual(rs.rows, [1,2,3]) + self.assertEqual(rs.description, ['CWGroup', 'CWGroup', 'CWGroup']) def test_resultset_limit(self): @@ -117,12 +117,12 @@ rs.req = self.request() rs.vreg = self.vreg - self.assertEquals(rs.limit(2).rows, [[12000, 'adim'], [13000, 'syt']]) + self.assertEqual(rs.limit(2).rows, [[12000, 'adim'], [13000, 'syt']]) rs2 = rs.limit(2, offset=1) - self.assertEquals(rs2.rows, [[13000, 'syt'], [14000, 'nico']]) - self.assertEquals(rs2.get_entity(0, 0).cw_row, 0) - self.assertEquals(rs.limit(2, offset=2).rows, [[14000, 'nico']]) - self.assertEquals(rs.limit(2, offset=3).rows, []) + self.assertEqual(rs2.rows, [[13000, 'syt'], [14000, 'nico']]) + self.assertEqual(rs2.get_entity(0, 0).cw_row, 0) + self.assertEqual(rs.limit(2, offset=2).rows, [[14000, 'nico']]) + 
self.assertEqual(rs.limit(2, offset=3).rows, []) def test_resultset_filter(self): @@ -135,8 +135,8 @@ return entity.login != 'nico' rs2 = rs.filtered_rset(test_filter) - self.assertEquals(len(rs2), 2) - self.assertEquals([login for _, login in rs2], ['adim', 'syt']) + self.assertEqual(len(rs2), 2) + self.assertEqual([login for _, login in rs2], ['adim', 'syt']) def test_resultset_transform(self): rs = ResultSet([[12, 'adim'], [13, 'syt'], [14, 'nico']], @@ -147,8 +147,8 @@ return row[1:], desc[1:] rs2 = rs.transformed_rset(test_transform) - self.assertEquals(len(rs2), 3) - self.assertEquals(list(rs2), [['adim'],['syt'],['nico']]) + self.assertEqual(len(rs2), 3) + self.assertEqual(list(rs2), [['adim'],['syt'],['nico']]) def test_resultset_sort(self): rs = ResultSet([[12000, 'adim'], [13000, 'syt'], [14000, 'nico']], @@ -158,22 +158,22 @@ rs.vreg = self.vreg rs2 = rs.sorted_rset(lambda e:e['login']) - self.assertEquals(len(rs2), 3) - self.assertEquals([login for _, login in rs2], ['adim', 'nico', 'syt']) + self.assertEqual(len(rs2), 3) + self.assertEqual([login for _, login in rs2], ['adim', 'nico', 'syt']) # make sure rs is unchanged - self.assertEquals([login for _, login in rs], ['adim', 'syt', 'nico']) + self.assertEqual([login for _, login in rs], ['adim', 'syt', 'nico']) rs2 = rs.sorted_rset(lambda e:e['login'], reverse=True) - self.assertEquals(len(rs2), 3) - self.assertEquals([login for _, login in rs2], ['syt', 'nico', 'adim']) + self.assertEqual(len(rs2), 3) + self.assertEqual([login for _, login in rs2], ['syt', 'nico', 'adim']) # make sure rs is unchanged - self.assertEquals([login for _, login in rs], ['adim', 'syt', 'nico']) + self.assertEqual([login for _, login in rs], ['adim', 'syt', 'nico']) rs3 = rs.sorted_rset(lambda row: row[1], col=-1) - self.assertEquals(len(rs3), 3) - self.assertEquals([login for _, login in rs3], ['adim', 'nico', 'syt']) + self.assertEqual(len(rs3), 3) + self.assertEqual([login for _, login in rs3], ['adim', 'nico', 'syt']) # make sure rs is unchanged - self.assertEquals([login for _, login in rs], ['adim', 'syt', 'nico']) + self.assertEqual([login for _, login in rs], ['adim', 'syt', 'nico']) def test_resultset_split(self): rs = ResultSet([[12000, 'adim', u'Adim chez les pinguins'], @@ -188,32 +188,32 @@ rs.vreg = self.vreg rsets = rs.split_rset(lambda e:e['login']) - self.assertEquals(len(rsets), 3) - self.assertEquals([login for _, login,_ in rsets[0]], ['adim', 'adim']) - self.assertEquals([login for _, login,_ in rsets[1]], ['syt']) - self.assertEquals([login for _, login,_ in rsets[2]], ['nico', 'nico']) + self.assertEqual(len(rsets), 3) + self.assertEqual([login for _, login,_ in rsets[0]], ['adim', 'adim']) + self.assertEqual([login for _, login,_ in rsets[1]], ['syt']) + self.assertEqual([login for _, login,_ in rsets[2]], ['nico', 'nico']) # make sure rs is unchanged - self.assertEquals([login for _, login,_ in rs], ['adim', 'adim', 'syt', 'nico', 'nico']) + self.assertEqual([login for _, login,_ in rs], ['adim', 'adim', 'syt', 'nico', 'nico']) rsets = rs.split_rset(lambda e:e['login'], return_dict=True) - self.assertEquals(len(rsets), 3) - self.assertEquals([login for _, login,_ in rsets['nico']], ['nico', 'nico']) - self.assertEquals([login for _, login,_ in rsets['adim']], ['adim', 'adim']) - self.assertEquals([login for _, login,_ in rsets['syt']], ['syt']) + self.assertEqual(len(rsets), 3) + self.assertEqual([login for _, login,_ in rsets['nico']], ['nico', 'nico']) + self.assertEqual([login for _, login,_ in rsets['adim']], ['adim', 
'adim']) + self.assertEqual([login for _, login,_ in rsets['syt']], ['syt']) # make sure rs is unchanged - self.assertEquals([login for _, login,_ in rs], ['adim', 'adim', 'syt', 'nico', 'nico']) + self.assertEqual([login for _, login,_ in rs], ['adim', 'adim', 'syt', 'nico', 'nico']) rsets = rs.split_rset(lambda s: s.count('d'), col=2) - self.assertEquals(len(rsets), 2) - self.assertEquals([title for _, _, title in rsets[0]], + self.assertEqual(len(rsets), 2) + self.assertEqual([title for _, _, title in rsets[0]], [u"Adim chez les pinguins", u"Jardiner facile", u"L'épluchage du castor commun",]) - self.assertEquals([title for _, _, title in rsets[1]], + self.assertEqual([title for _, _, title in rsets[1]], [u"Le carrelage en 42 leçons", u"La tarte tatin en 15 minutes",]) # make sure rs is unchanged - self.assertEquals([title for _, _, title in rs], + self.assertEqual([title for _, _, title in rs], [u'Adim chez les pinguins', u'Jardiner facile', u'Le carrelage en 42 leçons', @@ -228,15 +228,15 @@ def test_get_entity_simple(self): self.request().create_entity('CWUser', login=u'adim', upassword='adim', - surname=u'di mascio', firstname=u'adrien') + surname=u'di mascio', firstname=u'adrien') e = self.execute('Any X,T WHERE X login "adim", X surname T').get_entity(0, 0) - self.assertEquals(e['surname'], 'di mascio') + self.assertEqual(e['surname'], 'di mascio') self.assertRaises(KeyError, e.__getitem__, 'firstname') self.assertRaises(KeyError, e.__getitem__, 'creation_date') - self.assertEquals(pprelcachedict(e._related_cache), []) + self.assertEqual(pprelcachedict(e._cw_related_cache), []) e.complete() - self.assertEquals(e['firstname'], 'adrien') - self.assertEquals(pprelcachedict(e._related_cache), []) + self.assertEqual(e['firstname'], 'adrien') + self.assertEqual(pprelcachedict(e._cw_related_cache), []) def test_get_entity_advanced(self): self.request().create_entity('Bookmark', title=u'zou', path=u'/view') @@ -244,24 +244,24 @@ rset = self.execute('Any X,Y,XT,YN WHERE X bookmarked_by Y, X title XT, Y login YN') e = rset.get_entity(0, 0) - self.assertEquals(e.cw_row, 0) - self.assertEquals(e.cw_col, 0) - self.assertEquals(e['title'], 'zou') + self.assertEqual(e.cw_row, 0) + self.assertEqual(e.cw_col, 0) + self.assertEqual(e['title'], 'zou') self.assertRaises(KeyError, e.__getitem__, 'path') - self.assertEquals(e.view('text'), 'zou') - self.assertEquals(pprelcachedict(e._related_cache), []) + self.assertEqual(e.view('text'), 'zou') + self.assertEqual(pprelcachedict(e._cw_related_cache), []) e = rset.get_entity(0, 1) - self.assertEquals(e.cw_row, 0) - self.assertEquals(e.cw_col, 1) - self.assertEquals(e['login'], 'anon') + self.assertEqual(e.cw_row, 0) + self.assertEqual(e.cw_col, 1) + self.assertEqual(e['login'], 'anon') self.assertRaises(KeyError, e.__getitem__, 'firstname') - self.assertEquals(pprelcachedict(e._related_cache), + self.assertEqual(pprelcachedict(e._cw_related_cache), []) e.complete() - self.assertEquals(e['firstname'], None) - self.assertEquals(e.view('text'), 'anon') - self.assertEquals(pprelcachedict(e._related_cache), + self.assertEqual(e['firstname'], None) + self.assertEqual(e.view('text'), 'anon') + self.assertEqual(pprelcachedict(e._cw_related_cache), []) self.assertRaises(NotAnEntity, rset.get_entity, 0, 2) @@ -273,7 +273,7 @@ seid = self.execute('State X WHERE X name "activated"')[0][0] # for_user / in_group are prefetched in CWUser __init__, in_state should # be filed from our query rset - self.assertEquals(pprelcachedict(e._related_cache), + 
self.assertEqual(pprelcachedict(e._cw_related_cache), [('in_state_subject', [seid])]) def test_get_entity_advanced_prefilled_cache(self): @@ -282,16 +282,16 @@ rset = self.execute('Any X,U,S,XT,UL,SN WHERE X created_by U, U in_state S, ' 'X title XT, S name SN, U login UL, X eid %s' % e.eid) e = rset.get_entity(0, 0) - self.assertEquals(e['title'], 'zou') - self.assertEquals(pprelcachedict(e._related_cache), + self.assertEqual(e['title'], 'zou') + self.assertEqual(pprelcachedict(e._cw_related_cache), [('created_by_subject', [5])]) # first level of recursion u = e.created_by[0] - self.assertEquals(u['login'], 'admin') + self.assertEqual(u['login'], 'admin') self.assertRaises(KeyError, u.__getitem__, 'firstname') # second level of recursion s = u.in_state[0] - self.assertEquals(s['name'], 'activated') + self.assertEqual(s['name'], 'activated') self.assertRaises(KeyError, s.__getitem__, 'description') @@ -302,11 +302,11 @@ e = rset.get_entity(0, 0) # if any of the assertion below fails with a KeyError, the relation is not cached # related entities should be an empty list - self.assertEquals(e.related_cache('primary_email', 'subject', True), ()) + self.assertEqual(e._cw_relation_cache('primary_email', 'subject', True), ()) # related rset should be an empty rset - cached = e.related_cache('primary_email', 'subject', False) + cached = e._cw_relation_cache('primary_email', 'subject', False) self.assertIsInstance(cached, ResultSet) - self.assertEquals(cached.rowcount, 0) + self.assertEqual(cached.rowcount, 0) def test_get_entity_union(self): @@ -320,16 +320,16 @@ ('CWGroup', 'users')) for entity in rset.entities(): # test get_entity for each row actually etype, n = expected[entity.cw_row] - self.assertEquals(entity.__regid__, etype) + self.assertEqual(entity.__regid__, etype) attr = etype == 'Bookmark' and 'title' or 'name' - self.assertEquals(entity[attr], n) + self.assertEqual(entity[attr], n) def test_related_entity_optional(self): e = self.request().create_entity('Bookmark', title=u'aaaa', path=u'path') rset = self.execute('Any B,U,L WHERE B bookmarked_by U?, U login L') entity, rtype = rset.related_entity(0, 2) - self.assertEquals(entity, None) - self.assertEquals(rtype, None) + self.assertEqual(entity, None) + self.assertEqual(rtype, None) def test_related_entity_union_subquery(self): e = self.request().create_entity('Bookmark', title=u'aaaa', path=u'path') @@ -338,27 +338,27 @@ ' UNION ' ' (Any X,N WHERE X is Bookmark, X title N))') entity, rtype = rset.related_entity(0, 1) - self.assertEquals(entity.eid, e.eid) - self.assertEquals(rtype, 'title') + self.assertEqual(entity.eid, e.eid) + self.assertEqual(rtype, 'title') entity, rtype = rset.related_entity(1, 1) - self.assertEquals(entity.__regid__, 'CWGroup') - self.assertEquals(rtype, 'name') + self.assertEqual(entity.__regid__, 'CWGroup') + self.assertEqual(rtype, 'name') # rset = self.execute('Any X,N ORDERBY N WHERE X is Bookmark WITH X,N BEING ' '((Any X,N WHERE X is CWGroup, X name N)' ' UNION ' ' (Any X,N WHERE X is Bookmark, X title N))') entity, rtype = rset.related_entity(0, 1) - self.assertEquals(entity.eid, e.eid) - self.assertEquals(rtype, 'title') + self.assertEqual(entity.eid, e.eid) + self.assertEqual(rtype, 'title') # rset = self.execute('Any X,N ORDERBY N WITH N,X BEING ' '((Any N,X WHERE X is CWGroup, X name N)' ' UNION ' ' (Any N,X WHERE X is Bookmark, X title N))') entity, rtype = rset.related_entity(0, 1) - self.assertEquals(entity.eid, e.eid) - self.assertEquals(rtype, 'title') + self.assertEqual(entity.eid, e.eid) + 
self.assertEqual(rtype, 'title') def test_related_entity_trap_subquery(self): req = self.request() @@ -368,32 +368,40 @@ 'WITH B,T BEING (Any B,T WHERE B is Bookmark, B title T)') rset.related_entity(0, 2) + def test_related_entity_subquery_outerjoin(self): + rset = self.execute('Any X,S,L WHERE X in_state S ' + 'WITH X, L BEING (Any X,MAX(L) GROUPBY X ' + 'WHERE X is CWUser, T? wf_info_for X, T creation_date L)') + self.assertEqual(len(rset), 2) + rset.related_entity(0, 1) + rset.related_entity(0, 2) + def test_entities(self): rset = self.execute('Any U,G WHERE U in_group G') # make sure we have at least one element self.failUnless(rset) - self.assertEquals(set(e.e_schema.type for e in rset.entities(0)), + self.assertEqual(set(e.e_schema.type for e in rset.entities(0)), set(['CWUser',])) - self.assertEquals(set(e.e_schema.type for e in rset.entities(1)), + self.assertEqual(set(e.e_schema.type for e in rset.entities(1)), set(['CWGroup',])) def test_printable_rql(self): rset = self.execute(u'CWEType X WHERE X final FALSE') - self.assertEquals(rset.printable_rql(), + self.assertEqual(rset.printable_rql(), 'Any X WHERE X final FALSE, X is CWEType') def test_searched_text(self): rset = self.execute(u'Any X WHERE X has_text "foobar"') - self.assertEquals(rset.searched_text(), 'foobar') + self.assertEqual(rset.searched_text(), 'foobar') rset = self.execute(u'Any X WHERE X has_text %(text)s', {'text' : 'foo'}) - self.assertEquals(rset.searched_text(), 'foo') + self.assertEqual(rset.searched_text(), 'foo') def test_union_limited_rql(self): rset = self.execute('(Any X,N WHERE X is Bookmark, X title N)' ' UNION ' '(Any X,N WHERE X is CWGroup, X name N)') rset.limit(2, 10, inplace=True) - self.assertEquals(rset.limited_rql(), + self.assertEqual(rset.limited_rql(), 'Any A,B LIMIT 2 OFFSET 10 ' 'WITH A,B BEING (' '(Any X,N WHERE X is Bookmark, X title N) ' @@ -403,7 +411,21 @@ def test_count_users_by_date(self): rset = self.execute('Any D, COUNT(U) GROUPBY D WHERE U is CWUser, U creation_date D') - self.assertEquals(rset.related_entity(0,0), (None, None)) + self.assertEqual(rset.related_entity(0,0), (None, None)) + + def test_str(self): + rset = self.execute('(Any X,N WHERE X is CWGroup, X name N)') + self.assertIsInstance(str(rset), basestring) + self.assertEqual(len(str(rset).splitlines()), 1) + + def test_repr(self): + rset = self.execute('(Any X,N WHERE X is CWGroup, X name N)') + self.assertIsInstance(repr(rset), basestring) + self.assertTrue(len(repr(rset).splitlines()) > 1) + + rset = self.execute('(Any X WHERE X is CWGroup, X name "managers")') + self.assertIsInstance(str(rset), basestring) + self.assertEqual(len(str(rset).splitlines()), 1) if __name__ == '__main__': unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc test/unittest_rtags.py --- a/test/unittest_rtags.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/unittest_rtags.py Wed Nov 03 16:38:28 2010 +0100 @@ -28,25 +28,25 @@ rtags.tag_subject_of(('Societe', 'travaille', '*'), 'primary') rtags.tag_subject_of(('*', 'evaluee', '*'), 'secondary') rtags.tag_object_of(('*', 'tags', '*'), 'generated') - self.assertEquals(rtags.get('Note', 'evaluee', '*', 'subject'), + self.assertEqual(rtags.get('Note', 'evaluee', '*', 'subject'), 'secondary') - self.assertEquals(rtags.get('Societe', 'travaille', '*', 'subject'), + self.assertEqual(rtags.get('Societe', 'travaille', '*', 'subject'), 'primary') - self.assertEquals(rtags.get('Note', 'travaille', '*', 'subject'), + self.assertEqual(rtags.get('Note', 'travaille', '*', 'subject'), None) - 
self.assertEquals(rtags.get('Note', 'tags', '*', 'subject'), + self.assertEqual(rtags.get('Note', 'tags', '*', 'subject'), None) - self.assertEquals(rtags.get('*', 'tags', 'Note', 'object'), + self.assertEqual(rtags.get('*', 'tags', 'Note', 'object'), 'generated') - self.assertEquals(rtags.get('Tag', 'tags', '*', 'object'), + self.assertEqual(rtags.get('Tag', 'tags', '*', 'object'), 'generated') -# self.assertEquals(rtags.rtag('evaluee', 'Note', 'subject'), set(('secondary', 'link'))) -# self.assertEquals(rtags.is_inlined('evaluee', 'Note', 'subject'), False) -# self.assertEquals(rtags.rtag('evaluee', 'Personne', 'subject'), set(('secondary', 'link'))) -# self.assertEquals(rtags.is_inlined('evaluee', 'Personne', 'subject'), False) -# self.assertEquals(rtags.rtag('ecrit_par', 'Note', 'object'), set(('inlineview', 'link'))) -# self.assertEquals(rtags.is_inlined('ecrit_par', 'Note', 'object'), True) +# self.assertEqual(rtags.rtag('evaluee', 'Note', 'subject'), set(('secondary', 'link'))) +# self.assertEqual(rtags.is_inlined('evaluee', 'Note', 'subject'), False) +# self.assertEqual(rtags.rtag('evaluee', 'Personne', 'subject'), set(('secondary', 'link'))) +# self.assertEqual(rtags.is_inlined('evaluee', 'Personne', 'subject'), False) +# self.assertEqual(rtags.rtag('ecrit_par', 'Note', 'object'), set(('inlineview', 'link'))) +# self.assertEqual(rtags.is_inlined('ecrit_par', 'Note', 'object'), True) # class Personne2(Personne): # id = 'Personne' # __rtags__ = { @@ -54,21 +54,21 @@ # } # self.vreg.register(Personne2) # rtags = Personne2.rtags -# self.assertEquals(rtags.rtag('evaluee', 'Note', 'subject'), set(('inlineview', 'link'))) -# self.assertEquals(rtags.is_inlined('evaluee', 'Note', 'subject'), True) -# self.assertEquals(rtags.rtag('evaluee', 'Personne', 'subject'), set(('secondary', 'link'))) -# self.assertEquals(rtags.is_inlined('evaluee', 'Personne', 'subject'), False) +# self.assertEqual(rtags.rtag('evaluee', 'Note', 'subject'), set(('inlineview', 'link'))) +# self.assertEqual(rtags.is_inlined('evaluee', 'Note', 'subject'), True) +# self.assertEqual(rtags.rtag('evaluee', 'Personne', 'subject'), set(('secondary', 'link'))) +# self.assertEqual(rtags.is_inlined('evaluee', 'Personne', 'subject'), False) def test_rtagset_expansion(self): rtags = RelationTagsSet() rtags.tag_subject_of(('Societe', 'travaille', '*'), 'primary') rtags.tag_subject_of(('*', 'travaille', '*'), 'secondary') - self.assertEquals(rtags.get('Societe', 'travaille', '*', 'subject'), + self.assertEqual(rtags.get('Societe', 'travaille', '*', 'subject'), set(('primary', 'secondary'))) - self.assertEquals(rtags.get('Note', 'travaille', '*', 'subject'), + self.assertEqual(rtags.get('Note', 'travaille', '*', 'subject'), set(('secondary',))) - self.assertEquals(rtags.get('Note', 'tags', "*", 'subject'), + self.assertEqual(rtags.get('Note', 'tags', "*", 'subject'), set()) def test_rtagdict_expansion(self): @@ -79,16 +79,16 @@ {'key1': 'val0', 'key3': 'val0'}) rtags.tag_subject_of(('Societe', 'travaille', '*'), {'key2': 'val2'}) - self.assertEquals(rtags.get('Societe', 'travaille', '*', 'subject'), + self.assertEqual(rtags.get('Societe', 'travaille', '*', 'subject'), {'key1': 'val1', 'key2': 'val2', 'key3': 'val0'}) - self.assertEquals(rtags.get('Note', 'travaille', '*', 'subject'), + self.assertEqual(rtags.get('Note', 'travaille', '*', 'subject'), {'key1': 'val0', 'key3': 'val0'}) - self.assertEquals(rtags.get('Note', 'tags', "*", 'subject'), + self.assertEqual(rtags.get('Note', 'tags', "*", 'subject'), {}) 
rtags.setdefault(('Societe', 'travaille', '*', 'subject'), 'key1', 'val4') rtags.setdefault(('Societe', 'travaille', '*', 'subject'), 'key4', 'val4') - self.assertEquals(rtags.get('Societe', 'travaille', '*', 'subject'), + self.assertEqual(rtags.get('Societe', 'travaille', '*', 'subject'), {'key1': 'val1', 'key2': 'val2', 'key3': 'val0', 'key4': 'val4'}) if __name__ == '__main__': diff -r f4d1d5d9ccbb -r 90f2f20367bc test/unittest_schema.py --- a/test/unittest_schema.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/unittest_schema.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""unit tests for module cubicweb.schema - -""" +"""unit tests for module cubicweb.schema""" import sys from os.path import join, isabs, basename, dirname @@ -144,13 +142,13 @@ def test_erqlexpression(self): self.assertRaises(RQLSyntaxError, ERQLExpression, '1') expr = ERQLExpression('X travaille S, S owned_by U') - self.assertEquals(str(expr), 'Any X WHERE X travaille S, S owned_by U, X eid %(x)s, U eid %(u)s') + self.assertEqual(str(expr), 'Any X WHERE X travaille S, S owned_by U, X eid %(x)s, U eid %(u)s') def test_rrqlexpression(self): self.assertRaises(Exception, RRQLExpression, '1') self.assertRaises(RQLSyntaxError, RRQLExpression, 'O X Y') expr = RRQLExpression('U has_update_permission O') - self.assertEquals(str(expr), 'Any O,U WHERE U has_update_permission O, O eid %(o)s, U eid %(u)s') + self.assertEqual(str(expr), 'Any O,U WHERE U has_update_permission O, O eid %(o)s, U eid %(u)s') loader = CubicWebSchemaLoader() config = TestConfiguration('data') @@ -160,37 +158,40 @@ def test_order_eschemas(self): schema = loader.load(config) - self.assertEquals(order_eschemas([schema['Note'], schema['SubNote']]), + self.assertEqual(order_eschemas([schema['Note'], schema['SubNote']]), [schema['Note'], schema['SubNote']]) - self.assertEquals(order_eschemas([schema['SubNote'], schema['Note']]), + self.assertEqual(order_eschemas([schema['SubNote'], schema['Note']]), [schema['Note'], schema['SubNote']]) def test_knownValues_load_schema(self): schema = loader.load(config) self.assert_(isinstance(schema, CubicWebSchema)) - self.assertEquals(schema.name, 'data') + self.assertEqual(schema.name, 'data') entities = [str(e) for e in schema.entities()] entities.sort() expected_entities = ['BaseTransition', 'Bookmark', 'Boolean', 'Bytes', 'Card', 'Date', 'Datetime', 'Decimal', 'CWCache', 'CWConstraint', 'CWConstraintType', 'CWEType', 'CWAttribute', 'CWGroup', 'EmailAddress', 'CWRelation', - 'CWPermission', 'CWProperty', 'CWRType', 'CWUser', - 'ExternalUri', 'File', 'Float', 'Image', 'Int', 'Interval', 'Note', + 'CWPermission', 'CWProperty', 'CWRType', + 'CWUniqueTogetherConstraint', 'CWUser', + 'ExternalUri', 'File', 'Float', 'Int', 'Interval', 'Note', 'Password', 'Personne', 'RQLExpression', 'Societe', 'State', 'String', 'SubNote', 'SubWorkflowExitPoint', 'Tag', 'Time', 'Transition', 'TrInfo', 'Workflow', 'WorkflowTransition'] - self.assertListEquals(entities, sorted(expected_entities)) + self.assertListEqual(entities, sorted(expected_entities)) relations = [str(r) for r in schema.relations()] relations.sort() expected_relations = ['add_permission', 'address', 'alias', 'allowed_transition', 'bookmarked_by', 'by_transition', 'cardinality', 'comment', 'comment_format', - 'composite', 'condition', 'connait', 'constrained_by', 'content', - 'content_format', 'created_by', 'creation_date', 'cstrtype', 'custom_workflow', 
'cwuri', + 'composite', 'condition', 'connait', + 'constrained_by', 'constraint_of', + 'content', 'content_format', + 'created_by', 'creation_date', 'cstrtype', 'custom_workflow', 'cwuri', 'data', 'data_encoding', 'data_format', 'data_name', 'default_workflow', 'defaultval', 'delete_permission', 'description', 'description_format', 'destination_state', @@ -214,7 +215,7 @@ 'path', 'pkey', 'prefered_form', 'prenom', 'primary_email', - 'read_permission', 'relation_type', 'require_group', + 'read_permission', 'relation_type', 'relations', 'require_group', 'specializes', 'state_of', 'subworkflow', 'subworkflow_exit', 'subworkflow_state', 'surname', 'symmetric', 'synopsis', @@ -226,11 +227,11 @@ 'wf_info_for', 'wikiid', 'workflow_of'] - self.assertListEquals(relations, expected_relations) + self.assertListEqual(relations, expected_relations) eschema = schema.eschema('CWUser') rels = sorted(str(r) for r in eschema.subject_relations()) - self.assertListEquals(rels, ['created_by', 'creation_date', 'custom_workflow', 'cwuri', 'eid', + self.assertListEqual(rels, ['created_by', 'creation_date', 'custom_workflow', 'cwuri', 'eid', 'evaluee', 'firstname', 'has_text', 'identity', 'in_group', 'in_state', 'is', 'is_instance_of', 'last_login_time', @@ -238,11 +239,11 @@ 'primary_email', 'surname', 'upassword', 'use_email']) rels = sorted(r.type for r in eschema.object_relations()) - self.assertListEquals(rels, ['bookmarked_by', 'created_by', 'for_user', + self.assertListEqual(rels, ['bookmarked_by', 'created_by', 'for_user', 'identity', 'owned_by', 'wf_info_for']) rschema = schema.rschema('relation_type') properties = rschema.rdef('CWAttribute', 'CWRType') - self.assertEquals(properties.cardinality, '1*') + self.assertEqual(properties.cardinality, '1*') constraints = properties.constraints self.failUnlessEqual(len(constraints), 1, constraints) constraint = constraints[0] @@ -257,13 +258,13 @@ def test_permission_settings(self): schema = loader.load(config) aschema = schema['TrInfo'].rdef('comment') - self.assertEquals(aschema.get_groups('read'), + self.assertEqual(aschema.get_groups('read'), set(('managers', 'users', 'guests'))) - self.assertEquals(aschema.get_rqlexprs('read'), + self.assertEqual(aschema.get_rqlexprs('read'), ()) - self.assertEquals(aschema.get_groups('update'), + self.assertEqual(aschema.get_groups('update'), set(('managers',))) - self.assertEquals([x.expression for x in aschema.get_rqlexprs('update')], + self.assertEqual([x.expression for x in aschema.get_rqlexprs('update')], ['U has_update_permission X']) class BadSchemaRQLExprTC(TestCase): @@ -278,7 +279,7 @@ self.loader.handle_file(join(DATADIR, schemafile)) ex = self.assertRaises(BadSchemaDefinition, self.loader._build_schema, 'toto', False) - self.assertEquals(str(ex), msg) + self.assertEqual(str(ex), msg) def test_rrqlexpr_on_etype(self): self._test('rrqlexpr_on_eetype.py', @@ -300,12 +301,12 @@ class NormalizeExpressionTC(TestCase): def test(self): - self.assertEquals(normalize_expression('X bla Y,Y blur Z , Z zigoulou X '), + self.assertEqual(normalize_expression('X bla Y,Y blur Z , Z zigoulou X '), 'X bla Y, Y blur Z, Z zigoulou X') class RQLExpressionTC(TestCase): def test_comparison(self): - self.assertEquals(ERQLExpression('X is CWUser', 'X', 0), + self.assertEqual(ERQLExpression('X is CWUser', 'X', 0), ERQLExpression('X is CWUser', 'X', 0)) self.assertNotEquals(ERQLExpression('X is CWUser', 'X', 0), ERQLExpression('X is CWGroup', 'X', 0)) @@ -313,7 +314,7 @@ class GuessRrqlExprMainVarsTC(TestCase): def test_exists(self): 
mainvars = guess_rrqlexpr_mainvars(normalize_expression('NOT EXISTS(O team_competition C, C level < 3)')) - self.assertEquals(mainvars, 'O') + self.assertEqual(mainvars, 'O') if __name__ == '__main__': diff -r f4d1d5d9ccbb -r 90f2f20367bc test/unittest_selectors.py --- a/test/unittest_selectors.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/unittest_selectors.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,15 +15,16 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""unit tests for selectors mechanism +"""unit tests for selectors mechanism""" -""" - +from operator import eq, lt, le, gt from logilab.common.testlib import TestCase, unittest_main +from cubicweb import Binary from cubicweb.devtools.testlib import CubicWebTC from cubicweb.appobject import Selector, AndSelector, OrSelector -from cubicweb.selectors import implements, match_user_groups +from cubicweb.selectors import (is_instance, adaptable, match_user_groups, + multi_lines_rset) from cubicweb.interfaces import IDownloadable from cubicweb.web import action @@ -42,114 +43,120 @@ class SelectorsTC(TestCase): def test_basic_and(self): selector = _1_() & _1_() - self.assertEquals(selector(None), 2) + self.assertEqual(selector(None), 2) selector = _1_() & _0_() - self.assertEquals(selector(None), 0) + self.assertEqual(selector(None), 0) selector = _0_() & _1_() - self.assertEquals(selector(None), 0) + self.assertEqual(selector(None), 0) def test_basic_or(self): selector = _1_() | _1_() - self.assertEquals(selector(None), 1) + self.assertEqual(selector(None), 1) selector = _1_() | _0_() - self.assertEquals(selector(None), 1) + self.assertEqual(selector(None), 1) selector = _0_() | _1_() - self.assertEquals(selector(None), 1) + self.assertEqual(selector(None), 1) selector = _0_() | _0_() - self.assertEquals(selector(None), 0) + self.assertEqual(selector(None), 0) def test_selector_and_function(self): selector = _1_() & _2_ - self.assertEquals(selector(None), 3) + self.assertEqual(selector(None), 3) selector = _2_ & _1_() - self.assertEquals(selector(None), 3) + self.assertEqual(selector(None), 3) def test_three_and(self): selector = _1_() & _1_() & _1_() - self.assertEquals(selector(None), 3) + self.assertEqual(selector(None), 3) selector = _1_() & _0_() & _1_() - self.assertEquals(selector(None), 0) + self.assertEqual(selector(None), 0) selector = _0_() & _1_() & _1_() - self.assertEquals(selector(None), 0) + self.assertEqual(selector(None), 0) def test_three_or(self): selector = _1_() | _1_() | _1_() - self.assertEquals(selector(None), 1) + self.assertEqual(selector(None), 1) selector = _1_() | _0_() | _1_() - self.assertEquals(selector(None), 1) + self.assertEqual(selector(None), 1) selector = _0_() | _1_() | _1_() - self.assertEquals(selector(None), 1) + self.assertEqual(selector(None), 1) selector = _0_() | _0_() | _0_() - self.assertEquals(selector(None), 0) + self.assertEqual(selector(None), 0) def test_composition(self): selector = (_1_() & _1_()) & (_1_() & _1_()) self.failUnless(isinstance(selector, AndSelector)) - self.assertEquals(len(selector.selectors), 4) - self.assertEquals(selector(None), 4) + self.assertEqual(len(selector.selectors), 4) + self.assertEqual(selector(None), 4) selector = (_1_() & _0_()) | (_1_() & _1_()) self.failUnless(isinstance(selector, OrSelector)) - self.assertEquals(len(selector.selectors), 2) - self.assertEquals(selector(None), 2) + self.assertEqual(len(selector.selectors), 2) + self.assertEqual(selector(None), 2) def 
test_search_selectors(self): - sel = implements('something') - self.assertIs(sel.search_selector(implements), sel) + sel = is_instance('something') + self.assertIs(sel.search_selector(is_instance), sel) csel = AndSelector(sel, Selector()) - self.assertIs(csel.search_selector(implements), sel) + self.assertIs(csel.search_selector(is_instance), sel) csel = AndSelector(Selector(), sel) - self.assertIs(csel.search_selector(implements), sel) + self.assertIs(csel.search_selector(is_instance), sel) def test_inplace_and(self): selector = _1_() selector &= _1_() selector &= _1_() - self.assertEquals(selector(None), 3) + self.assertEqual(selector(None), 3) selector = _1_() selector &= _0_() selector &= _1_() - self.assertEquals(selector(None), 0) + self.assertEqual(selector(None), 0) selector = _0_() selector &= _1_() selector &= _1_() - self.assertEquals(selector(None), 0) + self.assertEqual(selector(None), 0) selector = _0_() selector &= _0_() selector &= _0_() - self.assertEquals(selector(None), 0) + self.assertEqual(selector(None), 0) def test_inplace_or(self): selector = _1_() selector |= _1_() selector |= _1_() - self.assertEquals(selector(None), 1) + self.assertEqual(selector(None), 1) selector = _1_() selector |= _0_() selector |= _1_() - self.assertEquals(selector(None), 1) + self.assertEqual(selector(None), 1) selector = _0_() selector |= _1_() selector |= _1_() - self.assertEquals(selector(None), 1) + self.assertEqual(selector(None), 1) selector = _0_() selector |= _0_() selector |= _0_() - self.assertEquals(selector(None), 0) + self.assertEqual(selector(None), 0) class ImplementsSelectorTC(CubicWebTC): def test_etype_priority(self): req = self.request() - cls = self.vreg['etypes'].etype_class('File') - anyscore = implements('Any').score_class(cls, req) - idownscore = implements(IDownloadable).score_class(cls, req) + f = req.create_entity('File', data_name=u'hop.txt', data=Binary('hop')) + rset = f.as_rset() + anyscore = is_instance('Any')(f.__class__, req, rset=rset) + idownscore = adaptable('IDownloadable')(f.__class__, req, rset=rset) self.failUnless(idownscore > anyscore, (idownscore, anyscore)) - filescore = implements('File').score_class(cls, req) + filescore = is_instance('File')(f.__class__, req, rset=rset) self.failUnless(filescore > idownscore, (filescore, idownscore)) def test_etype_inheritance_no_yams_inheritance(self): cls = self.vreg['etypes'].etype_class('Personne') - self.failIf(implements('Societe').score_class(cls, self.request())) + self.failIf(is_instance('Societe').score_class(cls, self.request())) + + def test_yams_inheritance(self): + cls = self.vreg['etypes'].etype_class('Transition') + self.assertEqual(is_instance('BaseTransition').score_class(cls, self.request()), + 3) class MatchUserGroupsTC(CubicWebTC): @@ -185,6 +192,59 @@ finally: del self.vreg[SomeAction.__registry__][SomeAction.__regid__] + +class MultiLinesRsetSelectorTC(CubicWebTC): + def setUp(self): + super(MultiLinesRsetSelectorTC, self).setUp() + self.req = self.request() + self.req.execute('INSERT CWGroup G: G name "group1"') + self.req.execute('INSERT CWGroup G: G name "group2"') + self.commit() + self.rset = self.req.execute('Any G WHERE G is CWGroup') + + def test_default_op_in_selector(self): + expected = len(self.rset) + selector = multi_lines_rset(expected) + self.assertEqual(selector(None, self.req, self.rset), 1) + self.assertEqual(selector(None, self.req, None), 0) + selector = multi_lines_rset(expected + 1) + self.assertEqual(selector(None, self.req, self.rset), 0) + 
self.assertEqual(selector(None, self.req, None), 0) + selector = multi_lines_rset(expected - 1) + self.assertEqual(selector(None, self.req, self.rset), 0) + self.assertEqual(selector(None, self.req, None), 0) + + def test_without_rset(self): + expected = len(self.rset) + selector = multi_lines_rset(expected) + self.assertEqual(selector(None, self.req, None), 0) + selector = multi_lines_rset(expected + 1) + self.assertEqual(selector(None, self.req, None), 0) + selector = multi_lines_rset(expected - 1) + self.assertEqual(selector(None, self.req, None), 0) + + def test_with_operators(self): + expected = len(self.rset) + + # Format 'expected', 'operator', 'assert' + testdata = (( expected, eq, 1), + ( expected+1, eq, 0), + ( expected-1, eq, 0), + ( expected, le, 1), + ( expected+1, le, 1), + ( expected-1, le, 0), + ( expected-1, gt, 1), + ( expected, gt, 0), + ( expected+1, gt, 0), + ( expected+1, lt, 1), + ( expected, lt, 0), + ( expected-1, lt, 0)) + + for (expected, operator, assertion) in testdata: + selector = multi_lines_rset(expected, operator) + yield self.assertEqual, selector(None, self.req, self.rset), assertion + + if __name__ == '__main__': unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc test/unittest_spa2rql.py --- a/test/unittest_spa2rql.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/unittest_spa2rql.py Wed Nov 03 16:38:28 2010 +0100 @@ -37,7 +37,7 @@ def _test(self, sparql, rql, args={}): qi = self.tr.translate(sparql) - self.assertEquals(qi.finalize(), (rql, args)) + self.assertEqual(qi.finalize(), (rql, args)) def XXX_test_base_01(self): self._test('SELECT * WHERE { }', 'Any X') diff -r f4d1d5d9ccbb -r 90f2f20367bc test/unittest_uilib.py --- a/test/unittest_uilib.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/unittest_uilib.py Wed Nov 03 16:38:28 2010 +0100 @@ -39,22 +39,22 @@ ] for text, expected in data: got = uilib.remove_html_tags(text) - self.assertEquals(got, expected) + self.assertEqual(got, expected) def test_fallback_safe_cut(self): - self.assertEquals(uilib.fallback_safe_cut(u'ab cd', 4), u'ab c...') - self.assertEquals(uilib.fallback_safe_cut(u'ab cd', 5), u'ab cd') - self.assertEquals(uilib.fallback_safe_cut(u'ab &d', 4), u'ab &...') - self.assertEquals(uilib.fallback_safe_cut(u'ab &d ef', 5), u'ab &d...') - self.assertEquals(uilib.fallback_safe_cut(u'ab ìd', 4), u'ab ì...') - self.assertEquals(uilib.fallback_safe_cut(u'& &d ef', 4), u'& &d...') + self.assertEqual(uilib.fallback_safe_cut(u'ab cd', 4), u'ab c...') + self.assertEqual(uilib.fallback_safe_cut(u'ab cd', 5), u'ab cd') + self.assertEqual(uilib.fallback_safe_cut(u'ab &d', 4), u'ab &...') + self.assertEqual(uilib.fallback_safe_cut(u'ab &d ef', 5), u'ab &d...') + self.assertEqual(uilib.fallback_safe_cut(u'ab ìd', 4), u'ab ì...') + self.assertEqual(uilib.fallback_safe_cut(u'& &d ef', 4), u'& &d...') def test_lxml_safe_cut(self): - self.assertEquals(uilib.safe_cut(u'aaa
<...>aaad<...> ef', 4), u'<...>aaa<...>a...<...>')
- self.assertEquals(uilib.safe_cut(u'aaa<...>aaad<...> ef', 7), u'<...>aaa<...>aaad<...>...')
- self.assertEquals(uilib.safe_cut(u'aaa<...>aaad<...>', 7), u'<...>aaa<...>aaad<...>')
+ self.assertEqual(uilib.safe_cut(u'aaa<...>aaad<...> ef', 4), u'<...>aaa<...>a...<...>')
+ self.assertEqual(uilib.safe_cut(u'aaa<...>aaad<...> ef', 7), u'<...>aaa<...>aaad<...>...')
+ self.assertEqual(uilib.safe_cut(u'aaa<...>aaad<...>', 7), u'<...>aaa<...>aaad<...>')
# Missing ellipsis due to space management but we don't care
- self.assertEquals(uilib.safe_cut(u'ab &d', 4), u'<...>ab &...<...>')
+ self.assertEqual(uilib.safe_cut(u'ab &d', 4), u'<...>ab &...<...>
        ') def test_cut(self): """tests uilib.cut() behaviour""" @@ -65,7 +65,7 @@ ] for text, expected in data: got = uilib.cut(text, 8) - self.assertEquals(got, expected) + self.assertEqual(got, expected) def test_text_cut(self): """tests uilib.text_cut() behaviour with no text""" @@ -92,56 +92,64 @@ ] for text, expected in data: got = uilib.text_cut(text, 30) - self.assertEquals(got, expected) + self.assertEqual(got, expected) def test_soup2xhtml_1_1(self): - self.assertEquals(uilib.soup2xhtml('hop
<...>', 'ascii'), + self.assertEqual(uilib.soup2xhtml('hop<...>', 'ascii'), 'hop<...>')
- self.assertEquals(uilib.soup2xhtml('<...>hop', 'ascii'), + self.assertEqual(uilib.soup2xhtml('<...>hop', 'ascii'), '<...>hop<...>')
- self.assertEquals(uilib.soup2xhtml('hop<...>hop', 'ascii'), + self.assertEqual(uilib.soup2xhtml('hop<...>hop', 'ascii'), 'hop<...>hop<...>')
def test_soup2xhtml_1_2(self):
- self.assertEquals(uilib.soup2xhtml('hop<...>', 'ascii'), + self.assertEqual(uilib.soup2xhtml('hop<...>', 'ascii'), 'hop ')
- self.assertEquals(uilib.soup2xhtml('<...>hop', 'ascii'), + self.assertEqual(uilib.soup2xhtml('<...>hop', 'ascii'), '<...>hop')
- self.assertEquals(uilib.soup2xhtml('hop<...>hop', 'ascii'), + self.assertEqual(uilib.soup2xhtml('hop<...>hop', 'ascii'), '<...>hop<...>
        hop') def test_soup2xhtml_2_1(self): - self.assertEquals(uilib.soup2xhtml('hop ', 'ascii'), + self.assertEqual(uilib.soup2xhtml('hop ', 'ascii'), 'hop ') - self.assertEquals(uilib.soup2xhtml(' hop', 'ascii'), + self.assertEqual(uilib.soup2xhtml(' hop', 'ascii'), ' hop') - self.assertEquals(uilib.soup2xhtml('hop hop', 'ascii'), + self.assertEqual(uilib.soup2xhtml('hop hop', 'ascii'), 'hop hop') def test_soup2xhtml_2_2(self): - self.assertEquals(uilib.soup2xhtml('hop ', 'ascii'), + self.assertEqual(uilib.soup2xhtml('hop ', 'ascii'), 'hop ') - self.assertEquals(uilib.soup2xhtml(' hop', 'ascii'), + self.assertEqual(uilib.soup2xhtml(' hop', 'ascii'), ' hop') - self.assertEquals(uilib.soup2xhtml('hop hop', 'ascii'), + self.assertEqual(uilib.soup2xhtml('hop hop', 'ascii'), 'hop hop') def test_soup2xhtml_3_1(self): - self.assertEquals(uilib.soup2xhtml('hop ', 'ascii'), + self.assertEqual(uilib.soup2xhtml('hop ', 'ascii'), 'hop ') - self.assertEquals(uilib.soup2xhtml(' hop', 'ascii'), + self.assertEqual(uilib.soup2xhtml(' hop', 'ascii'), ' hop') - self.assertEquals(uilib.soup2xhtml('hop hop', 'ascii'), + self.assertEqual(uilib.soup2xhtml('hop hop', 'ascii'), 'hop hop') def test_soup2xhtml_3_2(self): - self.assertEquals(uilib.soup2xhtml('hop ', 'ascii'), + self.assertEqual(uilib.soup2xhtml('hop ', 'ascii'), 'hop ') - self.assertEquals(uilib.soup2xhtml(' hop', 'ascii'), + self.assertEqual(uilib.soup2xhtml(' hop', 'ascii'), ' hop') - self.assertEquals(uilib.soup2xhtml('hop hop', 'ascii'), + self.assertEqual(uilib.soup2xhtml('hop hop', 'ascii'), 'hop hop') + def test_js(self): + self.assertEqual(str(uilib.js.pouet(1, "2")), + 'pouet(1,"2")') + self.assertEqual(str(uilib.js.cw.pouet(1, "2")), + 'cw.pouet(1,"2")') + self.assertEqual(str(uilib.js.cw.pouet(1, "2").pouet(None)), + 'cw.pouet(1,"2").pouet(null)') + if __name__ == '__main__': unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc test/unittest_utils.py --- a/test/unittest_utils.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/unittest_utils.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,16 +15,16 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
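(The test_js case added to unittest_uilib.py above exercises the small `js` expression builder that this changeset introduces in uilib.py further down. A minimal usage sketch, assuming the implementation shown in the uilib.py hunk; the callback name is the one used in the view.py hunk, the argument values are purely illustrative:)

    from cubicweb.uilib import js

    # attribute access builds a dotted JS identifier; calling it serializes the
    # arguments through json_dumps, so strings get double quotes and None
    # becomes null
    onclick = "javascript: %s" % js.userCallbackThenReloadPage('cb_1234', u'done')
    # -> 'javascript: userCallbackThenReloadPage("cb_1234","done")'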
-"""unit tests for module cubicweb.utils - -""" +"""unit tests for module cubicweb.utils""" import re import decimal import datetime from logilab.common.testlib import TestCase, unittest_main + from cubicweb.utils import make_uid, UStringIO, SizeConstrainedList, RepeatList +from cubicweb.entity import Entity try: from cubicweb.utils import CubicWebJsonEncoder, json @@ -57,47 +57,48 @@ def test_base(self): l = RepeatList(3, (1, 3)) - self.assertEquals(l[0], (1, 3)) - self.assertEquals(l[2], (1, 3)) - self.assertEquals(l[-1], (1, 3)) - self.assertEquals(len(l), 3) + self.assertEqual(l[0], (1, 3)) + self.assertEqual(l[2], (1, 3)) + self.assertEqual(l[-1], (1, 3)) + self.assertEqual(len(l), 3) # XXX - self.assertEquals(l[4], (1, 3)) + self.assertEqual(l[4], (1, 3)) self.failIf(RepeatList(0, None)) def test_slice(self): l = RepeatList(3, (1, 3)) - self.assertEquals(l[0:1], [(1, 3)]) - self.assertEquals(l[0:4], [(1, 3)]*3) - self.assertEquals(l[:], [(1, 3)]*3) + self.assertEqual(l[0:1], [(1, 3)]) + self.assertEqual(l[0:4], [(1, 3)]*3) + self.assertEqual(l[:], [(1, 3)]*3) def test_iter(self): - self.assertEquals(list(RepeatList(3, (1, 3))), + self.assertEqual(list(RepeatList(3, (1, 3))), [(1, 3)]*3) def test_add(self): l = RepeatList(3, (1, 3)) - self.assertEquals(l + [(1, 4)], [(1, 3)]*3 + [(1, 4)]) - self.assertEquals([(1, 4)] + l, [(1, 4)] + [(1, 3)]*3) - self.assertEquals(l + RepeatList(2, (2, 3)), [(1, 3)]*3 + [(2, 3)]*2) + self.assertEqual(l + [(1, 4)], [(1, 3)]*3 + [(1, 4)]) + self.assertEqual([(1, 4)] + l, [(1, 4)] + [(1, 3)]*3) + self.assertEqual(l + RepeatList(2, (2, 3)), [(1, 3)]*3 + [(2, 3)]*2) x = l + RepeatList(2, (1, 3)) self.assertIsInstance(x, RepeatList) - self.assertEquals(len(x), 5) - self.assertEquals(x[0], (1, 3)) + self.assertEqual(len(x), 5) + self.assertEqual(x[0], (1, 3)) x = l + [(1, 3)] * 2 - self.assertEquals(x, [(1, 3)] * 5) + self.assertEqual(x, [(1, 3)] * 5) def test_eq(self): - self.assertEquals(RepeatList(3, (1, 3)), + self.assertEqual(RepeatList(3, (1, 3)), [(1, 3)]*3) def test_pop(self): l = RepeatList(3, (1, 3)) l.pop(2) - self.assertEquals(l, [(1, 3)]*2) + self.assertEqual(l, [(1, 3)]*2) + class SizeConstrainedListTC(TestCase): @@ -105,7 +106,7 @@ l = SizeConstrainedList(10) for i in xrange(12): l.append(i) - self.assertEquals(l, range(2, 12)) + self.assertEqual(l, range(2, 12)) def test_extend(self): testdata = [(range(5), range(5)), @@ -115,29 +116,44 @@ for extension, expected in testdata: l = SizeConstrainedList(10) l.extend(extension) - yield self.assertEquals, l, expected + yield self.assertEqual, l, expected + class JSONEncoderTC(TestCase): def setUp(self): if json is None: - self.skip('json not available') + self.skipTest('json not available') def encode(self, value): return json.dumps(value, cls=CubicWebJsonEncoder) def test_encoding_dates(self): - self.assertEquals(self.encode(datetime.datetime(2009, 9, 9, 20, 30)), + self.assertEqual(self.encode(datetime.datetime(2009, 9, 9, 20, 30)), '"2009/09/09 20:30:00"') - self.assertEquals(self.encode(datetime.date(2009, 9, 9)), + self.assertEqual(self.encode(datetime.date(2009, 9, 9)), '"2009/09/09"') - self.assertEquals(self.encode(datetime.time(20, 30)), + self.assertEqual(self.encode(datetime.time(20, 30)), '"20:30:00"') def test_encoding_decimal(self): - self.assertEquals(self.encode(decimal.Decimal('1.2')), '1.2') + self.assertEqual(self.encode(decimal.Decimal('1.2')), '1.2') + + def test_encoding_bare_entity(self): + e = Entity(None) + e['pouet'] = 'hop' + e.eid = 2 + 
self.assertEqual(json.loads(self.encode(e)), + {'pouet': 'hop', 'eid': 2}) + + def test_encoding_entity_in_list(self): + e = Entity(None) + e['pouet'] = 'hop' + e.eid = 2 + self.assertEqual(json.loads(self.encode([e])), + [{'pouet': 'hop', 'eid': 2}]) def test_encoding_unknown_stuff(self): - self.assertEquals(self.encode(TestCase), 'null') + self.assertEqual(self.encode(TestCase), 'null') if __name__ == '__main__': diff -r f4d1d5d9ccbb -r 90f2f20367bc test/unittest_vregistry.py --- a/test/unittest_vregistry.py Tue Jul 27 12:36:03 2010 +0200 +++ b/test/unittest_vregistry.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -""" -""" from logilab.common.testlib import unittest_main, TestCase from os.path import join @@ -27,7 +25,7 @@ from cubicweb.cwvreg import CubicWebVRegistry, UnknownProperty from cubicweb.devtools import TestServerConfiguration from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.interfaces import IMileStone +from cubicweb.view import EntityAdapter from cubes.card.entities import Card @@ -50,27 +48,32 @@ self.vreg.load_file(join(BASE, 'entities', '__init__.py'), 'cubicweb.entities.__init__') self.vreg.load_file(join(WEBVIEWSDIR, 'idownloadable.py'), 'cubicweb.web.views.idownloadable') self.vreg.load_file(join(WEBVIEWSDIR, 'primary.py'), 'cubicweb.web.views.primary') - self.assertEquals(len(self.vreg['views']['primary']), 2) + self.assertEqual(len(self.vreg['views']['primary']), 2) self.vreg.initialization_completed() - self.assertEquals(len(self.vreg['views']['primary']), 1) + self.assertEqual(len(self.vreg['views']['primary']), 1) def test_load_subinterface_based_appobjects(self): - self.vreg.reset() self.vreg.register_objects([join(BASE, 'web', 'views', 'iprogress.py')]) # check progressbar was kicked self.failIf(self.vreg['views'].get('progressbar')) - class MyCard(Card): - __implements__ = (IMileStone,) - self.vreg.reset() + # we've to emulate register_objects to add custom MyCard objects + path = [join(BASE, 'entities', '__init__.py'), + join(BASE, 'entities', 'adapters.py'), + join(BASE, 'web', 'views', 'iprogress.py')] + filemods = self.vreg.init_registration(path, None) + for filepath, modname in filemods: + self.vreg.load_file(filepath, modname) + class CardIProgressAdapter(EntityAdapter): + __regid__ = 'IProgress' self.vreg._loadedmods[__name__] = {} - self.vreg.register(MyCard) - self.vreg.register_objects([join(BASE, 'entities', '__init__.py'), - join(BASE, 'web', 'views', 'iprogress.py')]) + self.vreg.register(CardIProgressAdapter) + self.vreg.initialization_completed() # check progressbar isn't kicked - self.assertEquals(len(self.vreg['views']['progressbar']), 1) + self.assertEqual(len(self.vreg['views']['progressbar']), 1) def test_properties(self): + self.vreg.reset() self.failIf('system.version.cubicweb' in self.vreg['propertydefs']) self.failUnless(self.vreg.property_info('system.version.cubicweb')) self.assertRaises(UnknownProperty, self.vreg.property_info, 'a.non.existent.key') @@ -81,7 +84,7 @@ def test_property_default_overriding(self): # see data/views.py from cubicweb.web.views.xmlrss import RSSIconBox - self.assertEquals(self.vreg.property_info(RSSIconBox._cwpropkey('visible'))['default'], True) + self.assertEqual(self.vreg.property_info(RSSIconBox._cwpropkey('visible'))['default'], True) if __name__ == '__main__': unittest_main() diff -r f4d1d5d9ccbb -r 90f2f20367bc toolsutils.py --- a/toolsutils.py Tue Jul 27 12:36:03 2010 
+0200 +++ b/toolsutils.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""some utilities for cubicweb tools +"""some utilities for cubicweb command line tools""" -""" __docformat__ = "restructuredtext en" # XXX move most of this in logilab.common (shellutils ?) @@ -33,8 +32,7 @@ def symlink(*args): raise NotImplementedError -from logilab.common.clcommands import Command as BaseCommand, \ - main_run as base_main_run +from logilab.common.clcommands import Command as BaseCommand from logilab.common.compat import any from logilab.common.shellutils import ASK @@ -196,30 +194,6 @@ config_file, ex) return config -def env_path(env_var, default, name, checkexists=True): - """get a path specified in a variable or using the default value and return - it. - - :type env_var: str - :param env_var: name of an environment variable - - :type default: str - :param default: default value if the environment variable is not defined - - :type name: str - :param name: the informal name of the path, used for error message - - :rtype: str - :return: the value of the environment variable or the default value - - :raise `ConfigurationError`: if the returned path does not exist - """ - path = environ.get(env_var, default) - if checkexists and not exists(path): - raise ConfigurationError('%s directory %s doesn\'t exist' % (name, path)) - return abspath(path) - - _HDLRS = {} @@ -260,17 +234,6 @@ sys.exit(1) -def main_run(args, doc): - """command line tool""" - try: - base_main_run(args, doc, copyright=None) - except ConfigurationError, err: - print 'ERROR: ', err - sys.exit(1) - except ExecutionError, err: - print err - sys.exit(2) - CONNECT_OPTIONS = ( ("user", {'short': 'u', 'type' : 'string', 'metavar': '', diff -r f4d1d5d9ccbb -r 90f2f20367bc uilib.py --- a/uilib.py Tue Jul 27 12:36:03 2010 +0200 +++ b/uilib.py Wed Nov 03 16:38:28 2010 +0100 @@ -31,6 +31,8 @@ from logilab.mtconverter import xml_escape, html_unescape from logilab.common.date import ustrftime +from cubicweb.utils import json_dumps + def rql_for_eid(eid): """return the rql query necessary to fetch entity with the given eid. This @@ -44,6 +46,11 @@ """ return 'Any X WHERE X eid %s' % eid +def eid_param(name, eid): + assert eid is not None + if eid is None: + eid = '' + return '%s:%s' % (name, eid) def printable_value(req, attrtype, value, props=None, displaytime=True): """return a displayable value (i.e. unicode string)""" @@ -228,6 +235,54 @@ # HTML generation helper functions ############################################ +class _JSId(object): + def __init__(self, id, parent=None): + self.id = id + self.parent = parent + def __unicode__(self): + if self.parent: + return u'%s.%s' % (self.parent, self.id) + return unicode(self.id) + def __str__(self): + return unicode(self).encode('utf8') + def __getattr__(self, attr): + return _JSId(attr, self) + def __call__(self, *args): + return _JSCallArgs(args, self) + +class _JSCallArgs(_JSId): + def __init__(self, args, parent=None): + assert isinstance(args, tuple) + self.args = args + self.parent = parent + def __unicode__(self): + args = u','.join(json_dumps(arg) for arg in self.args) + if self.parent: + return u'%s(%s)' % (self.parent, args) + return args + +class _JS(object): + def __getattr__(self, attr): + return _JSId(attr) + +"""magic object to return strings suitable to call some javascript function with +the given arguments (which should be correctly typed). 
+ +>>> str(js.pouet(1, "2")) +'pouet(1,"2")' +>>> str(js.cw.pouet(1, "2")) +'cw.pouet(1,"2")' +>>> str(js.cw.pouet(1, "2").pouet(None)) +'cw.pouet(1,"2").pouet(null)') +""" +js = _JS() + +def domid(string): + """return a valid DOM id from a string (should also be usable in jQuery + search expression...) + """ + return string.replace('.', '_').replace('-', '_') + HTML4_EMPTY_TAGS = frozenset(('base', 'meta', 'link', 'hr', 'br', 'param', 'img', 'area', 'input', 'col')) diff -r f4d1d5d9ccbb -r 90f2f20367bc utils.py --- a/utils.py Tue Jul 27 12:36:03 2010 +0200 +++ b/utils.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""Some utilities for CubicWeb server/clients. +"""Some utilities for CubicWeb server/clients.""" -""" __docformat__ = "restructuredtext en" import os @@ -121,6 +120,9 @@ def __init__(self, size, item): self._size = size self._item = item + def __repr__(self): + return '' % ( + id(self), self._item, self._size) def __len__(self): return self._size def __nonzero__(self): @@ -129,6 +131,9 @@ return repeat(self._item, self._size) def __getitem__(self, index): return self._item + def __delitem__(self, idc): + assert self._size > 0 + self._size -= 1 def __getslice__(self, i, j): # XXX could be more efficient, but do we bother? return ([self._item] * self._size)[i:j] @@ -178,6 +183,11 @@ javascripts and stylesheets """ js_unload_code = u'jQuery(window).unload(unloadPageData);' + # Making ' def __init__(self): super(HTMLHead, self).__init__() @@ -251,14 +261,14 @@ w = self.write # 1/ variable declaration if any if self.jsvars: - w(u'\n') + w(self.xhtml_safe_script_closing) # 2/ css files for cssfile, media in self.cssfiles: w(u'\n' % @@ -276,9 +286,9 @@ xml_escape(jsfile)) # 5/ post inlined scripts (i.e. scripts depending on other JS files) if self.post_inlined_scripts: - w(u'\n') + w(self.xhtml_safe_script_closing) header = super(HTMLHead, self).getvalue() if skiphead: return header @@ -324,36 +334,28 @@ try: # may not be there if cubicweb-web not installed - if sys.version_info < (2,6): + if sys.version_info < (2, 6): import simplejson as json else: import json except ImportError: - pass + json_dumps = None + else: + from logilab.common.date import ustrftime class CubicWebJsonEncoder(json.JSONEncoder): """define a json encoder to be able to encode yams std types""" - # _iterencode is the only entry point I've found to use a custom encode - # hook early enough: .default() is called if nothing else matched before, - # .iterencode() is called once on the main structure to encode and then - # never gets called again. - # For the record, our main use case is in FormValidateController with: - # json.dumps((status, args, entity), cls=CubicWebJsonEncoder) - # where we want all the entity attributes, including eid, to be part - # of the json object dumped. - # This would have once more been easier if Entity didn't extend dict. 
- def _iterencode(self, obj, markers=None): - if hasattr(obj, '__json_encode__'): - obj = obj.__json_encode__() - return json.JSONEncoder._iterencode(self, obj, markers) - def default(self, obj): + if hasattr(obj, 'eid'): + d = obj.cw_attr_cache.copy() + d['eid'] = obj.eid + return d if isinstance(obj, datetime.datetime): - return obj.strftime('%Y/%m/%d %H:%M:%S') + return ustrftime(obj, '%Y/%m/%d %H:%M:%S') elif isinstance(obj, datetime.date): - return obj.strftime('%Y/%m/%d') + return ustrftime(obj, '%Y/%m/%d') elif isinstance(obj, datetime.time): return obj.strftime('%H:%M:%S') elif isinstance(obj, datetime.timedelta): @@ -367,6 +369,9 @@ # just return None in those cases. return None + def json_dumps(value): + return json.dumps(value, cls=CubicWebJsonEncoder) + @deprecated('[3.7] merge_dicts is deprecated') def merge_dicts(dict1, dict2): @@ -379,7 +384,7 @@ _THIS_MOD_NS = globals() for funcname in ('date_range', 'todate', 'todatetime', 'datetime2ticks', 'days_in_month', 'days_in_year', 'previous_month', - 'next_month', 'first_day', 'last_day', 'ustrftime', + 'next_month', 'first_day', 'last_day', 'strptime'): msg = '[3.6] %s has been moved to logilab.common.date' % funcname _THIS_MOD_NS[funcname] = deprecated(msg)(getattr(date, funcname)) diff -r f4d1d5d9ccbb -r 90f2f20367bc view.py --- a/view.py Tue Jul 27 12:36:03 2010 +0200 +++ b/view.py Wed Nov 03 16:38:28 2010 +0100 @@ -23,7 +23,6 @@ from cStringIO import StringIO from warnings import warn -from cubicweb.utils import json from logilab.common.deprecation import deprecated from logilab.mtconverter import xml_escape @@ -33,7 +32,9 @@ from cubicweb.selectors import yes, non_final_entity, nonempty_rset, none_rset from cubicweb.appobject import AppObject from cubicweb.utils import UStringIO, HTMLStream +from cubicweb.uilib import domid, js from cubicweb.schema import display_name +from cubicweb.vregistry import classid # robots control NOINDEX = u'' @@ -334,7 +335,8 @@ """ return the url of the entity creation form for a given entity type""" return self._cw.build_url('add/%s' % etype, **kwargs) - def field(self, label, value, row=True, show_label=True, w=None, tr=True, table=False): + def field(self, label, value, row=True, show_label=True, w=None, tr=True, + table=False): """read-only field""" if w is None: w = self.w @@ -366,6 +368,17 @@ __select__ = non_final_entity() category = 'entityview' + def call(self, **kwargs): + if self.cw_rset is None: + self.entity_call(self.cw_extra_kwargs.pop('entity')) + else: + super(EntityView, self).call(**kwargs) + + def cell_call(self, row, col, **kwargs): + self.entity_call(self.cw_rset.get_entity(row, col), **kwargs) + + def entity_call(self, entity, **kwargs): + raise NotImplementedError() class StartupView(View): """base class for views which doesn't need a particular result set to be @@ -493,12 +506,11 @@ def build_update_js_call(self, cbname, msg): rql = self.cw_rset.printable_rql() - return "javascript:userCallbackThenUpdateUI('%s', '%s', %s, %s, '%s', '%s')" % ( - cbname, self.id, json.dumps(rql), json.dumps(msg), - self.__registry__, self.div_id()) + return "javascript: %s" % js.userCallbackThenUpdateUI( + cbname, self.__regid__, rql, msg, self.__registry__, self.domid) def build_reload_js_call(self, cbname, msg): - return "javascript:userCallbackThenReloadPage('%s', %s)" % (cbname, json.dumps(msg)) + return "javascript: %s" % js.userCallbackThenReloadPage(cbname, msg) build_js = build_update_js_call # expect updatable component by default @@ -519,3 +531,37 @@ # XXX a generic '%s%s' % 
(self.__regid__, self.__registry__.capitalize()) would probably be nicer def div_id(self): return '%sComponent' % self.__regid__ + + +class Adapter(AppObject): + """base class for adapters""" + __registry__ = 'adapters' + + +class EntityAdapter(Adapter): + """base class for entity adapters (eg adapt an entity to an interface)""" + def __init__(self, _cw, **kwargs): + try: + self.entity = kwargs.pop('entity') + except KeyError: + self.entity = kwargs['rset'].get_entity(kwargs.get('row') or 0, + kwargs.get('col') or 0) + Adapter.__init__(self, _cw, **kwargs) + + +def implements_adapter_compat(iface): + def _pre39_compat(func): + def decorated(self, *args, **kwargs): + entity = self.entity + if hasattr(entity, func.__name__): + warn('[3.9] %s method is deprecated, define it on a custom ' + '%s for %s instead' % (func.__name__, iface, + classid(entity.__class__)), + DeprecationWarning) + member = getattr(entity, func.__name__) + if callable(member): + return member(*args, **kwargs) + return member + return func(self, *args, **kwargs) + return decorated + return _pre39_compat diff -r f4d1d5d9ccbb -r 90f2f20367bc vregistry.py --- a/vregistry.py Tue Jul 27 12:36:03 2010 +0200 +++ b/vregistry.py Wed Nov 03 16:38:28 2010 +0100 @@ -44,7 +44,8 @@ from cubicweb import CW_SOFTWARE_ROOT from cubicweb import RegistryNotFound, ObjectNotFound, NoSelectableObject -from cubicweb.appobject import AppObject +from cubicweb.appobject import AppObject, class_regid + def _toload_info(path, extrapath, _toload=None): """return a dictionary of : and an ordered list of @@ -83,16 +84,6 @@ """returns a unique identifier for an appobject class""" return '%s.%s' % (cls.__module__, cls.__name__) -def class_regid(cls): - """returns a unique identifier for an appobject class""" - if 'id' in cls.__dict__: - warn('[3.6] %s.%s: id is deprecated, use __regid__' - % (cls.__module__, cls.__name__), DeprecationWarning) - cls.__regid__ = cls.id - if hasattr(cls, 'id') and not isinstance(cls.id, property): - return cls.id - return cls.__regid__ - def class_registries(cls, registryname): if registryname: return (registryname,) @@ -231,17 +222,14 @@ elif appobjectscore > 0 and appobjectscore == score: winners.append(appobject) if winners is None: - raise NoSelectableObject('args: %s\nkwargs: %s %s' - % (args, kwargs.keys(), - [repr(v) for v in appobjects])) + raise NoSelectableObject(args, kwargs, appobjects) if len(winners) > 1: - # log in production environement, error while debugging - if self.config.debugmode: - raise Exception('select ambiguity, args: %s\nkwargs: %s %s' - % (args, kwargs.keys(), - [repr(v) for v in winners])) - self.error('select ambiguity, args: %s\nkwargs: %s %s', - args, kwargs.keys(), [repr(v) for v in winners]) + # log in production environement / test, error while debugging + msg = 'select ambiguity: %s\n(args: %s, kwargs: %s)' + if self.config.debugmode or self.config.mode == 'test': + # raise bare exception in debug mode + raise Exception(msg % (winners, args, kwargs.keys())) + self.error(msg, winners, args, kwargs.keys()) # return the result of calling the appobject return winners[0](*args, **kwargs) @@ -382,7 +370,7 @@ for registryname in class_registries(obj, registryname): registry = self.setdefault(registryname) registry.register(obj, oid=oid, clear=clear) - self.debug('registered appobject %s in registry %s with id %s', + self.debug('register %s in %s[\'%s\']', vname, registryname, oid or class_regid(obj)) self._loadedmods.setdefault(obj.__module__, {})[classid(obj)] = obj @@ -405,6 +393,7 @@ # 
initialization methods ################################################### def init_registration(self, path, extrapath=None): + self.reset() # compute list of all modules that have to be loaded self._toloadmods, filemods = _toload_info(path, extrapath) # XXX is _loadedmods still necessary ? It seems like it's useful @@ -491,7 +480,7 @@ - first ensure parent classes are already registered - class with __abstract__ == True in their local dictionnary or - with a name starting starting by an underscore are not registered + with a name starting with an underscore are not registered - appobject class needs to have __registry__ and __regid__ attributes set to a non empty string to be registered. diff -r f4d1d5d9ccbb -r 90f2f20367bc web/__init__.py --- a/web/__init__.py Tue Jul 27 12:36:03 2010 +0200 +++ b/web/__init__.py Wed Nov 03 16:38:28 2010 +0100 @@ -17,26 +17,20 @@ # with CubicWeb. If not, see . """CubicWeb web client core. You'll need a apache-modpython or twisted publisher to get a full CubicWeb web application - +""" -""" __docformat__ = "restructuredtext en" _ = unicode -import sys -if sys.version_info < (2,6): - import simplejson as json -else: - import json - -dumps = json.dumps - from urllib import quote as urlquote from logilab.common.deprecation import deprecated from cubicweb.web._exceptions import * -from cubicweb.utils import CubicWebJsonEncoder +from cubicweb.utils import json_dumps +from cubicweb.uilib import eid_param + +dumps = deprecated('[3.9] use cubicweb.utils.json_dumps instead of dumps')(json_dumps) INTERNAL_FIELD_VALUE = '__cubicweb_internal_field__' @@ -51,13 +45,6 @@ NO = (_('no'), None) -def eid_param(name, eid): - assert eid is not None - if eid is None: - eid = '' - return '%s:%s' % (name, eid) - - from logging import getLogger LOGGER = getLogger('cubicweb.web') @@ -65,9 +52,6 @@ FACETTES = set() -def json_dumps(value): - return dumps(value, cls=CubicWebJsonEncoder) - def jsonize(function): def newfunc(*args, **kwargs): value = function(*args, **kwargs) @@ -77,7 +61,7 @@ return json_dumps(repr(value)) return newfunc -@deprecated('[3.4] use req.build_ajax_replace_url() instead') +@deprecated('[3.4] use req.ajax_replace_url() instead') def ajax_replace_url(nodeid, rql, vid=None, swap=False, **extraparams): """builds a replacePageChunk-like url >>> ajax_replace_url('foo', 'Person P') diff -r f4d1d5d9ccbb -r 90f2f20367bc web/_exceptions.py --- a/web/_exceptions.py Tue Jul 27 12:36:03 2010 +0200 +++ b/web/_exceptions.py Wed Nov 03 16:38:28 2010 +0100 @@ -16,12 +16,12 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""exceptions used in the core of the CubicWeb web application +"""exceptions used in the core of the CubicWeb web application""" -""" __docformat__ = "restructuredtext en" from cubicweb._exceptions import * +from cubicweb.utils import json_dumps class PublishException(CubicWebException): """base class for publishing related exception""" @@ -66,8 +66,7 @@ self.reason = reason def dumps(self): - from cubicweb.web import json - return json.dumps({'reason': self.reason}) + return json_dumps({'reason': self.reason}) class LogOut(PublishException): """raised to ask for deauthentication of a logged in user""" diff -r f4d1d5d9ccbb -r 90f2f20367bc web/action.py --- a/web/action.py Tue Jul 27 12:36:03 2010 +0200 +++ b/web/action.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""abstract action classes for CubicWeb web client +"""abstract action classes for CubicWeb web client""" -""" __docformat__ = "restructuredtext en" _ = unicode diff -r f4d1d5d9ccbb -r 90f2f20367bc web/application.py --- a/web/application.py Tue Jul 27 12:36:03 2010 +0200 +++ b/web/application.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""CubicWeb web client application object +"""CubicWeb web client application object""" -""" from __future__ import with_statement __docformat__ = "restructuredtext en" @@ -196,7 +195,7 @@ if no session id is found, open a new session for the connected user or request authentification as needed - :raise Redirect: if authentication has occured and succeed + :raise Redirect: if authentication has occurred and succeed """ cookie = req.get_cookie() try: @@ -234,7 +233,7 @@ def _update_last_login_time(self, req): # XXX should properly detect missing permission / non writeable source # and avoid "except (RepositoryError, Unauthorized)" below - if req.user.metainformation()['source']['adapter'] == 'ldapuser': + if req.user.cw_metainformation()['source']['adapter'] == 'ldapuser': return try: req.execute('SET X last_login_time NOW WHERE X eid %(x)s', @@ -282,12 +281,12 @@ to publish HTTP request. """ - def __init__(self, config, debug=None, + def __init__(self, config, session_handler_fact=CookieSessionHandler, vreg=None): self.info('starting web instance from %s', config.apphome) if vreg is None: - vreg = cwvreg.CubicWebVRegistry(config, debug=debug) + vreg = cwvreg.CubicWebVRegistry(config) self.vreg = vreg # connect to the repository and get instance's schema self.repo = config.repository(vreg) @@ -370,7 +369,8 @@ """ path = path or 'view' # don't log form values they may contains sensitive information - self.info('publish "%s" (form params: %s)', path, req.form.keys()) + self.info('publish "%s" (%s, form params: %s)', + path, req.session.sessionid, req.form.keys()) # remove user callbacks on a new request (except for json controllers # to avoid callbacks being unregistered before they could be called) tstart = clock() @@ -430,11 +430,12 @@ self.validation_error_handler(req, ex) except (Unauthorized, BadRQLQuery, RequestError), ex: self.error_handler(req, ex, tb=False) - except Exception, ex: + except BaseException, ex: self.error_handler(req, ex, tb=True) except: self.critical('Catch all triggered!!!') self.exception('this is what happened') + result = 'oops' finally: if req.cnx and not commited: try: diff -r f4d1d5d9ccbb -r 90f2f20367bc web/box.py --- a/web/box.py Tue Jul 27 12:36:03 2010 +0200 +++ b/web/box.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""abstract box classes for CubicWeb web client +"""abstract box classes for CubicWeb web client""" -""" __docformat__ = "restructuredtext en" _ = unicode @@ -26,10 +25,11 @@ from cubicweb import Unauthorized, role as get_role, target as get_target from cubicweb.schema import display_name from cubicweb.selectors import (no_cnx, one_line_rset, primary_view, - match_context_prop, partial_has_related_entities) + match_context_prop, partial_relation_possible, + partial_has_related_entities) from cubicweb.view import View, ReloadableMixIn - -from cubicweb.web import INTERNAL_FIELD_VALUE +from cubicweb.uilib import domid, js +from cubicweb.web import INTERNAL_FIELD_VALUE, stdmsgs from cubicweb.web.htmlwidgets import (BoxLink, BoxWidget, SideBoxWidget, RawBoxItem, BoxSeparator) from cubicweb.web.action import UnregisteredAction @@ -143,7 +143,7 @@ def to_display_rql(self): assert self.rql is not None, self.__regid__ - return (self.rql, {'x': self._cw.user.eid}, 'x') + return (self.rql, {'x': self._cw.user.eid}) class EntityBoxTemplate(BoxTemplate): @@ -165,7 +165,8 @@ role = get_role(self) self.w(u'') @@ -180,7 +181,8 @@ def cell_call(self, row, col, view=None, **kwargs): self._cw.add_js('cubicweb.ajax.js') entity = self.cw_rset.get_entity(row, col) - box = SideBoxWidget(display_name(self._cw, self.rtype), self.__regid__) + title = display_name(self._cw, self.rtype, get_role(self), context=entity.__regid__) + box = SideBoxWidget(title, self.__regid__) related = self.related_boxitems(entity) unrelated = self.unrelated_boxitems(entity) box.extend(related) @@ -224,8 +226,8 @@ """returns the list of unrelated entities, using the entity's appropriate vocabulary function """ - skip = set(e.eid for e in entity.related(self.rtype, get_role(self), - entities=True)) + skip = set(unicode(e.eid) for e in entity.related(self.rtype, get_role(self), + entities=True)) skip.add(None) skip.add(INTERNAL_FIELD_VALUE) filteretype = getattr(self, 'etype', None) @@ -241,3 +243,92 @@ entities.append(entity) return entities + +class AjaxEditRelationBoxTemplate(EntityBoxTemplate): + __select__ = EntityBoxTemplate.__select__ & partial_relation_possible() + + # view used to display related entties + item_vid = 'incontext' + # values separator when multiple values are allowed + separator = ',' + # msgid of the message to display when some new relation has been added/removed + added_msg = None + removed_msg = None + + # class attributes below *must* be set in concret classes (additionaly to + # rtype / role [/ target_etype]. They should correspond to js_* methods on + # the json controller + + # function(eid) + # -> expected to return a list of values to display as input selector + # vocabulary + fname_vocabulary = None + + # function(eid, value) + # -> handle the selector's input (eg create necessary entities and/or + # relations). If the relation is multiple, you'll get a list of value, else + # a single string value. 
+ fname_validate = None + + # function(eid, linked entity eid) + # -> remove the relation + fname_remove = None + + def cell_call(self, row, col, **kwargs): + req = self._cw + entity = self.cw_rset.get_entity(row, col) + related = entity.related(self.rtype, self.role) + rdef = entity.e_schema.rdef(self.rtype, self.role, self.target_etype) + if self.role == 'subject': + mayadd = rdef.has_perm(req, 'add', fromeid=entity.eid) + maydel = rdef.has_perm(req, 'delete', fromeid=entity.eid) + else: + mayadd = rdef.has_perm(req, 'add', toeid=entity.eid) + maydel = rdef.has_perm(req, 'delete', toeid=entity.eid) + if not (related or mayadd): + return + if mayadd or maydel: + req.add_js(('cubicweb.ajax.js', 'cubicweb.ajax.box.js')) + _ = req._ + w = self.w + divid = domid(self.__regid__) + unicode(entity.eid) + w(u'
        ') + for rentity in related.entities(): + # for each related entity, provide a link to remove the relation + subview = rentity.view(self.item_vid) + if maydel: + jscall = unicode(js.ajaxBoxRemoveLinkedEntity( + self.__regid__, entity.eid, rentity.eid, + self.fname_remove, + self.removed_msg and _(self.removed_msg))) + w(u'' + '' % (xml_escape(jscall), + subview)) + else: + w(u'' % (subview)) + w(u'
        [-]%s
        %s
        ') + else: + w(_('no related entity')) + if mayadd: + req.add_js('jquery.autocomplete.js') + req.add_css('jquery.autocomplete.css') + multiple = rdef.role_cardinality(self.role) in '*+' + w(u'
        ') + jscall = unicode(js.ajaxBoxShowSelector( + self.__regid__, entity.eid, self.fname_vocabulary, + self.fname_validate, self.added_msg and _(self.added_msg), + _(stdmsgs.BUTTON_OK[0]), _(stdmsgs.BUTTON_CANCEL[0]), + multiple and self.separator)) + w('%s' % ( + xml_escape(jscall), + multiple and _('add_relation') or _('update_relation'))) + w(u'') + w(u'
        ' % divid) + w(u'
        ') + w(u'
        \n') + w(u'
        \n') diff -r f4d1d5d9ccbb -r 90f2f20367bc web/component.py --- a/web/component.py Tue Jul 27 12:36:03 2010 +0200 +++ b/web/component.py Wed Nov 03 16:38:28 2010 +0100 @@ -15,9 +15,10 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""abstract component class and base components definition for CubicWeb web client +"""abstract component class and base components definition for CubicWeb web +client +""" -""" __docformat__ = "restructuredtext en" _ = unicode @@ -25,7 +26,8 @@ from logilab.mtconverter import xml_escape from cubicweb import role -from cubicweb.web import json +from cubicweb.utils import json_dumps +from cubicweb.uilib import js from cubicweb.view import Component from cubicweb.selectors import ( paginated_rset, one_line_rset, primary_view, match_context_prop, @@ -61,9 +63,15 @@ context = 'navcontentbottom' def call(self, view=None): - return self.cell_call(0, 0, view=view) + if self.cw_rset is None: + self.entity_call(self.cw_extra_kwargs.pop('entity')) + else: + self.cell_call(0, 0, view=view) def cell_call(self, row, col, view=None): + self.entity_call(self.cw_rset.get_entity(row, col), view=view) + + def entity_call(self, entity, view=None): raise NotImplementedError() @@ -126,23 +134,31 @@ if self.stop_param in params: del params[self.stop_param] - def page_url(self, path, params, start, stop): + def page_url(self, path, params, start=None, stop=None): params = dict(params) - params.update({self.start_param : start, - self.stop_param : stop,}) + if start is not None: + params[self.start_param] = start + if stop is not None: + params[self.stop_param] = stop view = self.cw_extra_kwargs.get('view') if view is not None and hasattr(view, 'page_navigation_url'): url = view.page_navigation_url(self, path, params) elif path == 'json': - rql = params.pop('rql', self.cw_rset.printable_rql()) - # latest 'true' used for 'swap' mode - url = 'javascript: replacePageChunk(%s, %s, %s, %s, true)' % ( - json.dumps(params.get('divid', 'pageContent')), - json.dumps(rql), json.dumps(params.pop('vid', None)), json.dumps(params)) + url = self.ajax_page_url(**params) else: url = self._cw.build_url(path, **params) + # XXX hack to avoid opening a new page containing the evaluation of the + # js expression on ajax call + if url.startswith('javascript:'): + url += '; noop();' return url + def ajax_page_url(self, **params): + divid = params.setdefault('divid', 'pageContent') + params['rql'] = self.cw_rset.printable_rql() + return "javascript: $(%s).loadxhtml('json', %s, 'get', 'swap')" % ( + json_dumps('#'+divid), js.ajaxFuncArgs('view', params)) + def page_link(self, path, params, start, stop, content): url = xml_escape(self.page_url(path, params, start, stop)) if start == self.starting_from: diff -r f4d1d5d9ccbb -r 90f2f20367bc web/controller.py --- a/web/controller.py Tue Jul 27 12:36:03 2010 +0200 +++ b/web/controller.py Wed Nov 03 16:38:28 2010 +0100 @@ -23,6 +23,7 @@ from cubicweb.selectors import yes from cubicweb.appobject import AppObject +from cubicweb.mail import format_mail from cubicweb.web import LOGGER, Redirect, RequestError @@ -79,18 +80,20 @@ # generic methods useful for concrete implementations ###################### - def process_rql(self, rql): + def process_rql(self): """execute rql if specified""" - # XXX assigning to self really necessary? 
- self.cw_rset = None + req = self._cw + rql = req.form.get('rql') if rql: - self._cw.ensure_ro_rql(rql) + req.ensure_ro_rql(rql) if not isinstance(rql, unicode): - rql = unicode(rql, self._cw.encoding) - pp = self._cw.vreg['components'].select_or_none('magicsearch', self._cw) + rql = unicode(rql, req.encoding) + pp = req.vreg['components'].select_or_none('magicsearch', req) if pp is not None: - self.cw_rset = pp.process_query(rql) - return self.cw_rset + return pp.process_query(rql) + if 'eid' in req.form: + return req.eid_rset(req.form['eid']) + return None def notify_edited(self, entity): """called by edit_entity() to notify which entity is edited""" @@ -104,6 +107,16 @@ view.set_http_cache_headers() self._cw.validate_cache() + def sendmail(self, recipient, subject, body): + senderemail = self._cw.user.cw_adapt_to('IEmailable').get_email() + msg = format_mail({'email' : senderemail, + 'name' : self._cw.user.dc_title(),}, + [recipient], body, subject) + if not self._cw.vreg.config.sendmails([(msg, [recipient])]): + msg = self._cw._('could not connect to the SMTP server') + url = self._cw.build_url(__message=msg) + raise Redirect(url) + def reset(self): """reset form parameters and redirect to a view determinated by given parameters diff -r f4d1d5d9ccbb -r 90f2f20367bc web/data/actionBoxHeader.png Binary file web/data/actionBoxHeader.png has changed diff -r f4d1d5d9ccbb -r 90f2f20367bc web/data/boxHeader.png Binary file web/data/boxHeader.png has changed diff -r f4d1d5d9ccbb -r 90f2f20367bc web/data/button.png Binary file web/data/button.png has changed diff -r f4d1d5d9ccbb -r 90f2f20367bc web/data/cubicweb.acl.css --- a/web/data/cubicweb.acl.css Tue Jul 27 12:36:03 2010 +0200 +++ b/web/data/cubicweb.acl.css Wed Nov 03 16:38:28 2010 +0100 @@ -6,78 +6,35 @@ */ /******************************************************************************/ -/* security edition form (views/management.py) */ +/* security edition form (views/management.py) web/views/schema.py */ /******************************************************************************/ h2.schema{ - background : #ff7700; - color: #fff; - font-weight: bold; - padding : 0.1em 0.3em; + color: %(aColor)s; } - -h3.schema{ +table.schemaInfo td a.users{ + color : #00CC33; font-weight: bold; } -h4 a, -h4 a:link, -h4 a:visited{ - color:#000; - } - -table.schemaInfo { - margin: 1em 0em; - text-align: left; - border: 1px solid black; - border-collapse: collapse; - width:100%; -} - -table.schemaInfo th, -table.schemaInfo td { - padding: .3em .5em; - border: 1px solid grey; - width:33%; -} - - -table.schemaInfo tr th { - padding: 0.2em 0px 0.2em 5px; - background-image:none; - background-color:#dfdfdf; -} - -table.schemaInfo thead tr { - border: 1px solid #dfdfdf; -} - -table.schemaInfo td { - padding: 3px 10px 3px 5px; - -} - -a.users{ - color : #00CC33; - font-weight: bold } - -a.guests{ - color : #ff7700; +table.schemaInfo td a.guests{ + color: #ff7700; font-weight: bold; } -a.owners{ - color : #8b0000; +table.schemaInfo td a.owners{ + color: #8b0000; font-weight: bold; } -a.managers{ +table.schemaInfo td a.managers{ color: #000000; + font-weight: bold; } .discret, -a.grey{ +table.schemaInfo td a.grey{ color:#666; } @@ -86,39 +43,9 @@ } .red{ - color : #ff7700; + color: #ff7700; } div#schema_security{ width:100%; - } -/******************************************************************************/ -/* user groups edition form (views/euser.py) */ -/******************************************************************************/ - -table#groupedit { 
- margin: 1ex 1em; - text-align: left; - border: 1px solid black; - border-collapse: collapse; -} - -table#groupedit th, -table#groupedit td { - padding: 0.5em 1em; -} - -table#groupedit tr { - border-bottom: 1px solid black; -} - -table#groupedit tr.nogroup { - border: 1px solid red; - margin: 1px; -} - -table#groupedit td { - text-align: center; - padding: 0.5em; -} - + } \ No newline at end of file diff -r f4d1d5d9ccbb -r 90f2f20367bc web/data/cubicweb.ajax.box.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/data/cubicweb.ajax.box.js Wed Nov 03 16:38:28 2010 +0100 @@ -0,0 +1,81 @@ +/** + * Functions for ajax boxes. + * + * :organization: Logilab + * :copyright: 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. + * :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr + * + */ + +function ajaxBoxValidateSelectorInput(boxid, eid, separator, fname, msg) { + var holderid = cw.utils.domid(boxid) + eid + 'Holder'; + var value = $('#' + holderid + 'Input').val(); + if (separator) { + value = $.map(value.split(separator), jQuery.trim); + } + var d = loadRemote('json', ajaxFuncArgs(fname, null, eid, value)); + d.addCallback(function() { + $('#' + holderid).empty(); + var formparams = ajaxFuncArgs('render', null, 'boxes', boxid, eid); + $('#' + cw.utils.domid(boxid) + eid).loadxhtml('json', formparams); + if (msg) { + document.location.hash = '#header'; + updateMessage(msg); + } + }); +} + +function ajaxBoxRemoveLinkedEntity(boxid, eid, relatedeid, delfname, msg) { + var d = loadRemote('json', ajaxFuncArgs(delfname, null, eid, relatedeid)); + d.addCallback(function() { + var formparams = ajaxFuncArgs('render', null, 'boxes', boxid, eid); + $('#' + cw.utils.domid(boxid) + eid).loadxhtml('json', formparams); + if (msg) { + document.location.hash = '#header'; + updateMessage(msg); + } + }); +} + +function ajaxBoxShowSelector(boxid, eid, + unrelfname, + addfname, msg, + oklabel, cancellabel, + separator) { + var holderid = cw.utils.domid(boxid) + eid + 'Holder'; + var holder = $('#' + holderid); + if (holder.children().length) { + holder.empty(); + } + else { + var inputid = holderid + 'Input'; + var deferred = loadRemote('json', ajaxFuncArgs(unrelfname, null, eid)); + deferred.addCallback(function (unrelated) { + var input = INPUT({'type': 'text', 'id': inputid, 'size': 20}); + holder.append(input).show(); + $input = $(input); + $input.keypress(function (event) { + if (event.keyCode == KEYS.KEY_ENTER) { + // XXX not very user friendly: we should test that the suggestions + // aren't visible anymore + ajaxBoxValidateSelectorInput(boxid, eid, separator, addfname, msg); + } + }); + var buttons = DIV({'class' : "sgformbuttons"}, + A({'href' : "javascript: noop();", + 'onclick' : cw.utils.strFuncCall('ajaxBoxValidateSelectorInput', + boxid, eid, separator, addfname, msg)}, + oklabel), + ' / ', + A({'href' : "javascript: noop();", + 'onclick' : '$("#' + holderid + '").empty()'}, + cancellabel)); + holder.append(buttons); + $input.autocomplete(unrelated, { + multiple: separator, + max: 15 + }); + $input.focus(); + }); + } +} diff -r f4d1d5d9ccbb -r 90f2f20367bc web/data/cubicweb.ajax.js --- a/web/data/cubicweb.ajax.js Tue Jul 27 12:36:03 2010 +0200 +++ b/web/data/cubicweb.ajax.js Wed Nov 03 16:38:28 2010 +0100 @@ -1,33 +1,128 @@ -/* - * :organization: Logilab - * :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. - * :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +/* copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
+ * contact http://www.logilab.fr/ -- mailto:contact@logilab.fr + * + * This file is part of CubicWeb. + * + * CubicWeb is free software: you can redistribute it and/or modify it under the + * terms of the GNU Lesser General Public License as published by the Free + * Software Foundation, either version 2.1 of the License, or (at your option) + * any later version. + * + * CubicWeb is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS + * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more + * details. + * + * You should have received a copy of the GNU Lesser General Public License along + * with CubicWeb. If not, see . */ -CubicWeb.require('python.js'); -CubicWeb.require('htmlhelpers.js'); +/** + * .. function:: Deferred + * + * dummy ultra minimalist implementation of deferred for jQuery + */ +function Deferred() { + this.__init__(this); +} + +jQuery.extend(Deferred.prototype, { + __init__: function() { + this._onSuccess = []; + this._onFailure = []; + this._req = null; + this._result = null; + this._error = null; + }, + + addCallback: function(callback) { + if ((this._req.readyState == 4) && this._result) { + var args = [this._result, this._req]; + jQuery.merge(args, cw.utils.sliceList(arguments, 1)); + callback.apply(null, args); + } + else { + this._onSuccess.push([callback, cw.utils.sliceList(arguments, 1)]); + } + return this; + }, + + addErrback: function(callback) { + if (this._req.readyState == 4 && this._error) { + callback.apply(null, [this._error, this._req]); + } + else { + this._onFailure.push([callback, cw.utils.sliceList(arguments, 1)]); + } + return this; + }, + + success: function(result) { + this._result = result; + try { + for (var i = 0; i < this._onSuccess.length; i++) { + var callback = this._onSuccess[i][0]; + var args = [result, this._req]; + jQuery.merge(args, this._onSuccess[i][1]); + callback.apply(null, args); + } + } catch(error) { + this.error(this.xhr, null, error); + } + }, + + error: function(xhr, status, error) { + this._error = error; + for (var i = 0; i < this._onFailure.length; i++) { + var callback = this._onFailure[i][0]; + var args = [error, this._req]; + jQuery.merge(args, this._onFailure[i][1]); + callback.apply(null, args); + } + } + +}); + var JSON_BASE_URL = baseuri() + 'json?'; -function _loadAjaxHtmlHead(node, head, tag, srcattr) { - var loaded = []; +//============= utility function handling remote calls responses. 
==============// +function _loadAjaxHtmlHead($node, $head, tag, srcattr) { var jqtagfilter = tag + '[' + srcattr + ']'; - jQuery('head ' + jqtagfilter).each(function(i) { - loaded.push(this.getAttribute(srcattr)); - }); - node.find(tag).each(function(i) { - if (this.getAttribute(srcattr)) { - if (!loaded.contains(this.getAttribute(srcattr))) { - jQuery(this).appendTo(head); + if (cw['loaded_'+srcattr] === undefined) { + cw['loaded_'+srcattr] = []; + var loaded = cw['loaded_'+srcattr]; + jQuery('head ' + jqtagfilter).each(function(i) { + loaded.push(this.getAttribute(srcattr)); + }); + } else { + var loaded = cw['loaded_'+srcattr]; + } + $node.find(tag).each(function(i) { + var url = this.getAttribute(srcattr); + if (url) { + if (jQuery.inArray(url, loaded) == -1) { + // take care to * - *================================================== + * */ if (typeof SimileAjax == "undefined") { @@ -213,9 +213,9 @@ SimileAjax.loaded = true; })(); } -/*================================================== +/* * Platform Utility Functions and Constants - *================================================== + * */ /* This must be called after our jQuery has been loaded @@ -319,9 +319,10 @@ SimileAjax.Platform.getDefaultLocale = function() { return SimileAjax.Platform.clientLocale; -};/*================================================== +}; +/* * Debug Utility Functions - *================================================== + * */ SimileAjax.Debug = { @@ -678,9 +679,9 @@ } }; })(); -/*================================================== +/* * DOM Utility Functions - *================================================== + * */ SimileAjax.DOM = new Object(); @@ -1040,9 +1041,9 @@ SimileAjax.includeCssFile(document, SimileAjax.urlPrefix + "styles/graphics-ie6.css"); } -/*================================================== +/* * Opacity, translucency - *================================================== + * */ SimileAjax.Graphics._createTranslucentImage1 = function(url, verticalAlign) { var elmt = document.createElement("img"); @@ -1119,9 +1120,9 @@ } }; -/*================================================== +/* * Bubble - *================================================== + * */ SimileAjax.Graphics.bubbleConfig = { @@ -1479,9 +1480,9 @@ }; }; -/*================================================== +/* * Animation - *================================================== + * */ /** @@ -1549,11 +1550,11 @@ } }; -/*================================================== +/* * CopyPasteButton * * Adapted from http://spaces.live.com/editorial/rayozzie/demo/liveclip/liveclipsample/techPreview.html. 
- *================================================== + * */ /** @@ -1606,9 +1607,9 @@ return div; }; -/*================================================== +/* * getWidthHeight - *================================================== + * */ SimileAjax.Graphics.getWidthHeight = function(el) { // RETURNS hash {width: w, height: h} in pixels @@ -1633,9 +1634,9 @@ }; -/*================================================== +/* * FontRenderingContext - *================================================== + * */ SimileAjax.Graphics.getFontRenderingContext = function(elmt, width) { return new SimileAjax.Graphics._FontRenderingContext(elmt, width); @@ -2127,9 +2128,9 @@ var d = new Date().getTimezoneOffset(); return d / -60; }; -/*================================================== +/* * String Utility Functions and Constants - *================================================== + * */ String.prototype.trim = function() { @@ -2170,9 +2171,9 @@ } return result; }; -/*================================================== +/* * HTML Utility Functions - *================================================== + * */ SimileAjax.HTML = new Object(); @@ -2655,9 +2656,9 @@ return (this._a.length > 0) ? this._a[this._a.length - 1] : null; }; -/*================================================== +/* * Event Index - *================================================== + * */ SimileAjax.EventIndex = function(unit) { @@ -2889,9 +2890,9 @@ return this._index < this._events.length() ? this._events.elementAt(this._index++) : null; } -};/*================================================== +};/* * Default Unit - *================================================== + * */ SimileAjax.NativeDateUnit = new Object(); @@ -2953,9 +2954,9 @@ return new Date(v.getTime() + n); }; -/*================================================== +/* * General, miscellaneous SimileAjax stuff - *================================================== + * */ SimileAjax.ListenerQueue = function(wildcardHandlerName) { @@ -2998,7 +2999,7 @@ } }; -/*====================================================================== +/* * History * * This is a singleton that keeps track of undoable user actions and @@ -3020,7 +3021,7 @@ * * An iframe is inserted into the document's body element to track * onload events. - *====================================================================== + * */ SimileAjax.History = { @@ -3632,7 +3633,7 @@ } return elmt; }; -/*================================================== +/* * Timeline API * * This file will load all the Javascript files @@ -3696,7 +3697,7 @@ * Note that the Ajax version is usually NOT the same as the Timeline version. 
* See variable simile_ajax_ver below for the current version * - *================================================== + * */ (function() { @@ -3928,7 +3929,7 @@ loadMe(); } })(); -/*================================================= +/* * * Coding standards: * @@ -3950,14 +3951,14 @@ * We also want to use jslint: http://www.jslint.com/ * * - *================================================== + * */ -/*================================================== +/* * Timeline VERSION - *================================================== + * */ // Note: version is also stored in the build.xml file Timeline.version = 'pre 2.4.0'; // use format 'pre 1.2.3' for trunk versions @@ -3965,9 +3966,9 @@ Timeline.display_version = Timeline.version + ' (with Ajax lib ' + Timeline.ajax_lib_version + ')'; // cf method Timeline.writeVersion -/*================================================== +/* * Timeline - *================================================== + * */ Timeline.strings = {}; // localization string tables Timeline.HORIZONTAL = 0; @@ -4183,9 +4184,9 @@ -/*================================================== +/* * Timeline Implementation object - *================================================== + * */ Timeline._Impl = function(elmt, bandInfos, orientation, unit, timelineID) { SimileAjax.WindowManager.initialize(); @@ -4585,7 +4586,7 @@ this.paint(); }; -/*================================================= +/* * * Coding standards: * @@ -4607,14 +4608,14 @@ * We also want to use jslint: http://www.jslint.com/ * * - *================================================== + * */ -/*================================================== +/* * Band - *================================================== + * */ Timeline._Band = function(timeline, bandInfo, index) { // hack for easier subclassing @@ -5344,9 +5345,9 @@ Timeline._Band.prototype.closeBubble = function() { SimileAjax.WindowManager.cancelPopups(); }; -/*================================================== +/* * Classic Theme - *================================================== + * */ @@ -5523,14 +5524,14 @@ }; this.mouseWheel = 'scroll'; // 'default', 'zoom', 'scroll' -};/*================================================== +};/* * An "ether" is a object that maps date/time to pixel coordinates. 
- *================================================== + * */ -/*================================================== +/* * Linear Ether - *================================================== + * */ Timeline.LinearEther = function(params) { @@ -5601,9 +5602,9 @@ }; -/*================================================== +/* * Hot Zone Ether - *================================================== + * */ Timeline.HotZoneEther = function(params) { @@ -5828,9 +5829,9 @@ Timeline.HotZoneEther.prototype._getScale = function() { return this._interval / this._pixelsPerInterval; }; -/*================================================== +/* * Gregorian Ether Painter - *================================================== + * */ Timeline.GregorianEtherPainter = function(params) { @@ -5919,9 +5920,9 @@ }; -/*================================================== +/* * Hot Zone Gregorian Ether Painter - *================================================== + * */ Timeline.HotZoneGregorianEtherPainter = function(params) { @@ -6080,9 +6081,9 @@ } }; -/*================================================== +/* * Year Count Ether Painter - *================================================== + * */ Timeline.YearCountEtherPainter = function(params) { @@ -6169,9 +6170,9 @@ Timeline.YearCountEtherPainter.prototype.softPaint = function() { }; -/*================================================== +/* * Quarterly Ether Painter - *================================================== + * */ Timeline.QuarterlyEtherPainter = function(params) { @@ -6257,9 +6258,9 @@ Timeline.QuarterlyEtherPainter.prototype.softPaint = function() { }; -/*================================================== +/* * Ether Interval Marker Layout - *================================================== + * */ Timeline.EtherIntervalMarkerLayout = function(timeline, band, theme, align, showLine) { @@ -6363,9 +6364,9 @@ }; }; -/*================================================== +/* * Ether Highlight Layout - *================================================== + * */ Timeline.EtherHighlight = function(timeline, band, theme, backgroundLayer) { @@ -6404,9 +6405,9 @@ } } }; -/*================================================== +/* * Event Utils - *================================================== + * */ Timeline.EventUtils = {}; @@ -6421,7 +6422,7 @@ }; Timeline.EventUtils.decodeEventElID = function(elementID) { - /*================================================== + /* * * Use this function to decode an event element's id on a band (label div, * tape div or icon img). 
@@ -6447,7 +6448,7 @@ * by using Timeline.getTimeline, Timeline.getBand, or * Timeline.getEvent and passing in the element's id * - *================================================== + * */ var parts = elementID.split('-'); @@ -6467,9 +6468,9 @@ // elType should be one of {label | icon | tapeN | highlightN} return elType + "-tl-" + timeline.timelineID + "-" + band.getIndex() + "-" + evt.getID(); -};/*================================================== +};/* * Gregorian Date Labeller - *================================================== + * */ Timeline.GregorianDateLabeller = function(locale, timeZone) { @@ -6558,9 +6559,9 @@ return { text: text, emphasized: emphasized }; } -/*================================================== +/* * Default Event Source - *================================================== + * */ @@ -7125,12 +7126,12 @@ }; -/*================================================== +/* * Original Event Painter - *================================================== + * */ -/*================================================== +/* * * To enable a single event listener to monitor everything * on a Timeline, we need a way to map from an event's icon, @@ -7152,7 +7153,7 @@ * You can then retrieve the band/timeline objects and event object * by using Timeline.EventUtils.decodeEventElID * - *================================================== + * */ /* @@ -7818,9 +7819,9 @@ this._eventPaintListeners[i](this._band, op, evt, els); } }; -/*================================================== +/* * Detailed Event Painter - *================================================== + * */ // Note: a number of features from original-painter @@ -8509,9 +8510,9 @@ this._onSelectListeners[i](eventID); } }; -/*================================================== +/* * Overview Event Painter - *================================================== + * */ Timeline.OverviewEventPainter = function(params) { @@ -8767,9 +8768,9 @@ Timeline.OverviewEventPainter.prototype.showBubble = function(evt) { // not implemented }; -/*================================================== +/* * Compact Event Painter - *================================================== + * */ Timeline.CompactEventPainter = function(params) { @@ -9831,9 +9832,9 @@ this._onSelectListeners[i](eventIDs); } }; -/*================================================== +/* * Span Highlight Decorator - *================================================== + * */ Timeline.SpanHighlightDecorator = function(params) { @@ -9948,9 +9949,9 @@ Timeline.SpanHighlightDecorator.prototype.softPaint = function() { }; -/*================================================== +/* * Point Highlight Decorator - *================================================== + * */ Timeline.PointHighlightDecorator = function(params) { @@ -10015,9 +10016,9 @@ Timeline.PointHighlightDecorator.prototype.softPaint = function() { }; -/*================================================== +/* * Default Unit - *================================================== + * */ Timeline.NativeDateUnit = new Object(); @@ -10083,35 +10084,35 @@ return new Date(v.getTime() + n); }; -/*================================================== +/* * Common localization strings - *================================================== + * */ Timeline.strings["fr"] = { wikiLinkLabel: "Discute" }; -/*================================================== +/* * Localization of labellers.js - *================================================== + * */ Timeline.GregorianDateLabeller.monthNames["fr"] = [ "jan", "fev", "mar", "avr", "mai", 
"jui", "jui", "aou", "sep", "oct", "nov", "dec" ]; -/*================================================== +/* * Common localization strings - *================================================== + * */ Timeline.strings["en"] = { wikiLinkLabel: "Discuss" }; -/*================================================== +/* * Localization of labellers.js - *================================================== + * */ Timeline.GregorianDateLabeller.monthNames["en"] = [ diff -r f4d1d5d9ccbb -r 90f2f20367bc web/data/cubicweb.timeline-ext.js --- a/web/data/cubicweb.timeline-ext.js Tue Jul 27 12:36:03 2010 +0200 +++ b/web/data/cubicweb.timeline-ext.js Wed Nov 03 16:38:28 2010 +0100 @@ -1,49 +1,49 @@ -/* +/** * :organization: Logilab - * :copyright: 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. + * :copyright: 2008-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. * :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr * */ - -/* provide our own custom date parser since the default +/** + * provide our own custom date parser since the default * one only understands iso8601 and gregorian dates */ SimileAjax.NativeDateUnit.getParser = Timeline.NativeDateUnit.getParser = function(format) { if (typeof format == "string") { - if (format.indexOf('%') != -1) { - return function(datestring) { - if (datestring) { - return strptime(datestring, format); - } - return null; - }; - } + if (format.indexOf('%') != - 1) { + return function(datestring) { + if (datestring) { + return strptime(datestring, format); + } + return null; + }; + } format = format.toLowerCase(); } if (format == "iso8601" || format == "iso 8601") { - return Timeline.DateTime.parseIso8601DateTime; + return Timeline.DateTime.parseIso8601DateTime; } return Timeline.DateTime.parseGregorianDateTime; }; /*** CUBICWEB EVENT PAINTER *****************************************************/ Timeline.CubicWebEventPainter = function(params) { -// Timeline.OriginalEventPainter.apply(this, arguments); - this._params = params; - this._onSelectListeners = []; + // Timeline.OriginalEventPainter.apply(this, arguments); + this._params = params; + this._onSelectListeners = []; - this._filterMatcher = null; - this._highlightMatcher = null; - this._frc = null; + this._filterMatcher = null; + this._highlightMatcher = null; + this._frc = null; - this._eventIdToElmt = {}; + this._eventIdToElmt = {}; }; Timeline.CubicWebEventPainter.prototype = new Timeline.OriginalEventPainter(); Timeline.CubicWebEventPainter.prototype._paintEventLabel = function( - evt, text, left, top, width, height, theme) { +evt, text, left, top, width, height, theme) { var doc = this._timeline.getDocument(); var labelDiv = doc.createElement("div"); @@ -54,15 +54,21 @@ labelDiv.style.top = top + "px"; if (evt._obj.onclick) { - labelDiv.appendChild(A({'href': evt._obj.onclick}, text)); + labelDiv.appendChild(A({ + 'href': evt._obj.onclick + }, + text)); } else if (evt._obj.image) { - labelDiv.appendChild(IMG({src: evt._obj.image, width: '30px', height: '30px'})); + labelDiv.appendChild(IMG({ + src: evt._obj.image, + width: '30px', + height: '30px' + })); } else { - labelDiv.innerHTML = text; + labelDiv.innerHTML = text; } - if(evt._title != null) - labelDiv.title = evt._title; + if (evt._title != null) labelDiv.title = evt._title; var color = evt.getTextColor(); if (color == null) { @@ -72,29 +78,31 @@ labelDiv.style.color = color; } var classname = evt.getClassName(); - if(classname) labelDiv.className +=' ' + classname; + if (classname) labelDiv.className += ' ' + classname; 
this._eventLayer.appendChild(labelDiv); return { - left: left, - top: top, - width: width, + left: left, + top: top, + width: width, height: height, - elmt: labelDiv + elmt: labelDiv }; }; +Timeline.CubicWebEventPainter.prototype._showBubble = function(x, y, evt) { + var div = DIV({ + id: 'xxx' + }); + var width = this._params.theme.event.bubble.width; + if (!evt._obj.bubbleUrl) { + evt.fillInfoBubble(div, this._params.theme, this._band.getLabeller()); + } + SimileAjax.WindowManager.cancelPopups(); + SimileAjax.Graphics.createBubbleForContentAndPoint(div, x, y, width); + if (evt._obj.bubbleUrl) { + jQuery('#xxx').loadxhtml(evt._obj.bubbleUrl, null, 'post', 'replace'); + } +}; -Timeline.CubicWebEventPainter.prototype._showBubble = function(x, y, evt) { - var div = DIV({id: 'xxx'}); - var width = this._params.theme.event.bubble.width; - if (!evt._obj.bubbleUrl) { - evt.fillInfoBubble(div, this._params.theme, this._band.getLabeller()); - } - SimileAjax.WindowManager.cancelPopups(); - SimileAjax.Graphics.createBubbleForContentAndPoint(div, x, y, width); - if (evt._obj.bubbleUrl) { - jQuery('#xxx').loadxhtml(evt._obj.bubbleUrl, null, 'post', 'replace'); - } -}; diff -r f4d1d5d9ccbb -r 90f2f20367bc web/data/cubicweb.widgets.js --- a/web/data/cubicweb.widgets.js Tue Jul 27 12:36:03 2010 +0200 +++ b/web/data/cubicweb.widgets.js Wed Nov 03 16:38:28 2010 +0100 @@ -1,4 +1,6 @@ -/* +/** + * Functions dedicated to widgets. + * * :organization: Logilab * :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. * :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr @@ -9,144 +11,176 @@ // widget namespace Widgets = {}; - -/* this function takes a DOM node defining a widget and +/** + * .. function:: buildWidget(wdgnode) + * + * this function takes a DOM node defining a widget and * instantiates / builds the appropriate widget class */ function buildWidget(wdgnode) { var wdgclass = Widgets[wdgnode.getAttribute('cubicweb:wdgtype')]; if (wdgclass) { - var wdg = new wdgclass(wdgnode); + return new wdgclass(wdgnode); } + return null; } -/* This function is called on load and is in charge to build +/** + * .. function:: buildWidgets(root) + * + * This function is called on load and is in charge to build * JS widgets according to DOM nodes found in the page */ function buildWidgets(root) { root = root || document; jQuery(root).find('.widget').each(function() { - if (this.getAttribute('cubicweb:loadtype') == 'auto') { - buildWidget(this); - } + if (this.getAttribute('cubicweb:loadtype') == 'auto') { + buildWidget(this); + } }); } - // we need to differenciate cases where initFacetBoxEvents is called // with one argument or without any argument. If we use `initFacetBoxEvents` // as the direct callback on the jQuery.ready event, jQuery will pass some argument // of his, so we use this small anonymous function instead. -jQuery(document).ready(function() {buildWidgets();}); +jQuery(document).ready(function() { + buildWidgets(); +}); +function postJSON(url, data, callback) { + return jQuery.post(url, data, callback, 'json'); +} + +function getJSON(url, data, callback) { + return jQuery.get(url, data, callback, 'json'); +} Widgets.SuggestField = defclass('SuggestField', null, { __init__: function(node, options) { - var multi = node.getAttribute('cubicweb:multi') || "no"; - options = options || {}; - options.multiple = (multi == "yes") ? 
true : false; - var dataurl = node.getAttribute('cubicweb:dataurl'); + var multi = node.getAttribute('cubicweb:multi') || "no"; + options = options || {}; + options.multiple = (multi == "yes") ? true: false; + var dataurl = node.getAttribute('cubicweb:dataurl'); var method = postJSON; - if (options.method == 'get'){ - method = function(url, data, callback) { - // We can't rely on jQuery.getJSON because the server - // might set the Content-Type's response header to 'text/plain' - jQuery.get(url, data, function(response) { - callback(evalJSON(response)); - }); - }; - } - var self = this; // closure - method(dataurl, null, function(data) { - // in case we received a list of couple, we assume that the first - // element is the real value to be sent, and the second one is the - // value to be displayed - if (data.length && data[0].length == 2) { - options.formatItem = function(row) { return row[1]; }; - self.hideRealValue(node); - self.setCurrentValue(node, data); - } - jQuery(node).autocomplete(data, options); - }); + if (options.method == 'get') { + method = function(url, data, callback) { + // We can't rely on jQuery.getJSON because the server + // might set the Content-Type's response header to 'text/plain' + jQuery.get(url, data, function(response) { + callback(cw.evalJSON(response)); + }); + }; + } + var self = this; // closure + method(dataurl, null, function(data) { + // in case we received a list of couple, we assume that the first + // element is the real value to be sent, and the second one is the + // value to be displayed + if (data.length && data[0].length == 2) { + options.formatItem = function(row) { + return row[1]; + }; + self.hideRealValue(node); + self.setCurrentValue(node, data); + } + jQuery(node).autocomplete(data, options); + }); }, hideRealValue: function(node) { - var hidden = INPUT({'type': "hidden", 'name': node.name, 'value': node.value}); - node.parentNode.appendChild(hidden); - // remove 'name' attribute from visible input so that it is not submitted - // and set correct value in the corresponding hidden field - jQuery(node).removeAttr('name').bind('result', function(_, row, _) { - hidden.value = row[0]; - }); + var hidden = INPUT({ + 'type': "hidden", + 'name': node.name, + 'value': node.value + }); + node.parentNode.appendChild(hidden); + // remove 'name' attribute from visible input so that it is not submitted + // and set correct value in the corresponding hidden field + jQuery(node).removeAttr('name').bind('result', function(_, row, _) { + hidden.value = row[0]; + }); }, setCurrentValue: function(node, data) { - // called when the data is loaded to reset the correct displayed - // value in the visible input field (typically replacing an eid - // by a displayable value) - var curvalue = node.value; - if (!node.value) { - return; - } - for (var i=0,length=data.length; i