# HG changeset patch # User Sylvain Thénault # Date 1279546622 -7200 # Node ID 97c55baefa0c306c995eb6e4e41009f3a1007ee9 # Parent 00b1b6b906cf8e014a0778eddaba964a696b6e20# Parent 5f9a9086c17163429dab737b301299920fcdd79e backport default into stable. STABLE IS NOW 3.9, default 3.10 diff -r 00b1b6b906cf -r 97c55baefa0c .hgtags --- a/.hgtags Thu Jul 15 12:03:13 2010 +0200 +++ b/.hgtags Mon Jul 19 15:37:02 2010 +0200 @@ -135,5 +135,11 @@ 5d05b08adeab1ea301e49ed8537e35ede6db92f6 cubicweb-debian-version-3.8.5-1 1a24c62aefc5e57f61be3d04affd415288e81904 cubicweb-version-3.8.6 607a90073911b6bb941a49b5ec0b0d2a9cd479af cubicweb-debian-version-3.8.6-1 +d9936c39d478b6701a4adef17bc28888ffa011c6 cubicweb-version-3.9.0 +eda4940ffef8b7d36127e68de63a52388374a489 cubicweb-debian-version-3.9.0-1 a1a334d934390043a4293a4ee42bdceb1343246e cubicweb-version-3.8.7 1cccf88d6dfe42986e1091de4c364b7b5814c54f cubicweb-debian-version-3.8.7-1 +4d75f743ed49dd7baf8bde7b0e475244933fa08e cubicweb-version-3.9.1 +9bd75af3dca36d7be5d25fc5ab1b89b34c811456 cubicweb-debian-version-3.9.1-1 +e51796b9caf389c224c6f66dcb8aa75bf1b82eff cubicweb-version-3.9.2 +8a23821dc1383e14a7e92a931b91bc6eed4d0af7 cubicweb-debian-version-3.9.2-1 diff -r 00b1b6b906cf -r 97c55baefa0c MANIFEST.in --- a/MANIFEST.in Thu Jul 15 12:03:13 2010 +0200 +++ b/MANIFEST.in Mon Jul 19 15:37:02 2010 +0200 @@ -5,13 +5,14 @@ include bin/cubicweb-* include man/cubicweb-ctl.1 -recursive-include doc README makefile *.conf *.py *.rst *.txt *.html *.png *.svg *.zargo *.dia +recursive-include doc README makefile *.conf *.css *.py *.rst *.txt *.html *.png *.svg *.zargo *.dia recursive-include misc *.py *.png *.display include web/views/*.pt recursive-include web/data external_resources *.js *.css *.py *.png *.gif *.ico *.ttf recursive-include web/wdoc *.rst *.png *.xml ChangeLog* +recursive-include devtools/data *.js *.css recursive-include i18n *.pot *.po recursive-include schemas *.py *.sql @@ -21,10 +22,15 @@ recursive-include sobjects/test/data bootstrap_cubes *.py recursive-include hooks/test/data bootstrap_cubes *.py recursive-include server/test/data bootstrap_cubes *.py source* -recursive-include web/test/data bootstrap_cubes *.py -recursive-include devtools/test/data bootstrap_cubes *.py *.txt +recursive-include devtools/test/data bootstrap_cubes *.py *.txt *.js +recursive-include web/test/data bootstrap_cubes pouet.css *.py + +recursive-include web/test/jstests *.js *.html *.css *.json +recursive-include web/test/windmill *.py recursive-include skeleton *.py *.css *.js *.po compat *.in *.tmpl +prune doc/book/en/.static/ +prune doc/book/fr/.static/ prune misc/cwfs prune goa diff -r 00b1b6b906cf -r 97c55baefa0c __init__.py --- a/__init__.py Thu Jul 15 12:03:13 2010 +0200 +++ b/__init__.py Mon Jul 19 15:37:02 2010 +0200 @@ -17,8 +17,8 @@ # with CubicWeb. If not, see . """CubicWeb is a generic framework to quickly build applications which describes relations between entitites. 
+""" -""" __docformat__ = "restructuredtext en" # ignore the pygments UserWarnings diff -r 00b1b6b906cf -r 97c55baefa0c __pkginfo__.py --- a/__pkginfo__.py Thu Jul 15 12:03:13 2010 +0200 +++ b/__pkginfo__.py Mon Jul 19 15:37:02 2010 +0200 @@ -22,7 +22,7 @@ modname = distname = "cubicweb" -numversion = (3, 8, 7) +numversion = (3, 9, 2) version = '.'.join(str(num) for num in numversion) description = "a repository of entities / relations for knowledge management" @@ -41,9 +41,9 @@ __depends__ = { 'logilab-common': '>= 0.50.2', - 'logilab-mtconverter': '>= 0.6.0', + 'logilab-mtconverter': '>= 0.8.0', 'rql': '>= 0.26.2', - 'yams': '>= 0.28.1', + 'yams': '>= 0.29.1', 'docutils': '>= 0.6', #gettext # for xgettext, msgcat, etc... # web dependancies @@ -52,7 +52,7 @@ 'Twisted': '', # XXX graphviz # server dependencies - 'logilab-database': '>= 1.0.5', + 'logilab-database': '>= 1.1.0', 'pysqlite': '>= 2.5.5', # XXX install pysqlite2 } @@ -77,6 +77,7 @@ join('server', 'test', 'data'), join('hooks', 'test', 'data'), join('web', 'test', 'data'), + join('devtools', 'data'), join('devtools', 'test', 'data'), 'schemas', 'skeleton'] diff -r 00b1b6b906cf -r 97c55baefa0c appobject.py --- a/appobject.py Thu Jul 15 12:03:13 2010 +0200 +++ b/appobject.py Mon Jul 19 15:37:02 2010 +0200 @@ -39,6 +39,92 @@ from logilab.common.decorators import classproperty from logilab.common.logging_ext import set_log_methods +from cubicweb.cwconfig import CubicWebConfiguration + +def class_regid(cls): + """returns a unique identifier for an appobject class""" + if 'id' in cls.__dict__: + warn('[3.6] %s.%s: id is deprecated, use __regid__' + % (cls.__module__, cls.__name__), DeprecationWarning) + cls.__regid__ = cls.id + if hasattr(cls, 'id') and not isinstance(cls.id, property): + return cls.id + return cls.__regid__ + +# helpers for debugging selectors +TRACED_OIDS = None + +def _trace_selector(cls, selector, args, ret): + # /!\ lltrace decorates pure function or __call__ method, this + # means argument order may be different + if isinstance(cls, Selector): + selname = str(cls) + vobj = args[0] + else: + selname = selector.__name__ + vobj = cls + if TRACED_OIDS == 'all' or class_regid(vobj) in TRACED_OIDS: + #SELECTOR_LOGGER.warning('selector %s returned %s for %s', selname, ret, cls) + print '%s -> %s for %s(%s)' % (selname, ret, vobj, vobj.__regid__) + +def lltrace(selector): + """use this decorator on your selectors so the becomes traceable with + :class:`traced_selection` + """ + # don't wrap selectors if not in development mode + if CubicWebConfiguration.mode == 'system': # XXX config.debug + return selector + def traced(cls, *args, **kwargs): + ret = selector(cls, *args, **kwargs) + if TRACED_OIDS is not None: + _trace_selector(cls, selector, args, ret) + return ret + traced.__name__ = selector.__name__ + traced.__doc__ = selector.__doc__ + return traced + +class traced_selection(object): + """ + Typical usage is : + + .. sourcecode:: python + + >>> from cubicweb.selectors import traced_selection + >>> with traced_selection(): + ... # some code in which you want to debug selectors + ... # for all objects + + Don't forget the 'from __future__ import with_statement' at the module top-level + if you're using python prior to 2.6. + + This will yield lines like this in the logs:: + + selector one_line_rset returned 0 for + + You can also give to :class:`traced_selection` the identifiers of objects on + which you want to debug selection ('oid1' and 'oid2' in the example above). + + .. 
sourcecode:: python + + >>> with traced_selection( ('regid1', 'regid2') ): + ... # some code in which you want to debug selectors + ... # for objects with __regid__ 'regid1' and 'regid2' + + A potentially usefull point to set up such a tracing function is + the `cubicweb.vregistry.Registry.select` method body. + """ + + def __init__(self, traced='all'): + self.traced = traced + + def __enter__(self): + global TRACED_OIDS + TRACED_OIDS = self.traced + + def __exit__(self, exctype, exc, traceback): + global TRACED_OIDS + TRACED_OIDS = None + return traceback is None # selector base classes and operations ######################################## @@ -175,6 +261,7 @@ class AndSelector(MultiSelector): """and-chained selectors (formerly known as chainall)""" + @lltrace def __call__(self, cls, *args, **kwargs): score = 0 for selector in self.selectors: @@ -187,6 +274,7 @@ class OrSelector(MultiSelector): """or-chained selectors (formerly known as chainfirst)""" + @lltrace def __call__(self, cls, *args, **kwargs): for selector in self.selectors: partscore = selector(cls, *args, **kwargs) @@ -199,6 +287,7 @@ def __init__(self, selector): self.selector = selector + @lltrace def __call__(self, cls, *args, **kwargs): score = self.selector(cls, *args, **kwargs) return int(not score) diff -r 00b1b6b906cf -r 97c55baefa0c cwconfig.py --- a/cwconfig.py Thu Jul 15 12:03:13 2010 +0200 +++ b/cwconfig.py Mon Jul 19 15:37:02 2010 +0200 @@ -296,8 +296,6 @@ # log_format = '%(asctime)s - [%(threadName)s] (%(name)s) %(levelname)s: %(message)s' # nor remove appobjects based on unused interface [???] cleanup_interface_sobjects = True - # debug mode - debugmode = False if (CWDEV and _forced_mode != 'system'): @@ -499,6 +497,13 @@ deps = dict((key, None) for key in deps) warn('[3.8] cube %s should define %s as a dict' % (cube, key), DeprecationWarning) + for depcube in deps: + try: + newname = CW_MIGRATION_MAP[depcube] + except KeyError: + pass + else: + deps[newname] = deps.pop(depcube) return deps @classmethod @@ -518,17 +523,17 @@ """ cubes = list(cubes) todo = cubes[:] + if with_recommends: + available = set(cls.available_cubes()) while todo: cube = todo.pop(0) for depcube in cls.cube_dependencies(cube): if depcube not in cubes: - depcube = CW_MIGRATION_MAP.get(depcube, depcube) cubes.append(depcube) todo.append(depcube) if with_recommends: for depcube in cls.cube_recommends(cube): - if depcube not in cubes: - depcube = CW_MIGRATION_MAP.get(depcube, depcube) + if depcube not in cubes and depcube in available: cubes.append(depcube) todo.append(depcube) return cubes @@ -663,12 +668,14 @@ vregpath.append(path + '.py') return vregpath - def __init__(self): + def __init__(self, debugmode=False): register_stored_procedures() ConfigurationMixIn.__init__(self) + self.debugmode = debugmode self.adjust_sys_path() self.load_defaults() - self.translations = {} + # will be properly initialized later by _gettext_init + self.translations = {'en': (unicode, lambda ctx, msgid: unicode(msgid) )} self._site_loaded = set() # don't register ReStructured Text directives by simple import, avoid pb # with eg sphinx. 
@@ -684,25 +691,23 @@ # overriden in CubicWebConfiguration self.cls_adjust_sys_path() - def init_log(self, logthreshold=None, debug=False, - logfile=None, syslog=False): + def init_log(self, logthreshold=None, logfile=None, syslog=False): """init the log service""" if logthreshold is None: - if debug: + if self.debugmode: logthreshold = 'DEBUG' else: logthreshold = self['log-threshold'] - self.debugmode = debug if sys.platform == 'win32': # no logrotate on win32, so use logging rotation facilities # for now, hard code weekly rotation every sunday, and 52 weeks kept # idea: make this configurable? - init_log(debug, syslog, logthreshold, logfile, self.log_format, + init_log(self.debugmode, syslog, logthreshold, logfile, self.log_format, rotation_parameters={'when': 'W6', # every sunday 'interval': 1, 'backupCount': 52}) else: - init_log(debug, syslog, logthreshold, logfile, self.log_format) + init_log(self.debugmode, syslog, logthreshold, logfile, self.log_format) # configure simpleTal logger logging.getLogger('simpleTAL').setLevel(logging.ERROR) @@ -844,12 +849,12 @@ return mdir @classmethod - def config_for(cls, appid, config=None): + def config_for(cls, appid, config=None, debugmode=False): """return a configuration instance for the given instance identifier """ config = config or guess_configuration(cls.instance_home(appid)) configcls = configuration_cls(config) - return configcls(appid) + return configcls(appid, debugmode) @classmethod def possible_configurations(cls, appid): @@ -909,17 +914,21 @@ """return default path to the pid file of the instance'server""" if self.mode == 'system': # XXX not under _INSTALL_PREFIX, right? - rtdir = env_path('CW_RUNTIME_DIR', '/var/run/cubicweb/', 'run time') + default = '/var/run/cubicweb/' else: import tempfile - rtdir = env_path('CW_RUNTIME_DIR', tempfile.gettempdir(), 'run time') + default = tempfile.gettempdir() + # runtime directory created on startup if necessary, don't check it + # exists + rtdir = env_path('CW_RUNTIME_DIR', default, 'run time', + checkexists=False) return join(rtdir, '%s-%s.pid' % (self.appid, self.name)) # instance methods used to get instance specific resources ############# - def __init__(self, appid): + def __init__(self, appid, debugmode=False): self.appid = appid - CubicWebNoAppConfiguration.__init__(self) + CubicWebNoAppConfiguration.__init__(self, debugmode) self._cubes = None self.load_file_configuration(self.main_config_file()) @@ -986,6 +995,29 @@ """write down current configuration""" self.generate_config(open(self.main_config_file(), 'w')) + def check_writeable_uid_directory(self, path): + """check given directory path exists, belongs to the user running the + server process and is writeable. + + If not, try to fix this, leting exception propagate when not possible. 
+ """ + if not exists(path): + os.makedirs(path) + if self['uid']: + try: + uid = int(self['uid']) + except ValueError: + from pwd import getpwnam + uid = getpwnam(self['uid']).pw_uid + else: + uid = os.getuid() + fstat = os.stat(path) + if fstat.st_uid != uid: + os.chown(path, uid, os.getgid()) + import stat + if not (fstat.st_mode & stat.S_IWUSR): + os.chmod(path, fstat.st_mode | stat.S_IWUSR) + @cached def instance_md5_version(self): import hashlib @@ -1000,7 +1032,7 @@ super(CubicWebConfiguration, self).load_configuration() if self.apphome and self.set_language: # init gettext - self._set_language() + self._gettext_init() def _load_site_cubicweb(self, sitefile): # overriden to register cube specific options @@ -1009,12 +1041,12 @@ self.register_options(mod.options) self.load_defaults() - def init_log(self, logthreshold=None, debug=False, force=False): + def init_log(self, logthreshold=None, force=False): """init the log service""" if not force and hasattr(self, '_logging_initialized'): return self._logging_initialized = True - CubicWebNoAppConfiguration.init_log(self, logthreshold, debug, + CubicWebNoAppConfiguration.init_log(self, logthreshold, logfile=self.get('log-file')) # read a config file if it exists logconfig = join(self.apphome, 'logging.conf') @@ -1035,7 +1067,7 @@ if lang != 'en': yield lang - def _set_language(self): + def _gettext_init(self): """set language for gettext""" from gettext import translation path = join(self.apphome, 'i18n') @@ -1115,6 +1147,7 @@ def register_stored_procedures(): from logilab.database import FunctionDescr from rql.utils import register_function, iter_funcnode_variables + from rql.nodes import SortTerm, Constant, VariableRef global _EXT_REGISTERED if _EXT_REGISTERED: @@ -1160,6 +1193,34 @@ register_function(TEXT_LIMIT_SIZE) + class FTIRANK(FunctionDescr): + """return ranking of a variable that must be used as some has_text + relation subject in the query's restriction. Usually used to sort result + of full-text search by ranking. + """ + supported_backends = ('postgres',) + rtype = 'Float' + + def st_check_backend(self, backend, funcnode): + """overriden so that on backend not supporting fti ranking, the + function is removed when in an orderby clause, or replaced by a 1.0 + constant. + """ + if not self.supports(backend): + parent = funcnode.parent + while parent is not None and not isinstance(parent, SortTerm): + parent = parent.parent + if isinstance(parent, SortTerm): + parent.parent.remove(parent) + else: + funcnode.parent.replace(funcnode, Constant(1.0, 'Float')) + parent = funcnode + for vref in parent.iget_nodes(VariableRef): + vref.unregister_reference() + + register_function(FTIRANK) + + class FSPATH(FunctionDescr): """return path of some bytes attribute stored using the Bytes File-System Storage (bfss) diff -r 00b1b6b906cf -r 97c55baefa0c cwctl.py --- a/cwctl.py Thu Jul 15 12:03:13 2010 +0200 +++ b/cwctl.py Mon Jul 19 15:37:02 2010 +0200 @@ -17,9 +17,8 @@ # with CubicWeb. If not, see . """the cubicweb-ctl tool, based on logilab.common.clcommands to provide a pluggable commands system. 
- +""" -""" __docformat__ = "restructuredtext en" # *ctl module should limit the number of import to be imported as quickly as @@ -477,23 +476,23 @@ def start_instance(self, appid): """start the instance's server""" - debug = self['debug'] - force = self['force'] - loglevel = self['loglevel'] - config = cwcfg.config_for(appid) - if loglevel is not None: - loglevel = 'LOG_%s' % loglevel.upper() - config.global_set_option('log-threshold', loglevel) - config.init_log(loglevel, debug=debug, force=True) + config = cwcfg.config_for(appid, debugmode=self['debug']) + init_cmdline_log_threshold(config, self['loglevel']) if self['profile']: config.global_set_option('profile', self.config.profile) helper = self.config_helper(config, cmdname='start') pidf = config['pid-file'] - if exists(pidf) and not force: + if exists(pidf) and not self['force']: msg = "%s seems to be running. Remove %s by hand if necessary or use \ the --force option." raise ExecutionError(msg % (appid, pidf)) - helper.start_server(config, debug) + helper.start_server(config) + + +def init_cmdline_log_threshold(config, loglevel): + if loglevel is not None: + config.global_set_option('log-threshold', loglevel.upper()) + config.init_log(config['log-threshold'], force=True) class StopInstanceCommand(InstanceCommand): @@ -788,11 +787,15 @@ repository internals (session, etc...) so most migration commands won't be available. + Arguments after bare "--" string will not be processed by the shell command + You can use it to pass extra arguments to your script and expect for + them in '__args__' afterwards. + the identifier of the instance to connect. """ name = 'shell' - arguments = ' [batch command file]' + arguments = ' [batch command file(s)] [-- + + ''' + % data] + if server_data is not None: + host, port = server_data + html.append('') + html.append('') + html.append('') + + for dep in depends: + html.append(' ' % file_path(dep)) + + html.append(' ') + html.append(' '% (file_path(test_file),)) + html.append(''' + +
+    QUnit example
    + +''') + return u'\n'.join(html) + + + + + + + +if __name__ == '__main__': + unittest_main() diff -r 00b1b6b906cf -r 97c55baefa0c devtools/repotest.py --- a/devtools/repotest.py Thu Jul 15 12:03:13 2010 +0200 +++ b/devtools/repotest.py Mon Jul 19 15:37:02 2010 +0200 @@ -18,8 +18,8 @@ """some utilities to ease repository testing This module contains functions to initialize a new repository. +""" -""" __docformat__ = "restructuredtext en" from pprint import pprint @@ -134,24 +134,32 @@ schema._eid_index[rdef.eid] = rdef -from logilab.common.testlib import TestCase +from logilab.common.testlib import TestCase, mock_object +from logilab.database import get_db_helper + from rql import RQLHelper + from cubicweb.devtools.fake import FakeRepo, FakeSession from cubicweb.server import set_debug from cubicweb.server.querier import QuerierHelper from cubicweb.server.session import Session -from cubicweb.server.sources.rql2sql import remove_unused_solutions +from cubicweb.server.sources.rql2sql import SQLGenerator, remove_unused_solutions class RQLGeneratorTC(TestCase): - schema = None # set this in concret test + schema = backend = None # set this in concret test def setUp(self): self.repo = FakeRepo(self.schema) + self.repo.system_source = mock_object(dbdriver=self.backend) self.rqlhelper = RQLHelper(self.schema, special_relations={'eid': 'uid', - 'has_text': 'fti'}) + 'has_text': 'fti'}, + backend=self.backend) self.qhelper = QuerierHelper(self.repo, self.schema) ExecutionPlan._check_permissions = _dummy_check_permissions rqlannotation._select_principal = _select_principal + if self.backend is not None: + dbhelper = get_db_helper(self.backend) + self.o = SQLGenerator(self.schema, dbhelper) def tearDown(self): ExecutionPlan._check_permissions = _orig_check_permissions @@ -270,6 +278,7 @@ self.system = self.sources[-1] do_monkey_patch() self._dumb_sessions = [] # by hi-jacked parent setup + self.repo.vreg.rqlhelper.backend = 'postgres' # so FTIRANK is considered def add_source(self, sourcecls, uri): self.sources.append(sourcecls(self.repo, self.o.schema, diff -r 00b1b6b906cf -r 97c55baefa0c devtools/test/data/dbfill.conf --- a/devtools/test/data/dbfill.conf Thu Jul 15 12:03:13 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,22 +0,0 @@ -[BASE] -APPLICATION_SCHEMA = /home/adim/cvs_work/soft_prive/ginco/applications/crm/schema -APPLICATION_HOME = /home/adim/etc/erudi.d/crmadim # ??? 
-FAKEDB_NAME = crmtest -ENCODING = UTF-8 -HOST = crater -USER = adim -PASSWORD = adim - - -[ENTITIES] -default = 20 #means default is 20 entities -Person = 10 # means 10 Persons -Company = 5# means 5 companies - - -[RELATIONS] -Person works_for Company = 4 -Division subsidiary_of Company = 3 - -[DEFAULT_VALUES] -Person.firstname = data/firstnames.txt diff -r 00b1b6b906cf -r 97c55baefa0c devtools/test/data/js_examples/dep_1.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/dep_1.js Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,1 @@ +a = 4; diff -r 00b1b6b906cf -r 97c55baefa0c devtools/test/data/js_examples/deps_2.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/deps_2.js Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,1 @@ +b = a +2; diff -r 00b1b6b906cf -r 97c55baefa0c devtools/test/data/js_examples/test_simple_failure.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/test_simple_failure.js Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,18 @@ +$(document).ready(function() { + + module("air"); + + test("test 1", function() { + equals(2, 4); + }); + + test("test 2", function() { + equals('', '45'); + equals('1024', '32'); + }); + + module("able"); + test("test 3", function() { + same(1, 1); + }); +}); diff -r 00b1b6b906cf -r 97c55baefa0c devtools/test/data/js_examples/test_simple_success.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/test_simple_success.js Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,17 @@ +$(document).ready(function() { + + module("air"); + + test("test 1", function() { + equals(2, 2); + }); + + test("test 2", function() { + equals('45', '45'); + }); + + module("able"); + test("test 3", function() { + same(1, 1); + }); +}); diff -r 00b1b6b906cf -r 97c55baefa0c devtools/test/data/js_examples/test_with_dep.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/test_with_dep.js Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,9 @@ +$(document).ready(function() { + + module("air"); + + test("test 1", function() { + equals(a, 4); + }); + +}); diff -r 00b1b6b906cf -r 97c55baefa0c devtools/test/data/js_examples/test_with_ordered_deps.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/test_with_ordered_deps.js Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,9 @@ +$(document).ready(function() { + + module("air"); + + test("test 1", function() { + equals(b, 6); + }); + +}); diff -r 00b1b6b906cf -r 97c55baefa0c devtools/test/data/js_examples/utils.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/utils.js Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,29 @@ +function datetuple(d) { + return [d.getFullYear(), d.getMonth()+1, d.getDate(), + d.getHours(), d.getMinutes()]; +} + +function pprint(obj) { + print('{'); + for(k in obj) { + print(' ' + k + ' = ' + obj[k]); + } + print('}'); +} + +function arrayrepr(array) { + return '[' + array.join(', ') + ']'; +} + +function assertArrayEquals(array1, array2) { + if (array1.length != array2.length) { + throw new crosscheck.AssertionFailure(array1.join(', ') + ' != ' + array2.join(', ')); + } + for (var i=0; i. -"""only for unit tests ! 
- -""" +"""only for unit tests !""" from cubicweb.view import EntityView -from cubicweb.selectors import implements +from cubicweb.selectors import is_instance HTML_PAGE = u""" @@ -31,7 +29,7 @@ class SimpleView(EntityView): __regid__ = 'simple' - __select__ = implements('Bug',) + __select__ = is_instance('Bug',) def call(self, **kwargs): self.cell_call(0, 0) @@ -41,7 +39,7 @@ class RaisingView(EntityView): __regid__ = 'raising' - __select__ = implements('Bug',) + __select__ = is_instance('Bug',) def cell_call(self, row, col): raise ValueError() diff -r 00b1b6b906cf -r 97c55baefa0c devtools/test/unittest_httptest.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/unittest_httptest.py Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,51 @@ +from logilab.common.testlib import TestCase, unittest_main, tag +from cubicweb.devtools.httptest import CubicWebServerTC + +import httplib +from os import path as osp + + +class TwistedCWAnonTC(CubicWebServerTC): + + def test_response(self): + try: + response = self.web_get() + except httplib.NotConnected, ex: + self.fail("Can't connection to test server: %s" % ex) + + def test_response_anon(self): + response = self.web_get() + self.assertEquals(response.status, httplib.OK) + + + def test_base_url(self): + if self.test_url not in self.web_get().read(): + self.fail('no mention of base url in retrieved page') + + +class TwistedCWIdentTC(CubicWebServerTC): + + anonymous_logged = False + + def test_response_denied(self): + response = self.web_get() + self.assertEquals(response.status, httplib.FORBIDDEN) + + def test_login(self): + response = self.web_get() + if response.status != httplib.FORBIDDEN: + self.skip('Already authenticated') + # login + self.web_login(self.admlogin, self.admpassword) + response = self.web_get() + self.assertEquals(response.status, httplib.OK) + # logout + self.web_logout() + response = self.web_get() + self.assertEquals(response.status, httplib.FORBIDDEN) + + + + +if __name__ == '__main__': + unittest_main() diff -r 00b1b6b906cf -r 97c55baefa0c devtools/test/unittest_qunit.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/unittest_qunit.py Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,31 @@ +from logilab.common.testlib import unittest_main +from cubicweb.devtools.qunit import make_qunit_html, QUnitTestCase + +from os import path as osp + +JSTESTDIR = osp.abspath(osp.join(osp.dirname(__file__), 'data', 'js_examples')) + + +def js(name): + return osp.join(JSTESTDIR, name) + +class QUnitTestCaseTC(QUnitTestCase): + + all_js_tests = ( + (js('test_simple_success.js'),), + (js('test_with_dep.js'), (js('dep_1.js'),)), + (js('test_with_ordered_deps.js'), (js('dep_1.js'), js('deps_2.js'),)), + ) + + + def test_simple_failure(self): + js_tests = list(self._test_qunit(js('test_simple_failure.js'))) + self.assertEquals(len(js_tests), 3) + test_1, test_2, test_3 = js_tests + self.assertRaises(self.failureException, test_1[0], *test_1[1:]) + self.assertRaises(self.failureException, test_2[0], *test_2[1:]) + test_3[0](*test_3[1:]) + + +if __name__ == '__main__': + unittest_main() diff -r 00b1b6b906cf -r 97c55baefa0c devtools/testlib.py --- a/devtools/testlib.py Thu Jul 15 12:03:13 2010 +0200 +++ b/devtools/testlib.py Mon Jul 19 15:37:02 2010 +0200 @@ -31,7 +31,7 @@ import yams.schema -from logilab.common.testlib import TestCase, InnerTest +from logilab.common.testlib import TestCase, InnerTest, Tags from logilab.common.pytest import nocoverage, pause_tracing, resume_tracing from logilab.common.debugger import Debugger from 
logilab.common.umessage import message_from_string @@ -163,6 +163,7 @@ appid = 'data' configcls = devtools.ApptestConfiguration reset_schema = reset_vreg = False # reset schema / vreg between tests + tags= TestCase.tags | Tags('cubicweb', 'cw_repo') @classproperty def config(cls): @@ -313,7 +314,7 @@ req.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)' % ','.join(repr(g) for g in groups), {'x': user.eid}) - user.clear_related_cache('in_group', 'subject') + user.cw_clear_relation_cache('in_group', 'subject') if commit: req.cnx.commit() return user @@ -633,10 +634,10 @@ view = viewsreg.select(vid, req, **kwargs) # set explicit test description if rset is not None: - self.set_description("testing %s, mod=%s (%s)" % ( + self.set_description("testing vid=%s defined in %s with (%s)" % ( vid, view.__module__, rset.printable_rql())) else: - self.set_description("testing %s, mod=%s (no rset)" % ( + self.set_description("testing vid=%s defined in %s without rset" % ( vid, view.__module__)) if template is None: # raw view testing, no template viewfunc = view.render @@ -704,7 +705,7 @@ validatorclass = self.content_type_validators.get(view.content_type, default_validator) if validatorclass is None: - return None + return output.strip() validator = validatorclass() if isinstance(validator, htmlparser.DTDValidator): # XXX remove used in progress widget, unknown in html dtd @@ -786,6 +787,8 @@ """base class for test with auto-populating of the database""" __abstract__ = True + tags = CubicWebTC.tags | Tags('autopopulated') + pdbclass = CubicWebDebugger # this is a hook to be able to define a list of rql queries # that are application dependent and cannot be guessed automatically @@ -911,6 +914,9 @@ class AutomaticWebTest(AutoPopulateTest): """import this if you wan automatic tests to be ran""" + + tags = AutoPopulateTest.tags | Tags('web', 'generated') + def setUp(self): AutoPopulateTest.setUp(self) # access to self.app for proper initialization of the authentication diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/annexes/depends.rst --- a/doc/book/en/annexes/depends.rst Thu Jul 15 12:03:13 2010 +0200 +++ b/doc/book/en/annexes/depends.rst Mon Jul 19 15:37:02 2010 +0200 @@ -27,6 +27,9 @@ * logilab-common - http://www.logilab.org/project/logilab-common - http://pypi.python.org/pypi/logilab-common/ - included in the forest +* logilab-database - http://www.logilab.org/project/logilab-database - + http://pypi.python.org/pypi/logilab-database/ - included in the forest + * logilab-constraint - http://www.logilab.org/project/logilab-constraint - http://pypi.python.org/pypi/constraint/ - included in the forest @@ -44,7 +47,7 @@ To use network communication between cubicweb instances / clients: -* Pyro - http://pyro.sourceforge.net/ - http://pypi.python.org/pypi/Pyro +* Pyro - http://www.xs4all.nl/~irmen/pyro3/ - http://pypi.python.org/pypi/Pyro If you're using a Postgres database (recommended): diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/annexes/docstrings-conventions.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/en/annexes/docstrings-conventions.rst Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,106 @@ +Javascript docstrings +===================== + +Whereas in Python source code we only need to include a module docstrings +using the directive `.. automodule:: mypythonmodule`, we will have to +explicitely define Javascript modules and functions in the doctrings since +there is no native directive to include Javascript files. 
+ +Rest generation +--------------- + +`pyjsrest` is a small utility parsing Javascript doctrings and generating the +corresponding Restructured file used by Sphinx to generate HTML documentation. +This script will have the following structure:: + + =========== + filename.js + =========== + .. module:: filename.js + +We use the `.. module::` directive to register a javascript library +as a Python module for Sphinx. This provides an entry in the module index. + +The contents of the docstring found in the javascript file will be added as is +following the module declaration. No treatment will be done on the doctring. +All the documentation structure will be in the docstrings and will comply +with the following rules. + +Docstring structure +------------------- + +Basically we document javascript with RestructuredText docstring +following the same convention as documenting Python code. + +The doctring in Javascript files must be contained in standard +Javascript comment signs, starting with `/**` and ending with `*/`, +such as:: + + /** + * My comment starts here. + * This is the second line prefixed with a `*`. + * ... + * ... + * All the follwing line will be prefixed with a `*` followed by a space. + * ... + * ... + */ + + +Comments line prefixed by `//` will be ignored. They are reserved for source +code comments dedicated to developers. + + +Javscript functions docstring +----------------------------- + +By default, the `function` directive describes a module-level function. + +`function` directive +~~~~~~~~~~~~~~~~~~~~ + +Its purpose is to define the function prototype such as:: + + .. function:: loadxhtml(url, data, reqtype, mode) + +If any namespace is used, we should add it in the prototype for now, +until we define an appropriate directive. +:: + .. function:: jQuery.fn.loadxhtml(url, data, reqtype, mode) + +Function parameters +~~~~~~~~~~~~~~~~~~~ + +We will define function parameters as a bulleted list, where the +parameter name will be backquoted and followed by its description. + +Example of a javascript function docstring:: + + .. function:: loadxhtml(url, data, reqtype, mode) + + cubicweb loadxhtml plugin to make jquery handle xhtml response + + fetches `url` and replaces this's content with the result + + Its arguments are: + + * `url` + + * `mode`, how the replacement should be done (default is 'replace') + Possible values are : + - 'replace' to replace the node's content with the generated HTML + - 'swap' to replace the node itself with the generated HTML + - 'append' to append the generated HTML to the node's content + + +Optional parameter specification +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Javascript functions handle arguments not listed in the function signature. +In the javascript code, they will be flagged using `/* ... */`. In the docstring, +we flag those optional arguments the same way we would define it in +Python:: + + .. 
function:: asyncRemoteExec(fname, arg1=None, arg2=None) + + diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/annexes/faq.rst --- a/doc/book/en/annexes/faq.rst Thu Jul 15 12:03:13 2010 +0200 +++ b/doc/book/en/annexes/faq.rst Mon Jul 19 15:37:02 2010 +0200 @@ -115,7 +115,7 @@ from cubicweb import dbapi - cnx = dbapi.connection(database='instance-id', user='admin', password='admin') + cnx = dbapi.connect(database='instance-id', user='admin', password='admin') cur = cnx.cursor() for name in ('Personal', 'Professional', 'Computers'): cur.execute('INSERT Blog B: B name %s', name) @@ -302,10 +302,10 @@ import pwd import sys - from logilab.common.db import get_connection + from logilab.database import get_connection def getlogin(): - """avoid usinng os.getlogin() because of strange tty / stdin problems + """avoid using os.getlogin() because of strange tty/stdin problems (man 3 getlogin) Another solution would be to use $LOGNAME, $USER or $USERNAME """ @@ -402,6 +402,20 @@ mydb=> update cw_cwuser set cw_upassword='qHO8282QN5Utg' where cw_login='joe'; UPDATE 1 +You can prefer use a migration script similar to this shell invocation instead:: + + $ cubicweb-ctl shell + >>> from cubicweb.server.utils import crypt_password + >>> crypted = crypt_password('joepass') + >>> rset = rql('Any U WHERE U is CWUser, U login "joe"') + >>> joe = rset.get_entity(0,0) + >>> joe.set_attributes(upassword=crypted) + +The more experimented people would use RQL request directly:: + + >>> rql('SET X upassword %(a)s WHERE X is CWUser, X login "joe"', + ... {'a': crypted}) + I've just created a user in a group and it doesn't work ! --------------------------------------------------------- diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/annexes/index.rst --- a/doc/book/en/annexes/index.rst Thu Jul 15 12:03:13 2010 +0200 +++ b/doc/book/en/annexes/index.rst Mon Jul 19 15:37:02 2010 +0200 @@ -17,3 +17,5 @@ rql/index mercurial depends + javascript-api + docstrings-conventions diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/annexes/rql/Graph-ex.gif Binary file doc/book/en/annexes/rql/Graph-ex.gif has changed diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/annexes/rql/debugging.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/en/annexes/rql/debugging.rst Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,56 @@ +.. -*- coding: utf-8 -*- + +.. _DEBUGGING: + +Debugging RQL +------------- + +Available levels +~~~~~~~~~~~~~~~~ + +:DBG_NONE: + no debug information (current mode) + +:DBG_RQL: + rql execution information + +:DBG_SQL: + executed sql + +:DBG_REPO: + repository events + +:DBG_MS: + multi-sources + +:DBG_MORE: + more verbosity + +:DBG_ALL: + all level enabled + + +Enable verbose output +~~~~~~~~~~~~~~~~~~~~~ + +It may be interested to enable a verboser output to debug your RQL statements: + +.. sourcecode:: python + + from cubicweb import server + server.set_debug(server.DBG_RQL|server.DBG_SQL|server.DBG_ALL) + + +Detect largest RQL queries +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +See `Profiling and performance` chapter (see :ref:`PROFILING`). + + +API +~~~ + +.. autofunction:: cubicweb.server.set_debug + +.. autoclass:: cubicweb.server.debugged + diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/annexes/rql/index.rst --- a/doc/book/en/annexes/rql/index.rst Thu Jul 15 12:03:13 2010 +0200 +++ b/doc/book/en/annexes/rql/index.rst Mon Jul 19 15:37:02 2010 +0200 @@ -4,8 +4,9 @@ This chapter describes the Relation Query Language syntax and its implementation in CubicWeb. .. 
toctree:: - :maxdepth: 1 + :maxdepth: 2 intro language + debugging implementation diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/annexes/rql/intro.rst --- a/doc/book/en/annexes/rql/intro.rst Thu Jul 15 12:03:13 2010 +0200 +++ b/doc/book/en/annexes/rql/intro.rst Mon Jul 19 15:37:02 2010 +0200 @@ -7,8 +7,13 @@ Goals of RQL ~~~~~~~~~~~~ -The goal is to have a language making relations browsing easy. As -such, attributes will be regarded as cases of special relations (in +The goal is to have a semantic language in order to: + +- query relations in a clear syntax +- empowers access to data repository manipulation +- making attributes/relations browsing easy + +As such, attributes will be regarded as cases of special relations (in terms of usage, the user should see no syntactic difference between an attribute and a relation). @@ -40,6 +45,13 @@ conversion and basic types manipulation, which we may want to look at one time or another. Finally, the syntax is a little esoteric. +Datalog +``````` + +Datalog_ is a prolog derived query langage which applies to relational +databases. It is more expressive than RQL in that it accepts either +extensional_ and intensional_ predicates (or relations). As of now, +RQL only deals with intensional relations. The different types of queries ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -59,7 +71,91 @@ Remove entities or relations existing in the database. +RQL relation expressions +~~~~~~~~~~~~~~~~~~~~~~~~ + +RQL expressions apply to a live database defined by a +:ref:`datamodel_definition`. Apart from the main type, or head, of the +expression (search, insert, etc.) the most common constituent of an +RQL expression is a (set of) relation expression(s). + +An RQL relation expression contains three components: + +* the subject, which is an entity type +* the predicate, which is a relation definition (an arc of the schema) +* the object, which is either an attribute or a relation to another entity + +.. image:: Graph-ex.gif + :alt: + :align: center + +.. warning:: + + A relation is always expressed in the order: ``subject``, + ``predicate``, ``object``. + + It is important to determine if the entity type is subject or object + to construct a valid expression. Inverting the subject/object is an + error since the relation cannot be found in the schema. + + If one does not have access to the code, one can find the order by + looking at the schema image in manager views (the subject is located + at the beginning of the arrow). + +An example of two related relation expressions:: + + P works_for C, P name N + +RQL variables represent typed entities. The type of entities is +either automatically inferred (by looking at the possible relation +definitions, see :ref:`RelationDefinition`) or explicitely constrained +using the ``is`` meta relation. + +In the example above, we barely need to look at the schema. If +variable names (in the RQL expression) and relation type names (in the +schema) are expresssively designed, the human reader can infer as much +as the |cubicweb| querier. + +The ``P`` variable is used twice but it always represent the same set +of entities. Hence ``P works_for C`` and ``P name N`` must be +compatible in the sense that all the Ps (which *can* refer to +different entity types) must accept the ``works_for`` and ``name`` +relation types. This does restrict the set of possible values of P. 
+ +Adding another relation expression:: + + P works_for C, P name N, C name "logilab" + +This further restricts the possible values of P through an indirect +constraint on the possible values of ``C``. The RQL-level unification_ +happening there is translated to one (or several) joins_ at the +database level. + +.. note:: + + In |cubicweb|, the term `relation` is often found without ambiguity + instead of `predicate`. This predicate is also known as the + `property` of the triple in `RDF concepts`_ -.. _Versa: http://uche.ogbuji.net/tech/rdf/versa/ +RQL Operators +~~~~~~~~~~~~~ + +An RQL expression's head can be completed using various operators such +as ``ORDERBY``, ``GROUPBY``, ``HAVING``, ``LIMIT`` etc. + +RQL relation expressions can be grouped with ``UNION`` or +``WITH``. Predicate oriented keywords such as ``EXISTS``, ``OR``, +``NOT`` are available. + +The complete zoo of RQL operators is described extensively in the +following chapter (:ref:`RQL`). + +.. _RDF concepts: http://www.w3.org/TR/rdf-concepts/ +.. _Versa: http://wiki.xml3k.org/Versa .. _SPARQL: http://www.w3.org/TR/rdf-sparql-query/ +.. _unification: http://en.wikipedia.org/wiki/Unification_(computing) +.. _joins: http://en.wikipedia.org/wiki/Join_(SQL) +.. _Datalog: http://en.wikipedia.org/wiki/Datalog +.. _intensional: http://en.wikipedia.org/wiki/Intensional_definition +.. _extensional: http://en.wikipedia.org/wiki/Extension_(predicate_logic) diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/annexes/rql/language.rst --- a/doc/book/en/annexes/rql/language.rst Thu Jul 15 12:03:13 2010 +0200 +++ b/doc/book/en/annexes/rql/language.rst Mon Jul 19 15:37:02 2010 +0200 @@ -15,6 +15,7 @@ HAVING, ILIKE, IN, INSERT, LIKE, LIMIT, NOT, NOW, NULL, OFFSET, OR, ORDERBY, SET, TODAY, TRUE, UNION, WHERE, WITH + Variables and Typing ~~~~~~~~~~~~~~~~~~~~ @@ -29,10 +30,11 @@ There is a special type **Any**, referring to a non specific type. We can restrict the possible types for a variable using the -special relation **is**. +special relation **is** in the constraints. + The possible type(s) for each variable is derived from the schema -according to the constraints expressed above and thanks to the relations between -each variable. +according to the constraints expressed above and thanks to the relations +between each variable. Built-in types `````````````` @@ -63,7 +65,7 @@ of logical operators (see :ref:`PriorityOperators`). Mathematical Operators -``````````````````````` +`````````````````````` :: +, -, *, / @@ -74,7 +76,13 @@ =, <, <=, >=, >, ~=, IN, LIKE, ILIKE -* The operator `=` is the default operator. +* Syntax to use comparison operator: + + `VARIABLE relation operator VALUE` + +* The operator `=` is the default operator and can be omitted. + +* `relation` name is always attended * The operator `LIKE` equivalent to `~=` can be used with the special character `%` in a string to indicate that the chain @@ -89,7 +97,7 @@ * The operator `IN` provides a list of possible values: :: - Any X WHERE X name IN ( 'chauvat', 'fayolle', 'di mascio', 'thenault') + Any X WHERE X name IN ('chauvat', 'fayolle', 'di mascio', 'thenault') .. XXX nico: "A trick <> 'bar'" wouldn't it be more convenient than "NOT A trick 'bar'" ? @@ -100,16 +108,11 @@ `````````````````` 1. '*', '/' - 2. '+', '-' - -3. 'not' - -4 'and' - -5 'or' - -6 ',' +3. 'NOT' +4. 'AND' +5. 'OR' +6. ',' Search Query @@ -141,16 +144,39 @@ `````````````````` - For grouped queries (e.g. with a GROUPBY clause), all - selected variables should be grouped. 
+ selected variables should be grouped at the right of the keyword. -- To group and/or sort by attributes, we can do: "X,L user U, U - login L GROUPBY L, X ORDERBY L" +- To group and/or sort by attributes, we can do:: + + X,L user U, U login L GROUPBY L, X ORDERBY L - If the sorting method (SORT_METHOD) is not specified, then the sorting is - ascendant. + ascendant (`ASC`). - Aggregate Functions: COUNT, MIN, MAX, AVG, SUM +Having +`````` + +The HAVING clause, as in SQL, has been originally introduced to restrict a query according to value returned by an aggregate function, e.g.:: + + Any X GROUPBY X WHERE X relation Y HAVING COUNT(Y) > 10 + +It may however be used for something else... + +In the WHERE clause, we are limited to 3-expression_, such thing can't be expressed directly as in the SQL's way. But this can be expressed using HAVING comparison expression. + +For instance, let's say you want to get people whose uppercased first name equals to another person uppercased first name:: + + Person X WHERE X firstname XFN, Y firstname YFN HAVING X > Y, UPPER(XFN) = UPPER(YFN) + +This open some new possibilities. Another example:: + + Person X WHERE X birthday XB HAVING YEAR(XB) = 2000 + +That lets you use transformation functions not only in selection but for restriction as well and to by-pass limitation of the WHERE clause, which was the major flaw in the RQL language. + +Notice that while we would like this to work without the HAVING clause, this can't be currently be done because it introduces an ambiguity in RQL's grammar that can't be handled by Yapps_, the parser's generator we're using. Negation ```````` @@ -170,9 +196,8 @@ Any A WHERE A comments B, A identity B -return all objects that comment themselves. The relation -`identity` is especially useful when defining the rules for securities -with `RQLExpressions`. +return all objects that comment themselves. The relation `identity` is +especially useful when defining the rules for securities with `RQLExpressions`. Limit / offset @@ -181,13 +206,6 @@ Any P ORDERBY N LIMIT 5 OFFSET 10 WHERE P is Person, P firstname N -Function calls -`````````````` -:: - - Any UPPER(N) WHERE P firstname N - -Functions on string: UPPER, LOWER Exists `````` @@ -199,8 +217,14 @@ OR EXISTS(T tags X, T name "priority") -Optional relations (Left outer join) -```````````````````````````````````` +Optional relations +`````````````````` + +It is a similar concept that the `Left outer join`_: + + the result of a left outer join (or simply left join) for table A and B + always contains all records of the "left" table (A), even if the + join-condition does not find any matching record in the "right" table (B). * They allow you to select entities related or not to another. @@ -218,12 +242,6 @@ Any T,P,V WHERE T is Ticket, T concerns P, T done_in V? -Having -`````` -:: - - Any X GROUPBY X WHERE X knows Y HAVING COUNT(Y) > 10 - Subqueries `````````` :: @@ -234,16 +252,29 @@ DISTINCT Any W, REF WITH W, REF BEING ( - (Any W, REF WHERE W is Workcase, W ref REF, + (Any W, REF WHERE W is Workcase, W ref REF, W concerned_by D, D name "Logilab") UNION (Any W, REF WHERE W is Workcase, W ref REF, ' W split_into WP, WP name "WP1") ) +Function calls +`````````````` +:: + + Any UPPER(N) WHERE P firstname N + Any LOWER(N) WHERE P firstname N + +Functions available on string: `UPPER`, `LOWER` + +.. XXX retrieve available function automatically + +For a performance issue, you can enrich the RQL dialect by RDMS (Relational database management system) functions. 
+ Examples -```````` +~~~~~~~~ - *Search for the object of identifier 53* :: @@ -280,11 +311,11 @@ P is Person, (P interested_by T, T name 'training') OR (P city 'Paris') -- *The name and surname of all people* +- *The surname and firstname of all people* :: Any N, P WHERE - X is Person, X name N, X first_name P + X is Person, X name N, X firstname P Note that the selection of several entities generally force the use of "Any" because the type specification applies otherwise @@ -304,7 +335,7 @@ Insertion query -~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~ `INSERT` V1 (, V2) \ * `:` [ `WHERE` ] @@ -336,6 +367,7 @@ Update and relation creation queries ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + `SET` [ `WHERE` ] @@ -345,7 +377,7 @@ - *Renaming of the person named 'foo' to 'bar' with the first name changed* :: - SET X name 'bar', X first_name 'original' WHERE X is Person, X name 'foo' + SET X name 'bar', X firstname 'original' WHERE X is Person, X name 'foo' - *Insert a relation of type 'know' between objects linked by the relation of type 'friend'* @@ -356,6 +388,7 @@ Deletion query ~~~~~~~~~~~~~~ + `DELETE` ( V) | (V1 relation v2 ),... [ `WHERE` ] @@ -372,6 +405,7 @@ DELETE X friend Y WHERE X is Person, X name 'foo' + Virtual RQL relations ~~~~~~~~~~~~~~~~~~~~~ @@ -381,6 +415,13 @@ * `has_text`: relation to use to query the full text index (only for entities having fulltextindexed attributes). -* `identity`: relation to use to tell that a RQL variable should be +* `identity`: `Identity`_ relation to use to tell that a RQL variable should be the same as another (but you've to use two different rql variables for querying purpose) + +* `is`: relation to enforce possible types for a variable + + + +.. _Yapps: http://theory.stanford.edu/~amitp/yapps/ +.. _Left outer join: http://en.wikipedia.org/wiki/Join_(SQL)#Left_outer_join diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/devrepo/datamodel/definition.rst --- a/doc/book/en/devrepo/datamodel/definition.rst Thu Jul 15 12:03:13 2010 +0200 +++ b/doc/book/en/devrepo/datamodel/definition.rst Mon Jul 19 15:37:02 2010 +0200 @@ -1,5 +1,7 @@ .. -*- coding: utf-8 -*- +.. _datamodel_definition: + Yams *schema* ------------- @@ -11,6 +13,8 @@ .. _`Yams`: http://www.logilab.org/project/yams +.. _datamodel_overview: + Overview ~~~~~~~~ @@ -408,7 +412,7 @@ * special relations "has__permission" can not be used - +.. _yams_example: Defining your schema using yams ------------------------------- @@ -494,15 +498,15 @@ means that you need two separate entities that implement the `ITree` interface and get the result from `.children()` which ever entity is concerned. -Inheritance -``````````` -XXX feed me +.. Inheritance +.. ``````````` +.. XXX feed me Definition of relations ~~~~~~~~~~~~~~~~~~~~~~~ -XXX add note about defining relation type / definition +.. XXX add note about defining relation type / definition A relation is defined by a Python class heriting `RelationType`. The name of the class corresponds to the name of the type. The class then contains @@ -546,7 +550,7 @@ :Historical note: It has been historically possible to use `ObjectRelation` which - defines a relation in the opposite direction. This feature is soon to be + defines a relation in the opposite direction. This feature is deprecated and therefore should not be used in newly written code. 
:Future deprecation note: diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/devrepo/entityclasses/adapters.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/en/devrepo/entityclasses/adapters.rst Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,170 @@ +.. _adapters: + +Interfaces and Adapters +----------------------- + +Interfaces are the same thing as object-oriented programming +`interfaces`_. Adapter refers to a well-known `adapter`_ design +pattern that helps separating concerns in object oriented +applications. + +.. _`interfaces`: http://java.sun.com/docs/books/tutorial/java/concepts/interface.html +.. _`adapter`: http://en.wikipedia.org/wiki/Adapter_pattern + +In |cubicweb| adapters provide logical functionalities +to entity types. They are introduced in version `3.9`. Before that one +had to implements Interfaces in entity classes to achieve a similar goal. However, +hte problem with this approch is that is clutters the entity class's namespace, exposing +name collision risks with schema attributes/relations or even methods names +(different interfaces may define the same method with not necessarily the same +behaviour expected). + +Definition of an adapter is quite trivial. An excerpt from cubicweb +itself (found in :mod:`cubicweb.entities.adapters`): + +.. sourcecode:: python + + + class ITreeAdapter(EntityAdapter): + """This adapter has to be overriden to be configured using the + tree_relation, child_role and parent_role class attributes to + benefit from this default implementation + """ + __regid__ = 'ITree' + + child_role = 'subject' + parent_role = 'object' + + def children_rql(self): + """returns RQL to get children """ + return self.entity.cw_related_rql(self.tree_relation, self.parent_role) + +The adapter object has ``self.entity`` attribute which represents the +entity being adapted. + +Specializing and binding an adapter +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. sourcecode:: python + + from cubicweb.entities.adapters import ITreeAdapter + + class MyEntityITreeAdapter(ITreeAdapter): + __select__ = is_instance('MyEntity') + tree_relation = 'filed_under' + +The ITreeAdapter here provides a default implementation. The +tree_relation class attribute is actually used by this implementation +to help implement correct behaviour. + +Here we provide a specific implementation which will be bound for +``MyEntity`` entity type (the `adaptee`). + + +Selecting on an adapter +~~~~~~~~~~~~~~~~~~~~~~~ + +There is an ``adaptable`` selector which can be used instead of +``implements``. + +.. _interfaces_to_adapters: + +Converting code from Interfaces/Mixins to Adapters +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Here we go with a small example. Before: + +.. sourcecode:: python + + from cubicweb.selectors import implements + from cubicweb.interfaces import ITree + from cubicweb.mixins import ITreeMixIn + + class MyEntity(ITreeMixIn, AnyEntity): + __implements__ = AnyEntity.__implements__ + (ITree,) + + + class ITreeView(EntityView): + __select__ = implements('ITree') + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + children = entity.children() + +After: + +.. 
sourcecode:: python + + from cubicweb.selectors import adaptable, implements + from cubicweb.entities.adapters import ITreeAdapter + + class MyEntityITreeAdapter(ITreeAdapter): + __select__ = implements('MyEntity') + + class ITreeView(EntityView): + __select__ = adaptable('ITree') + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + itree = entity.cw_adapt_to('ITree') + children = itree.children() + +As we can see, the interface/mixin duality disappears and the entity +class itself is completely freed from these concerns. When you want +to use the ITree interface of an entity, call its `cw_adapt_to` method +to get an adapter for this interface, then access to members of the +interface on the adapter + +Let's look at an example where we defined everything ourselves. We +start from: + +.. sourcecode:: python + + class IFoo(Interface): + def bar(self, *args): + raise NotImplementedError + + class MyEntity(AnyEntity): + __regid__ = 'MyEntity' + __implements__ = AnyEntity.__implements__ + (IFoo,) + + def bar(self, *args): + return sum(captain.age for captain in self.captains) + + class FooView(EntityView): + __regid__ = 'mycube.fooview' + __select__ = implements('IFoo') + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + self.w('bar: %s' % entity.bar()) + +Converting to: + +.. sourcecode:: python + + class IFooAdapter(EntityAdapter): + __regid__ = 'IFoo' + __select__ = is_instance('MyEntity') + + def bar(self, *args): + return sum(captain.age for captain in self.entity.captains) + + class FooView(EntityView): + __regid__ = 'mycube.fooview' + __select__ = adaptable('IFoo') + + def cell_call(self, row, col): + entity = self.cw_rset.get_entity(row, col) + self.w('bar: %s' % entity.cw_adapt_to('IFoo').bar()) + +.. note:: + + When migrating an entity method to an adapter, the code can be moved as is + except for the `self` of the entity class, which in the adapter must become `self.entity`. + +Adapters defined in the library +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. automodule:: cubicweb.entities.adapters + :members: + +More are defined in web/views. diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/devrepo/entityclasses/application-logic.rst --- a/doc/book/en/devrepo/entityclasses/application-logic.rst Thu Jul 15 12:03:13 2010 +0200 +++ b/doc/book/en/devrepo/entityclasses/application-logic.rst Mon Jul 19 15:37:02 2010 +0200 @@ -1,5 +1,5 @@ -How to use entities objects ---------------------------- +How to use entities objects and adapters +---------------------------------------- The previous chapters detailed the classes and methods available to the developper at the so-called `ORM`_ level. However they say little @@ -7,9 +7,9 @@ .. _`ORM`: http://en.wikipedia.org/wiki/Object-relational_mapping -Entities objects are used in the repository and web sides of -CubicWeb. On the repository side of things, one should manipulate them -in Hooks and Operations. +Entities objects (and their adapters) are used in the repository and +web sides of CubicWeb. On the repository side of things, one should +manipulate them in Hooks and Operations. Hooks and Operations provide support for the implementation of rules such as computed attributes, coherency invariants, etc (they play the @@ -32,21 +32,22 @@ wire. There is no way state can be shared between these processes (there is a specific API for that). Hence, it is not possible to use entity objects as messengers between these components of an -application. 
It means that an attribute set as in `obj.x = 42`,
+application. It means that an attribute set as in ``obj.x = 42``,
whether or not x is actually an entity schema attribute, has a short life span, limited to the hook, operation or view within which the object was built.
Setting an attribute or relation value can be done in the context of a
-Hook/Operation, using the obj.set_attributes(x=42) notation or a plain
+Hook/Operation, using the obj.set_relations(x=42) notation or a plain
RQL SET expression.
In views, it would be preferable to encapsulate the necessary logic in
-a method of the concerned entity class(es). But of course, this advice
-is also reasonnable for Hooks/Operations, though the separation of
-concerns here is less stringent than in the case of views.
+a method of an adapter for the concerned entity class(es). But of
+course, this advice is also reasonable for Hooks/Operations, though
+the separation of concerns here is less stringent than in the case of
+views.
-This leads to the practical role of entity objects: it's where an
+This leads to the practical role of object adapters: it's where an
important part of the application logic lie (the other part being located in the Hook/Operations).
@@ -58,26 +59,31 @@
.. sourcecode:: python
- class Project(TreeMixIn, AnyEntity):
+ from cubicweb.entities.adapters import ITreeAdapter
+
+ class ProjectAdapter(ITreeAdapter):
+     __select__ = implements('Project')
+     tree_relation = 'subproject_of'
+
+ class Project(AnyEntity):
      __regid__ = 'Project'
-     __implements__ = AnyEntity.__implements__ + (ITree,)
      fetch_attrs, fetch_order = fetch_config(('name', 'description', 'description_format', 'summary'))
      TICKET_DEFAULT_STATE_RESTR = 'S name IN ("created","identified","released","scheduled")'
-     tree_attribute = 'subproject_of'
-     parent_target = 'subject'
-     children_target = 'object'
-     def dc_title(self): return self.name
-First we see that it uses an ITree interface and the TreeMixIn default
-implementation. The attributes `tree_attribute`, `parent_target` and
-`children_target` are used by the TreeMixIn code. This is typically
-used in views concerned with the representation of tree-like
-structures (CubicWeb provides several such views).
+The fact that the `Project` entity type implements an ``ITree``
+interface is materialized by the ``ProjectAdapter`` class (inheriting
+the pre-defined ``ITreeAdapter`` whose __regid__ is of course
+``ITree``), which will be selected on `Project` entity types because
+of its selector. On this adapter, we redefine the ``tree_relation``
+attribute of the ITreeAdapter class.
+
+This is typically used in views concerned with the representation of
+tree-like structures (CubicWeb provides several such views).
It is important that the views themselves try not to implement this logic, not only because such views would be hardly applyable to other
@@ -89,7 +95,17 @@
about the transitive closure of the child relation). This is a further argument to implement it at entity class level.
-The `dc_title` method provides a (unicode string) value likely to be
+The fetch_attrs, fetch_order class attributes are parameters of the
+`ORM`_ layer. They tell which attributes should be loaded at once on
+entity object instantiation (by default, only the eid is known, other
+attributes are loaded on demand), and which attribute is to be used to
+order the .related() and .unrelated() methods output.
+
+We can observe that the big TICKET_DEFAULT_STATE_RESTR is a pure
+application domain piece of data.
There is, of course, no limitation
+to the amount of class attributes of this kind.
+
+The ``dc_title`` method provides a (unicode string) value likely to be
consummed by views, but note that here we do not care about output encodings. We care about providing data in the most universal format possible, because the data could be used by a web view (which would be
@@ -97,17 +113,14 @@
oriented output (which would have the necessary context about the needed byte stream encoding).
-The fetch_attrs, fetch_order class attributes are parameters of the
-`ORM`_ layer. They tell which attributes should be loaded at once on
-entity object instantiation (by default, only the eid is known, other
-attributes are loaded on demand), and which attribute is to be used to
-order the .related() and .unrelated() methods output.
+.. note::
-Finally, we can observe the big TICKET_DEFAULT_STATE_RESTR is a pure
-application domain piece of data. There is, of course, no limitation
-to the amount of class attributes of this kind.
+ The Dublin Core `dc_xxx` methods are not moved to an adapter as they
+ are extremely prevalent in cubicweb and assorted cubes and should be
+ available for all entity types.
-Let us now dig into more substantial pieces of code.
+Let us now dig into more substantial pieces of code, continuing the
+Project class.
.. sourcecode:: python
@@ -151,7 +164,7 @@
* it is NOT concerned with database coherency (this is the realm of Hooks/Operations); in other words, it assumes a coherent world
-* it is NOT concerned with end-user interfaces
+* it is NOT (directly) concerned with end-user interfaces
* however it can be used in both contexts
diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/devrepo/entityclasses/data-as-objects.rst
--- a/doc/book/en/devrepo/entityclasses/data-as-objects.rst Thu Jul 15 12:03:13 2010 +0200
+++ b/doc/book/en/devrepo/entityclasses/data-as-objects.rst Mon Jul 19 15:37:02 2010 +0200
@@ -4,23 +4,22 @@
Python-level access to persistent data is provided by the :class:`Entity ` class.
-An entity class is bound to a schema entity type. Descriptors are added when
+.. XXX this part is not clear. refactor it.
+
+An entity class is bound to a schema entity type.
Descriptors are added when classes are registered in order to initialize the class according to its schema:
-* we can access the defined attributes in the schema thanks to the attributes of
-  the same name on instances (typed value)
+* the attributes defined in the schema appear as attributes of these classes
-* we can access the defined relations in the schema thanks to the relations of
-  the same name on instances (entities instances list)
-
+* the relations defined in the schema appear as attributes of these classes,
+  but are lists of instances
`Formatting and output generation`:
* `view(__vid, __registry='views', **kwargs)`, applies the given view to the entity (and returns an unicode string)
-* `absolute_url(*args, **kwargs)`, returns an absolute URL to access the primary view
-  of an entity
+* `absolute_url(*args, **kwargs)`, returns an absolute URL including the base-url
* `rest_path()`, returns a relative REST URL to get the entity
@@ -31,7 +30,7 @@
`Data handling`:
* `as_rset()`, converts the entity into an equivalent result set simulating the
-  request `Any X WHERE X eid _eid_`
+  request `Any X WHERE X eid _eid_`
* `complete(skip_bytes=True)`, executes a request that recovers at once all the missing attributes of an entity
@@ -52,10 +51,10 @@
values given named parameters
* `set_relations(**kwargs)`, add relations to the given object. To
-  set a relation where this entity is the object of the relation,
-  use `reverse_` as argument name. Values may be an
-  entity, a list of entities, or None (meaning that all relations of
-  the given type from or to this object should be deleted).
+  set a relation where this entity is the object of the relation,
+  use `reverse_` as argument name. Values may be an
+  entity, a list of entities, or None (meaning that all relations of
+  the given type from or to this object should be deleted).
* `copy_relations(ceid)`, copies the relations of the entities having the eid given in the parameters on the current entity
@@ -66,7 +65,7 @@
The :class:`AnyEntity` class
----------------------------
-To provide a specific behavior for each entity, we have to define a class
+To provide a specific behavior for each entity, we can define a class
inheriting from `cubicweb.entities.AnyEntity`. In general, we define this class in `mycube.entities` module (or in a submodule if we want to split code among multiple files) so that it will be available on both server and client side.
@@ -111,7 +110,7 @@
`Misc methods`:
* `after_deletion_path`, return (path, parameters) which should be
-  used as redirect information when this entity is being deleted
+  used as redirect information when this entity is being deleted
* `pre_web_edit`, callback called by the web editcontroller when an entity will be created/modified, to let a chance to do some entity
@@ -139,5 +138,18 @@
one in OTHER_CUBE. These types are stored in the `etype` section of the `vregistry`.
-Notice this is different than yams schema inheritance.
+Notice this is different from yams schema inheritance, which is an
+experimental, undocumented feature.
+
+
+Application logic
+-----------------
+While a lot of custom behaviour and application logic can be
+implemented using entity classes, the programmer must be aware that
+adding new attributes and methods on an entity class may shadow
+schema-level attribute or relation definitions.
+
+To keep entities clean (mostly data structures plus a few universal
+methods such as those listed above), one should use `adapters` (see
+:ref:`adapters`).
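To make this recommendation concrete, here is a minimal sketch of the split between a lean entity class and an adapter carrying the logic. The `Company` entity type, the `works_for` relation, the `salary` attribute and the `IAccounting` identifier are hypothetical, used only for illustration:

.. sourcecode:: python

    from cubicweb.entities import AnyEntity, fetch_config
    from cubicweb.view import EntityAdapter
    from cubicweb.selectors import is_instance

    class Company(AnyEntity):
        """lean entity class: only ORM tuning and universal helpers"""
        __regid__ = 'Company'
        fetch_attrs, fetch_order = fetch_config(('name',))

    class CompanyIAccountingAdapter(EntityAdapter):
        """application logic lives here, outside the entity's namespace"""
        __regid__ = 'IAccounting'
        __select__ = is_instance('Company')

        def total_salary(self):
            # self.entity is the adapted Company entity
            return sum(emp.salary for emp in self.entity.reverse_works_for)

A view or hook would then reach the logic with ``company.cw_adapt_to('IAccounting').total_salary()``, keeping ``Company`` itself free of extra methods.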
diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/devrepo/entityclasses/index.rst --- a/doc/book/en/devrepo/entityclasses/index.rst Thu Jul 15 12:03:13 2010 +0200 +++ b/doc/book/en/devrepo/entityclasses/index.rst Mon Jul 19 15:37:02 2010 +0200 @@ -9,5 +9,5 @@ data-as-objects load-sort - interfaces + adapters application-logic diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/devrepo/entityclasses/interfaces.rst --- a/doc/book/en/devrepo/entityclasses/interfaces.rst Thu Jul 15 12:03:13 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,65 +0,0 @@ -Interfaces ----------- - -This is the same thing as object-oriented programming `interfaces`_. - -.. _`interfaces`: http://java.sun.com/docs/books/tutorial/java/concepts/interface.html - -Definition of an interface is quite trivial. An example from cubicweb -itself (found in cubicweb/interfaces.py): - -.. sourcecode:: python - - class ITree(Interface): - - def parent(self): - """returns the parent entity""" - - def children(self): - """returns the item's children""" - - def children_rql(self): - """returns RQL to get children""" - - def iterchildren(self): - """iterates over the item's children""" - - def is_leaf(self): - """returns true if this node as no child""" - - def is_root(self): - """returns true if this node has no parent""" - - def root(self): - """returns the root object""" - - -Declaration of interfaces implemented by a class -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. sourcecode:: python - - from cubicweb.interfaces import ITree - from cubicweb.mixins import TreeMixIn - - class MyEntity(TreeMixIn, AnyEntity): - __regid__ = 'MyEntity' - __implements__ = AnyEntity.__implements__ + ('ITree',) - - tree_attribute = 'filed_under' - -The TreeMixIn here provides a default implementation for the -interface. The tree_attribute class attribute is actually used by this -implementation to help implement correct behaviour. - -Interfaces (and some implementations as mixins) defined in the library -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. automodule:: cubicweb.interfaces - :members: - -.. automodule:: cubicweb.mixins - :members: - - - diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/devrepo/profiling.rst --- a/doc/book/en/devrepo/profiling.rst Thu Jul 15 12:03:13 2010 +0200 +++ b/doc/book/en/devrepo/profiling.rst Mon Jul 19 15:37:02 2010 +0200 @@ -1,3 +1,5 @@ +.. _PROFILING: + Profiling and performance ========================= diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/devrepo/vreg.rst --- a/doc/book/en/devrepo/vreg.rst Thu Jul 15 12:03:13 2010 +0200 +++ b/doc/book/en/devrepo/vreg.rst Mon Jul 19 15:37:02 2010 +0200 @@ -37,6 +37,7 @@ .. autoclass:: cubicweb.appobject.yes .. autoclass:: cubicweb.selectors.match_kwargs .. autoclass:: cubicweb.selectors.appobject_selectable +.. autoclass:: cubicweb.selectors.adaptable Result set selectors @@ -66,7 +67,7 @@ match or not according to entity's (instance or class) properties. .. autoclass:: cubicweb.selectors.non_final_entity -.. autoclass:: cubicweb.selectors.implements +.. autoclass:: cubicweb.selectors.is_instance .. autoclass:: cubicweb.selectors.score_entity .. autoclass:: cubicweb.selectors.rql_condition .. autoclass:: cubicweb.selectors.relation_possible @@ -75,6 +76,8 @@ .. autoclass:: cubicweb.selectors.partial_has_related_entities .. autoclass:: cubicweb.selectors.has_permission .. autoclass:: cubicweb.selectors.has_add_permission +.. autoclass:: cubicweb.selectors.has_mimetype +.. 
autoclass:: cubicweb.selectors.implements
Logged user selectors
diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/devweb/edition/form.rst
--- a/doc/book/en/devweb/edition/form.rst Thu Jul 15 12:03:13 2010 +0200
+++ b/doc/book/en/devweb/edition/form.rst Mon Jul 19 15:37:02 2010 +0200
@@ -1,3 +1,5 @@
+.. _webform:
+
HTML form construction
----------------------
diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/devweb/js.rst
--- a/doc/book/en/devweb/js.rst Thu Jul 15 12:03:13 2010 +0200
+++ b/doc/book/en/devweb/js.rst Mon Jul 19 15:37:02 2010 +0200
@@ -350,3 +350,47 @@
There is also javascript support for massmailing, gmap (google maps), fckcwconfig (fck editor), timeline, calendar, goa (CubicWeb over AppEngine), flot (charts drawing), tabs and bookmarks.
+
+API
+~~~
+
+.. toctree::
+   :maxdepth: 1
+
+   js_api/index
+
+
+Testing javascript
+~~~~~~~~~~~~~~~~~~
+
+You can include standard QUnit tests inside the python unittest run with
+``cubicweb.qunit.QUnitTestCase``. You simply have to define a new class that
+inherits from ``QUnitTestCase`` and register your javascript test file in the
+``all_js_tests`` class attribute. This ``all_js_tests`` is a sequence of
+3-tuples ``(<test file>, [<dependencies>], [<data files>])``:
+
+The ``<test file>`` should contain the qunit tests. ``<dependencies>`` defines the list
+of javascript files that must be imported before the test script. Dependencies
+are included in their definition order. ``<data files>`` are additional files copied in the
+test directory. Both ``<dependencies>`` and ``<data files>`` are optional.
+``jquery.js`` is pre-included in ``<dependencies>`` for all tests.
+
+.. sourcecode:: python
+
+    from cubicweb.qunit import QUnitTestCase
+
+    class MyQUnitTest(QUnitTestCase):
+
+        all_js_tests = (
+            ("relative/path/to/my_simple_testcase.js",),
+            ("relative/path/to/my_qunit_testcase.js", (
+                "rel/path/to/dependency_1.js",
+                "rel/path/to/dependency_2.js",)),
+            ("relative/path/to/my_complexe_qunit_testcase.js", (
+                "rel/path/to/dependency_1.js",
+                "rel/path/to/dependency_2.js",
+                ), (
+                "rel/path/file_dependency.html",
+                "path/file_dependency.json")
+            ),
+        )
diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/devweb/views/breadcrumbs.rst
--- a/doc/book/en/devweb/views/breadcrumbs.rst Thu Jul 15 12:03:13 2010 +0200
+++ b/doc/book/en/devweb/views/breadcrumbs.rst Mon Jul 19 15:37:02 2010 +0200
@@ -8,11 +8,11 @@
~~~~~~~
Breadcrumbs are displayed by default in the header section (see
-:ref:`the_main_template_sections`). With the default main
-template, the header section is composed by the logo, the application
-name, breadcrumbs and, at the most right, the login box. Breadcrumbs
-are displayed just next to the application name, thus breadcrumbs
-begin with a separator.
+:ref:`the_main_template_sections`). With the default main template,
+the header section is composed of the logo, the application name,
+breadcrumbs and, at the far right, the login box. Breadcrumbs are
+displayed just next to the application name, thus they begin with a
+separator.
Here is the header section of the CubicWeb's forge:
@@ -22,29 +22,31 @@
:mod:`cubicweb.web.views.ibreadcrumbs`:
- `BreadCrumbEntityVComponent`: displayed for a result set with one line
-  if the entity implements the ``IBreadCrumbs`` interface.
+  if the entity is adaptable to ``IBreadCrumbsAdapter``.
- `BreadCrumbETypeVComponent`: displayed for a result set with more than
-  one line, but with all entities of the same type which implement the
-  ``IBreadCrumbs`` interface.
+  one line, but with all entities of the same type which can adapt to
+  ``IBreadCrumbsAdapter``.
- `BreadCrumbAnyRSetVComponent`: displayed for any other result set.
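All three components ultimately rely on the ``IBreadCrumbsAdapter`` described in the next section. As a quick preview, here is a minimal sketch of binding it to an entity type (the `Folder` entity type and the `filed_under` relation are hypothetical):

.. sourcecode:: python

    from cubicweb.selectors import is_instance
    from cubicweb.web.views.ibreadcrumbs import IBreadCrumbsAdapter

    class FolderIBreadCrumbsAdapter(IBreadCrumbsAdapter):
        """walk up the hypothetical `filed_under` relation to build the path"""
        __select__ = IBreadCrumbsAdapter.__select__ & is_instance('Folder')

        def parent_entity(self):
            parents = self.entity.related('filed_under', 'subject', entities=True)
            if parents:
                return parents[0]
            return None

Since the class inherits ``IBreadCrumbsAdapter``, its registry identifier and the default ``breadcrumbs`` implementation are reused as-is.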
Building breadcrumbs
~~~~~~~~~~~~~~~~~~~~
-The ``IBreadCrumbs`` interface is defined in the
-:mod:`cubicweb.interfaces` module. It specifies that an entity which
-implements this interface must have a ``breadcrumbs`` method.
+The ``IBreadCrumbsAdapter`` adapter is defined in the
+:mod:`cubicweb.web.views.ibreadcrumbs` module. It specifies that an
+entity which implements this interface must have a ``breadcrumbs`` and
+a ``parent_entity`` method. A default implementation for each is
+provided. This implementation exploits the ITreeAdapter.
.. note::
   Redefining the breadcrumbs is the hammer way to do it. Another way
-  is to define the `parent` method on an entity (as defined in the
-  `ITree` interface). If available, it will be used to compute
-  breadcrumbs.
+  is to define an `ITreeAdapter` adapter on an entity type. If
+  available, it will be used to compute breadcrumbs.
-Here is the API of the ``breadcrumbs`` method:
+Here is the API of the ``IBreadCrumbsAdapter`` class:
-.. automethod:: cubicweb.interfaces.IBreadCrumbs.breadcrumbs
+.. automethod:: cubicweb.web.views.ibreadcrumbs.IBreadCrumbs.parent_entity
+.. automethod:: cubicweb.web.views.ibreadcrumbs.IBreadCrumbs.breadcrumbs
If the breadcrumbs method return a list of entities, the ``cubicweb.web.views.ibreadcrumbs.BreadCrumbView`` is used to display
diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/devweb/views/index.rst
--- a/doc/book/en/devweb/views/index.rst Thu Jul 15 12:03:13 2010 +0200
+++ b/doc/book/en/devweb/views/index.rst Mon Jul 19 15:37:02 2010 +0200
@@ -12,6 +12,7 @@
   views
   basetemplates
   primary
+  reledit
   baseviews
   startup
   boxes
diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/devweb/views/reledit.rst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/en/devweb/views/reledit.rst Mon Jul 19 15:37:02 2010 +0200
@@ -0,0 +1,113 @@
+.. _reledit:
+
+The "Click and Edit" (also `reledit`) View
+------------------------------------------
+
+The principal way to update data through the Web UI is through the
+`modify` action on entities, which brings a full form. This is
+described in the :ref:`webform` chapter.
+
+There is however another way to perform piecewise edition of entities
+and relations, using a specific `reledit` (for *relation edition*)
+view from the :mod:`cubicweb.web.views.reledit` module.
+
+This is typically applied from the default Primary View (see
+:ref:`primary_view`) on the attributes and relations section. It makes
+small editions more convenient.
+
+Of course, this can also be used in any other custom view. Here follows
+some explanation of its capabilities and instructions on how to
+use it.
+
+Using `reledit`
+***************
+
+Let's start again with a simple example:
+
+.. sourcecode:: python
+
+    class Company(EntityType):
+        name = String(required=True, unique=True)
+        boss = SubjectRelation('Person', cardinality='1*')
+        status = SubjectRelation('File', cardinality='?*', composite='subject')
+
+In some view code we might want to show these attributes/relations and
+allow the user to edit each of them in turn without having to leave
+the current page. We would write code as below:
+
+.. sourcecode:: python
+
+    company.view('reledit', rtype='name', default_value='') # editable name attribute
+    company.view('reledit', rtype='boss') # editable boss relation
+    company.view('reledit', rtype='status') # editable attribute-like relation
+
+If one wanted to edit the company from a boss's point of view, one
+would have to indicate the proper relation's role. By default the role
+is `subject`.
+
+.. sourcecode:: python
+
+    person.view('reledit', rtype='boss', role='object')
+
+Each of these will provide a different editing widget. The `name`
+attribute will obviously get a text input field. The `boss` relation
+will be edited through a selection box, allowing to pick another
+`Person` as boss. The `status` relation, given that it defines Company
+as a composite entity with one file inside, will provide additional actions:
+
+* to `add` a `File` when there is none
+* to `delete` the `File` (if the cardinality allows it)
+
+Moreover, editing the relation or using the `add` action leads to an
+embedded edition/creation form allowing edition of the target entity
+(which is `File` in our example) instead of merely allowing to choose
+amongst existing files.
+
+The `reledit_ctrl` rtag
+***********************
+
+The behaviour of reledited attributes/relations can be finely
+controlled using the reledit_ctrl rtag, defined in
+:mod:`cubicweb.web.uicfg`.
+
+This rtag provides three control variables:
+
+* ``default_value``
+* ``reload``, to specify if edition of the relation entails a full page
+  reload, which defaults to False
+* ``noedit``, to explicitly inhibit edition
+
+Let's see how to use these controls.
+
+.. sourcecode:: python
+
+    from logilab.mtconverter import xml_escape
+    from cubicweb.web.uicfg import reledit_ctrl
+    reledit_ctrl.tag_attribute(('Company', 'name'),
+                               {'reload': lambda x:x.eid,
+                                'default_value': xml_escape(u'')})
+    reledit_ctrl.tag_object_of(('*', 'boss', 'Person'), {'noedit': True})
+
+The `default_value` needs to be an xml escaped unicode string.
+
+The `noedit` attribute is convenient to programmatically disable some
+relation edition on views that apply it systematically (the prime
+example being the primary view). Here we use it to forbid changing the
+`boss` relation from a `Person` side (as it could have unwanted
+effects).
+
+Finally, the `reload` key accepts either a boolean, an eid or a
+unicode string representing an url. If an eid is provided, it will be
+internally transformed into an url. The eid/url case helps when one
+needs to reload and the current url is inappropriate. A common case is
+edition of a key attribute, which is part of the current url. If a
+user changed the Company's name from `lozilab` to `logilab`, reloading
+on http://myapp/company/lozilab would fail. Providing the entity's
+eid, then, forces a reload on something like http://myapp/company/42,
+which always works.
+
+
+
+
+
+
diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/makefile
--- a/doc/book/en/makefile Thu Jul 15 12:03:13 2010 +0200
+++ b/doc/book/en/makefile Mon Jul 19 15:37:02 2010 +0200
@@ -11,6 +11,10 @@
PAPER =
#BUILDDIR = build
BUILDDIR = ~/tmp/cwdoc
+CWDIR = ../../..
+JSDIR = ${CWDIR}/web/data
+JSTORST = ${CWDIR}/doc/tools/pyjsrest.py
+BUILDJS = devweb/js_api
# Internal variables for sphinx
PAPEROPT_a4 = -D latex_paper_size=a4
@@ -18,6 +22,7 @@
ALLSPHINXOPTS = -d ${BUILDDIR}/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
.PHONY: help clean html web pickle htmlhelp latex changes linkcheck
help:
@@ -36,6 +41,7 @@
rm -rf apidoc/
rm -f *.html
-rm -rf ${BUILDDIR}/*
+ -rm -rf ${BUILDJS}
all: ${TARGET} apidoc html
@@ -48,12 +54,16 @@
epydoc --html -o apidoc -n "cubicweb" --exclude=setup --exclude=__pkginfo__ ../../../
# run sphinx ###
-html:
+html: js
mkdir -p ${BUILDDIR}/html ${BUILDDIR}/doctrees
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) ${BUILDDIR}/html
@echo
@echo "Build finished. The HTML pages are in ${BUILDDIR}/html."
+js:
+ mkdir -p ${BUILDJS}
+ $(JSTORST) -p ${JSDIR} -o ${BUILDJS}
+
pickle:
mkdir -p ${BUILDDIR}/pickle ${BUILDDIR}/doctrees
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) ${BUILDDIR}/pickle
diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/tutorials/base/maintemplate.rst
--- a/doc/book/en/tutorials/base/maintemplate.rst Thu Jul 15 12:03:13 2010 +0200
+++ b/doc/book/en/tutorials/base/maintemplate.rst Mon Jul 19 15:37:02 2010 +0200
@@ -123,8 +123,8 @@
.. image:: ../../images/lax-book_06-simple-main-template_en.png
-XXX
-[WRITE ME]
+.. XXX
+.. [WRITE ME]
* customize MainTemplate and show that everything in the user interface can be changed
diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/tutorials/index.rst
--- a/doc/book/en/tutorials/index.rst Thu Jul 15 12:03:13 2010 +0200
+++ b/doc/book/en/tutorials/index.rst Mon Jul 19 15:37:02 2010 +0200
@@ -17,3 +17,4 @@
   base/index
   advanced/index
+  tools/windmill.rst
diff -r 00b1b6b906cf -r 97c55baefa0c doc/book/en/tutorials/tools/windmill.rst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/en/tutorials/tools/windmill.rst Mon Jul 19 15:37:02 2010 +0200
@@ -0,0 +1,152 @@
+==========================
+Use Windmill with CubicWeb
+==========================
+
+Windmill_ implements cross browser testing, in-browser recording and playback,
+and functionality for fast accurate debugging and test environment integration.
+
+.. _Windmill: http://www.getwindmill.com/
+
+`Online features list `_ is available.
+
+
+Installation
+============
+
+Windmill
+--------
+
+You have to install Windmill manually for now. If you're using Debian, there is
+no binary package (`yet `_).
+
+The simplest solution is to use a *setuptools/pip* command (for a clean
+environment, take a look at the `virtualenv
+`_ project as well)::
+
+    pip install windmill
+    curl -O http://github.com/windmill/windmill/tarball/master
+
+However, the Windmill project doesn't release frequently. Our recommendation is
+to use the latest snapshot of the Git repository:
+
+.. sourcecode:: shell
+
+    git clone git://github.com/windmill/windmill.git HEAD
+    cd windmill
+    python setup.py develop
+
+Install instructions are `available `_.
+
+Be sure to have the windmill module in your PYTHONPATH afterwards::
+
+    python -c "import windmill"
+
+X dummy
+-------
+
+In order to reduce unnecessary system load on your test machines, it's
+recommended to use an X dummy server for testing the Unix web clients: you need a
+dummy video X driver (such as the xserver-xorg-video-dummy package in Debian) coupled
+with a light X server such as `Xvfb `_.
+
+ The dummy driver is a special driver available with the XFree86 DDX. To use
+ the dummy driver, simply substitue it for your normal card driver in the
+ Device section of your xorg.conf configuration file. For example, if you
+ normally uses an ati driver, then you will have a Device section with
+ Driver "ati" to let the X server know that you want it to load and use the
+ ati driver; however, for these conformance tests, you would change that
+ line to Driver "dummy" and remove any other ati specific options from the
+ Device section.
+
+ *From: http://www.x.org/wiki/XorgTesting*
+
+Then, you can run the X server with the following command:
+
+    /usr/bin/X11/Xvfb :1 -ac -screen 0 1280x1024x8 -fbdir /tmp
+
+
+Windmill usage
+==============
+
+Record your use case
+--------------------
+
+- start your instance manually
+- start Windmill_ with the site url as last argument (read Usage_ or use the *'-h'*
+  option to find the required command line arguments)
+- use the record button
+- click on save to obtain the python code of your use case
+- copy the content to a new file in a *windmill* directory
+
+.. _Usage: http://wiki.github.com/windmill/windmill/running-tests
+
+If you are using firefox as client, consider the "firebug" option.
+
+You can refine the test with the *loadtest* windmill option:
+
+    windmill -m firebug loadtest=
+
+But use the internal windmill shell to explore available commands:
+
+    windmill -m firebug shell
+
+.. sourcecode:: python
+
+    >>> load_test()
+    >>> run_test()
+
+
+
+Integrate Windmill tests into CubicWeb
+======================================
+
+Run your tests
+--------------
+
+You can easily run your windmill test suite through `pytest` or :mod:`unittest`.
+You have to copy a *test_windmill.py* file from :mod:`web.test`.
+
+By default, CubicWeb will use **firefox** as the default browser and will try
+to run the test instance server on localhost. In the general case, you don't need
+to change anything.
+
+Check the :class:`cubicweb.devtools.cwwindmill.CubicWebServerTC` class for server
+parameters and :class:`cubicweb.devtools.cwwindmill.CubicWebWindmillUseCase` for
+Windmill configuration.
+
+Best practices
+--------------
+
+Don't run another instance on the same port. You risk silencing some
+regressions (the test runner will automatically fail in future versions).
+
+Start your use case with an assert on the expected primary url page.
+Otherwise all your tests could fail without a clear explanation of the
+navigation involved.
+
+In the same location as *test_windmill.py*, create a *windmill/* directory with your
+recorded windmill use cases.
+
+Then, you can launch the test series with::
+
+    % pytest test/test_windmill.py
+
+For instance, you can run the CubicWeb framework use tests with::
+
+    % pytest web/test/test_windmill.py
+
+
+Preferences
+===========
+
+A *.windmill/prefs.py* can be used to redefine default configuration values.
+
+.. define CubicWeb preferences in the parent test case instead with a dedicated firefox profile
+
+For managing browser extensions, read the `advanced topic chapter
+`_.
+
+More configuration examples can be found in *windmill/conf/global_settings.py*,
+which can serve as a template.
+
+
diff -r 00b1b6b906cf -r 97c55baefa0c doc/refactoring-the-css-with-uiprops.rst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/refactoring-the-css-with-uiprops.rst Mon Jul 19 15:37:02 2010 +0200
@@ -0,0 +1,73 @@
+=========================================
+Refactoring the CSSs with UI properties
+=========================================
+
+Overview
+=========
+
+Managing styles progressively became difficult in CubicWeb. The
+introduction of uiprops is an attempt to fix this problem.
+
+The goal is to make it possible to use variables in our CSSs.
+
+These variables are defined or computed in the uiprops.py python file
+and inserted in the CSS using the Python string interpolation syntax.
+ +A quick example, put in ``uiprops.py``:: + + defaultBgColor = '#eee' + +and in your css:: + + body { background-color: %(defaultBgColor)s; } + + +The good practices are: + +- define a variable in uiprops to avoid repetitions in the CSS + (colors, borders, fonts, etc.) + +- define a variable in uiprops when you need to compute values + (compute a color palette, etc.) + +The algorithm implemented in CubicWeb is the following: + +- read uiprops file while walk up the chain of cube dependencies: if + cube myblog depends on cube comment, the variables defined in myblog + will have precedence over the ones in comment + +- replace the %(varname)s in all the CSSs of all the cubes + +Keep in mind that the browser will then interpret the CSSs and apply +the standard cascading mechanism. + +FAQ +==== + +- How do I keep the old style? + + Put ``STYLESHEET = [data('cubicweb.old.css')]`` in your uiprops.py + file and think about something else. + +- What are the changes in cubicweb.css? + + Version 3.9.0 of cubicweb changed the following in the default html + markup and css: + + =============== ================================== + old new + =============== ================================== + .navcol #navColumnLeft, #navColumnRight + #contentcol #contentColumn + .footer #footer + .logo #logo + .simpleMessage .loginMessage + .appMsg (styles are removed from css) + .searchMessage (styles are removed from css) + =============== ================================== + + Introduction of the new cubicweb.reset.css based on Eric Meyer's + reset css. + + Lots of margin, padding, etc. + diff -r 00b1b6b906cf -r 97c55baefa0c doc/tools/pyjsrest.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/tools/pyjsrest.py Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,110 @@ +#!/usr/bin/env python +""" +Parser for Javascript comments. +""" +from __future__ import with_statement + +import sys, os, getopt, re + +def clean_comment(match): + comment = match.group() + comment = strip_stars(comment) + return comment + +# Rest utilities +def rest_title(title, level, level_markups=['=', '=', '-', '~', '+', '`']): + size = len(title) + if level == 0: + return '\n'.join((level_markups[level] * size, title, level_markups[0] * size)) + '\n' + return '\n'.join(('\n' + title, level_markups[level] * size)) + '\n' + +def get_doc_comments(text): + """ + Return a list of all documentation comments in the file text. Each + comment is a pair, with the first element being the comment text and + the second element being the line after it, which may be needed to + guess function & arguments. + + >>> get_doc_comments(read_file('examples/module.js'))[0][0][:40] + '/**\n * This is the module documentation.' + >>> get_doc_comments(read_file('examples/module.js'))[1][0][7:50] + 'This is documentation for the first method.' + >>> get_doc_comments(read_file('examples/module.js'))[1][1] + 'function the_first_function(arg1, arg2) ' + >>> get_doc_comments(read_file('examples/module.js'))[2][0] + '/** This is the documentation for the second function. */' + + """ + return [clean_comment(match) for match in re.finditer('/\*\*.*?\*/', + text, re.DOTALL|re.MULTILINE)] + +RE_STARS = re.compile('^\s*?\* ?', re.MULTILINE) + + +def strip_stars(doc_comment): + """ + Strip leading stars from a doc comment. + + >>> strip_stars('/** This is a comment. */') + 'This is a comment.' + >>> strip_stars('/**\n * This is a\n * multiline comment. */') + 'This is a\n multiline comment.' + >>> strip_stars('/** \n\t * This is a\n\t * multiline comment. 
\n*/') + 'This is a\n multiline comment.' + + """ + return RE_STARS.sub('', doc_comment[3:-2]).strip() + +def parse_js_files(args=sys.argv): + """ + Main command-line invocation. + """ + try: + opts, args = getopt.gnu_getopt(args[1:], 'p:o:h', [ + 'jspath=', 'output=', 'help']) + opts = dict(opts) + except getopt.GetoptError: + usage() + sys.exit(2) + + rst_dir = opts.get('--output') or opts.get('-o') + if rst_dir is None and len(args) != 1: + rst_dir = 'apidocs' + js_dir = opts.get('--jspath') or opts.get('-p') + if not os.path.exists(os.path.join(rst_dir)): + os.makedirs(os.path.join(rst_dir)) + + f_index = open(os.path.join(rst_dir, 'index.rst'), 'wb') + f_index.write(''' +.. toctree:: + :maxdepth: 1 + +''' +) + for js_path, js_dirs, js_files in os.walk(js_dir): + rst_path = re.sub('%s%s*' % (js_dir, os.path.sep), '', js_path) + for js_file in js_files: + if not js_file.endswith('.js'): + continue + if not os.path.exists(os.path.join(rst_dir, rst_path)): + os.makedirs(os.path.join(rst_dir, rst_path)) + rst_content = extract_rest(js_path, js_file) + filename = os.path.join(rst_path, js_file[:-3]) + # add to index + f_index.write(' %s\n' % filename) + # save rst file + with open(os.path.join(rst_dir, filename) + '.rst', 'wb') as f_rst: + f_rst.write(rst_content) + f_index.close() + +def extract_rest(js_dir, js_file): + js_filepath = os.path.join(js_dir, js_file) + filecontent = open(js_filepath, 'U').read() + comments = get_doc_comments(filecontent) + rst = rest_title(js_file, 0) + rst += '.. module:: %s\n\n' % js_file + rst += '\n\n'.join(comments) + return rst + +if __name__ == '__main__': + parse_js_files() diff -r 00b1b6b906cf -r 97c55baefa0c entities/__init__.py --- a/entities/__init__.py Thu Jul 15 12:03:13 2010 +0200 +++ b/entities/__init__.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""base application's entities class implementation: `AnyEntity` +"""base application's entities class implementation: `AnyEntity`""" -""" __docformat__ = "restructuredtext en" from warnings import warn @@ -28,33 +27,13 @@ from cubicweb import Unauthorized, typed_eid from cubicweb.entity import Entity -from cubicweb.interfaces import IBreadCrumbs, IFeed - class AnyEntity(Entity): """an entity instance has e_schema automagically set on the class and instances have access to their issuing cursor """ __regid__ = 'Any' - __implements__ = (IBreadCrumbs, IFeed) - - fetch_attrs = ('modification_date',) - @classmethod - def fetch_order(cls, attr, var): - """class method used to control sort order when multiple entities of - this type are fetched - """ - return cls.fetch_unrelated_order(attr, var) - - @classmethod - def fetch_unrelated_order(cls, attr, var): - """class method used to control sort order when multiple entities of - this type are fetched to use in edition (eg propose them to create a - new relation on an edited entity). - """ - if attr == 'modification_date': - return '%s DESC' % var - return None + __implements__ = () # meta data api ########################################################### @@ -63,7 +42,7 @@ for rschema, attrschema in self.e_schema.attribute_definitions(): if rschema.meta: continue - value = self.get_value(rschema.type) + value = self.cw_attr_value(rschema.type) if value: # make the value printable (dates, floats, bytes, etc.) 
return self.printable_value(rschema.type, value, attrschema.type, @@ -120,32 +99,6 @@ except (Unauthorized, IndexError): return None - def breadcrumbs(self, view=None, recurs=False): - path = [self] - if hasattr(self, 'parent'): - parent = self.parent() - if parent is not None: - try: - path = parent.breadcrumbs(view, True) + [self] - except TypeError: - warn("breadcrumbs method's now takes two arguments " - "(view=None, recurs=False), please update", - DeprecationWarning) - path = parent.breadcrumbs(view) + [self] - if not recurs: - if view is None: - if 'vtitle' in self._cw.form: - # embeding for instance - path.append( self._cw.form['vtitle'] ) - elif view.__regid__ != 'primary' and hasattr(view, 'title'): - path.append( self._cw._(view.title) ) - return path - - ## IFeed interface ######################################################## - - def rss_feed_url(self): - return self.absolute_url(vid='rss') - # abstractions making the whole things (well, some at least) working ###### def sortvalue(self, rtype=None): @@ -154,7 +107,7 @@ """ if rtype is None: return self.dc_title().lower() - value = self.get_value(rtype) + value = self.cw_attr_value(rtype) # do not restrict to `unicode` because Bytes will return a `str` value if isinstance(value, basestring): return self.printable_value(rtype, format='text/plain').lower() @@ -189,35 +142,8 @@ self.__linkto[(rtype, role)] = linkedto return linkedto - # edit controller callbacks ############################################### - - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if hasattr(self, 'parent') and self.parent(): - return self.parent().rest_path(), {} - return str(self.e_schema).lower(), {} - - def pre_web_edit(self): - """callback called by the web editcontroller when an entity will be - created/modified, to let a chance to do some entity specific stuff. - - Do nothing by default. - """ - pass - # server side helpers ##################################################### - def notification_references(self, view): - """used to control References field of email send on notification - for this entity. `view` is the notification view. - - Should return a list of eids which can be used to generate message ids - of previously sent email - """ - return () - # XXX: store a reference to the AnyEntity class since it is hijacked in goa # configuration and we need the actual reference to avoid infinite loops # in mro diff -r 00b1b6b906cf -r 97c55baefa0c entities/adapters.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/entities/adapters.py Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,443 @@ +# copyright 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""some basic entity adapter implementations, for interfaces used in the +framework itself. +""" + +__docformat__ = "restructuredtext en" + +from itertools import chain +from warnings import warn + +from logilab.mtconverter import TransformError +from logilab.common.decorators import cached + +from cubicweb.view import EntityAdapter, implements_adapter_compat +from cubicweb.selectors import implements, is_instance, relation_possible +from cubicweb.interfaces import IDownloadable, ITree, IProgress, IMileStone + + +class IEmailableAdapter(EntityAdapter): + __regid__ = 'IEmailable' + __select__ = relation_possible('primary_email') | relation_possible('use_email') + + def get_email(self): + if getattr(self.entity, 'primary_email', None): + return self.entity.primary_email[0].address + if getattr(self.entity, 'use_email', None): + return self.entity.use_email[0].address + return None + + def allowed_massmail_keys(self): + """returns a set of allowed email substitution keys + + The default is to return the entity's attribute list but you might + override this method to allow extra keys. For instance, a Person + class might want to return a `companyname` key. + """ + return set(rschema.type + for rschema, attrtype in self.entity.e_schema.attribute_definitions() + if attrtype.type not in ('Password', 'Bytes')) + + def as_email_context(self): + """returns the dictionary as used by the sendmail controller to + build email bodies. + + NOTE: the dictionary keys should match the list returned by the + `allowed_massmail_keys` method. + """ + return dict( (attr, getattr(self.entity, attr)) + for attr in self.allowed_massmail_keys() ) + + +class INotifiableAdapter(EntityAdapter): + __regid__ = 'INotifiable' + __select__ = is_instance('Any') + + @implements_adapter_compat('INotifiableAdapter') + def notification_references(self, view): + """used to control References field of email send on notification + for this entity. `view` is the notification view. 
+ + Should return a list of eids which can be used to generate message + identifiers of previously sent email(s) + """ + itree = self.entity.cw_adapt_to('ITree') + if itree is not None: + return itree.path()[:-1] + return () + + +class IFTIndexableAdapter(EntityAdapter): + __regid__ = 'IFTIndexable' + __select__ = is_instance('Any') + + def fti_containers(self, _done=None): + if _done is None: + _done = set() + entity = self.entity + _done.add(entity.eid) + containers = tuple(entity.e_schema.fulltext_containers()) + if containers: + for rschema, target in containers: + if target == 'object': + targets = getattr(entity, rschema.type) + else: + targets = getattr(entity, 'reverse_%s' % rschema) + for entity in targets: + if entity.eid in _done: + continue + for container in entity.cw_adapt_to('IFTIndexable').fti_containers(_done): + yield container + yielded = True + else: + yield entity + + # weight in ABCD + entity_weight = 1.0 + attr_weight = {} + + def get_words(self): + """used by the full text indexer to get words to index + + this method should only be used on the repository side since it depends + on the logilab.database package + + :rtype: list + :return: the list of indexable word of this entity + """ + from logilab.database.fti import tokenize + # take care to cases where we're modyfying the schema + entity = self.entity + pending = self._cw.transaction_data.setdefault('pendingrdefs', set()) + words = {} + for rschema in entity.e_schema.indexable_attributes(): + if (entity.e_schema, rschema) in pending: + continue + weight = self.attr_weight.get(rschema, 'C') + try: + value = entity.printable_value(rschema, format='text/plain') + except TransformError: + continue + except: + self.exception("can't add value of %s to text index for entity %s", + rschema, entity.eid) + continue + if value: + words.setdefault(weight, []).extend(tokenize(value)) + for rschema, role in entity.e_schema.fulltext_relations(): + if role == 'subject': + for entity_ in getattr(entity, rschema.type): + merge_weight_dict(words, entity_.cw_adapt_to('IFTIndexable').get_words()) + else: # if role == 'object': + for entity_ in getattr(entity, 'reverse_%s' % rschema.type): + merge_weight_dict(words, entity_.cw_adapt_to('IFTIndexable').get_words()) + return words + +def merge_weight_dict(maindict, newdict): + for weight, words in newdict.iteritems(): + maindict.setdefault(weight, []).extend(words) + +class IDownloadableAdapter(EntityAdapter): + """interface for downloadable entities""" + __regid__ = 'IDownloadable' + __select__ = implements(IDownloadable, warn=False) # XXX for bw compat, else should be abstract + + @implements_adapter_compat('IDownloadable') + def download_url(self, **kwargs): # XXX not really part of this interface + """return an url to download entity's content""" + raise NotImplementedError + @implements_adapter_compat('IDownloadable') + def download_content_type(self): + """return MIME type of the downloadable content""" + raise NotImplementedError + @implements_adapter_compat('IDownloadable') + def download_encoding(self): + """return encoding of the downloadable content""" + raise NotImplementedError + @implements_adapter_compat('IDownloadable') + def download_file_name(self): + """return file name of the downloadable content""" + raise NotImplementedError + @implements_adapter_compat('IDownloadable') + def download_data(self): + """return actual data of the downloadable content""" + raise NotImplementedError + + +class ITreeAdapter(EntityAdapter): + """This adapter has to be overriden to be 
configured using the + tree_relation, child_role and parent_role class attributes to + benefit from this default implementation + """ + __regid__ = 'ITree' + __select__ = implements(ITree, warn=False) # XXX for bw compat, else should be abstract + + child_role = 'subject' + parent_role = 'object' + + @property + def tree_relation(self): + warn('[3.9] tree_attribute is deprecated, define tree_relation on a custom ' + 'ITree for %s instead' % (self.entity.__class__), + DeprecationWarning) + return self.entity.tree_attribute + + @implements_adapter_compat('ITree') + def children_rql(self): + """returns RQL to get children + + XXX should be removed from the public interface + """ + return self.entity.cw_related_rql(self.tree_relation, self.parent_role) + + @implements_adapter_compat('ITree') + def different_type_children(self, entities=True): + """return children entities of different type as this entity. + + according to the `entities` parameter, return entity objects or the + equivalent result set + """ + res = self.entity.related(self.tree_relation, self.parent_role, + entities=entities) + eschema = self.entity.e_schema + if entities: + return [e for e in res if e.e_schema != eschema] + return res.filtered_rset(lambda x: x.e_schema != eschema, self.entity.cw_col) + + @implements_adapter_compat('ITree') + def same_type_children(self, entities=True): + """return children entities of the same type as this entity. + + according to the `entities` parameter, return entity objects or the + equivalent result set + """ + res = self.entity.related(self.tree_relation, self.parent_role, + entities=entities) + eschema = self.entity.e_schema + if entities: + return [e for e in res if e.e_schema == eschema] + return res.filtered_rset(lambda x: x.e_schema is eschema, self.entity.cw_col) + + @implements_adapter_compat('ITree') + def is_leaf(self): + """returns true if this node as no child""" + return len(self.children()) == 0 + + @implements_adapter_compat('ITree') + def is_root(self): + """returns true if this node has no parent""" + return self.parent() is None + + @implements_adapter_compat('ITree') + def root(self): + """return the root object""" + return self._cw.entity_from_eid(self.path()[0]) + + @implements_adapter_compat('ITree') + def parent(self): + """return the parent entity if any, else None (e.g. 
if we are on the + root) + """ + try: + return self.entity.related(self.tree_relation, self.child_role, + entities=True)[0] + except (KeyError, IndexError): + return None + + @implements_adapter_compat('ITree') + def children(self, entities=True, sametype=False): + """return children entities + + according to the `entities` parameter, return entity objects or the + equivalent result set + """ + if sametype: + return self.same_type_children(entities) + else: + return self.entity.related(self.tree_relation, self.parent_role, + entities=entities) + + @implements_adapter_compat('ITree') + def iterparents(self, strict=True): + def _uptoroot(self): + curr = self + while True: + curr = curr.parent() + if curr is None: + break + yield curr + curr = curr.cw_adapt_to('ITree') + if not strict: + return chain([self.entity], _uptoroot(self)) + return _uptoroot(self) + + @implements_adapter_compat('ITree') + def iterchildren(self, _done=None): + """iterates over the item's children""" + if _done is None: + _done = set() + for child in self.children(): + if child.eid in _done: + self.error('loop in %s tree', child.__regid__.lower()) + continue + yield child + _done.add(child.eid) + + @implements_adapter_compat('ITree') + def prefixiter(self, _done=None): + if _done is None: + _done = set() + if self.entity.eid in _done: + return + _done.add(self.entity.eid) + yield self.entity + for child in self.same_type_children(): + for entity in child.cw_adapt_to('ITree').prefixiter(_done): + yield entity + + @cached + @implements_adapter_compat('ITree') + def path(self): + """returns the list of eids from the root object to this object""" + path = [] + adapter = self + entity = adapter.entity + while entity is not None: + if entity.eid in path: + self.error('loop in %s tree', entity.__regid__.lower()) + break + path.append(entity.eid) + try: + # check we are not jumping to another tree + if (adapter.tree_relation != self.tree_relation or + adapter.child_role != self.child_role): + break + entity = adapter.parent() + adapter = entity.cw_adapt_to('ITree') + except AttributeError: + break + path.reverse() + return path + + +class IProgressAdapter(EntityAdapter): + """something that has a cost, a state and a progression. + + You should at least override progress_info an in_progress methods on concret + implementations. + """ + __regid__ = 'IProgress' + __select__ = implements(IProgress, warn=False) # XXX for bw compat, should be abstract + + @property + @implements_adapter_compat('IProgress') + def cost(self): + """the total cost""" + return self.progress_info()['estimated'] + + @property + @implements_adapter_compat('IProgress') + def revised_cost(self): + return self.progress_info().get('estimatedcorrected', self.cost) + + @property + @implements_adapter_compat('IProgress') + def done(self): + """what is already done""" + return self.progress_info()['done'] + + @property + @implements_adapter_compat('IProgress') + def todo(self): + """what remains to be done""" + return self.progress_info()['todo'] + + @implements_adapter_compat('IProgress') + def progress_info(self): + """returns a dictionary describing progress/estimated cost of the + version. 
+ + - mandatory keys are (''estimated', 'done', 'todo') + + - optional keys are ('notestimated', 'notestimatedcorrected', + 'estimatedcorrected') + + 'noestimated' and 'notestimatedcorrected' should default to 0 + 'estimatedcorrected' should default to 'estimated' + """ + raise NotImplementedError + + @implements_adapter_compat('IProgress') + def finished(self): + """returns True if status is finished""" + return not self.in_progress() + + @implements_adapter_compat('IProgress') + def in_progress(self): + """returns True if status is not finished""" + raise NotImplementedError + + @implements_adapter_compat('IProgress') + def progress(self): + """returns the % progress of the task item""" + try: + return 100. * self.done / self.revised_cost + except ZeroDivisionError: + # total cost is 0 : if everything was estimated, task is completed + if self.progress_info().get('notestimated'): + return 0. + return 100 + + @implements_adapter_compat('IProgress') + def progress_class(self): + return '' + + +class IMileStoneAdapter(IProgressAdapter): + __regid__ = 'IMileStone' + __select__ = implements(IMileStone, warn=False) # XXX for bw compat, should be abstract + + parent_type = None # specify main task's type + + @implements_adapter_compat('IMileStone') + def get_main_task(self): + """returns the main ITask entity""" + raise NotImplementedError + + @implements_adapter_compat('IMileStone') + def initial_prevision_date(self): + """returns the initial expected end of the milestone""" + raise NotImplementedError + + @implements_adapter_compat('IMileStone') + def eta_date(self): + """returns expected date of completion based on what remains + to be done + """ + raise NotImplementedError + + @implements_adapter_compat('IMileStone') + def completion_date(self): + """returns date on which the subtask has been completed""" + raise NotImplementedError + + @implements_adapter_compat('IMileStone') + def contractors(self): + """returns the list of persons supposed to work on this task""" + raise NotImplementedError diff -r 00b1b6b906cf -r 97c55baefa0c entities/authobjs.py --- a/entities/authobjs.py Thu Jul 15 12:03:13 2010 +0200 +++ b/entities/authobjs.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""entity classes user and group entities +"""entity classes user and group entities""" -""" __docformat__ = "restructuredtext en" from logilab.common.decorators import cached diff -r 00b1b6b906cf -r 97c55baefa0c entities/lib.py --- a/entities/lib.py Thu Jul 15 12:03:13 2010 +0200 +++ b/entities/lib.py Mon Jul 19 15:37:02 2010 +0200 @@ -48,13 +48,13 @@ @property def email_of(self): - return self.reverse_use_email and self.reverse_use_email[0] + return self.reverse_use_email and self.reverse_use_email[0] or None @property def prefered(self): return self.prefered_form and self.prefered_form[0] or self - @deprecated('use .prefered') + @deprecated('[3.6] use .prefered') def canonical_form(self): return self.prefered_form and self.prefered_form[0] or self @@ -89,14 +89,6 @@ return self.display_address() return super(EmailAddress, self).printable_value(attr, value, attrtype, format) - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.email_of: - return self.email_of.rest_path(), {} - return super(EmailAddress, self).after_deletion_path() - class Bookmark(AnyEntity): """customized class for Bookmark entities""" @@ -133,12 +125,6 @@ except UnknownProperty: return u'' - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - return 'view', {} - class CWCache(AnyEntity): """Cache""" diff -r 00b1b6b906cf -r 97c55baefa0c entities/schemaobjs.py --- a/entities/schemaobjs.py Thu Jul 15 12:03:13 2010 +0200 +++ b/entities/schemaobjs.py Mon Jul 19 15:37:02 2010 +0200 @@ -115,14 +115,6 @@ scard, self.relation_type[0].name, ocard, self.to_entity[0].name) - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.relation_type: - return self.relation_type[0].rest_path(), {} - return super(CWRelation, self).after_deletion_path() - @property def rtype(self): return self.relation_type[0] @@ -139,6 +131,7 @@ rschema = self._cw.vreg.schema.rschema(self.rtype.name) return rschema.rdefs[(self.stype.name, self.otype.name)] + class CWAttribute(CWRelation): __regid__ = 'CWAttribute' @@ -160,14 +153,6 @@ def dc_title(self): return '%s(%s)' % (self.cstrtype[0].name, self.value or u'') - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.reverse_constrained_by: - return self.reverse_constrained_by[0].rest_path(), {} - return super(CWConstraint, self).after_deletion_path() - @property def type(self): return self.cstrtype[0].name @@ -201,14 +186,6 @@ def check_expression(self, *args, **kwargs): return self._rqlexpr().check(*args, **kwargs) - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.expression_of: - return self.expression_of.rest_path(), {} - return super(RQLExpression, self).after_deletion_path() - class CWPermission(AnyEntity): __regid__ = 'CWPermission' @@ -218,12 +195,3 @@ if self.label: return '%s (%s)' % (self._cw._(self.name), self.label) return self._cw._(self.name) - - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - permissionof = getattr(self, 'reverse_require_permission', ()) - if 
len(permissionof) == 1: - return permissionof[0].rest_path(), {} - return super(CWPermission, self).after_deletion_path() diff -r 00b1b6b906cf -r 97c55baefa0c entities/test/unittest_base.py --- a/entities/test/unittest_base.py Thu Jul 15 12:03:13 2010 +0200 +++ b/entities/test/unittest_base.py Mon Jul 19 15:37:02 2010 +0200 @@ -27,7 +27,7 @@ from cubicweb.devtools.testlib import CubicWebTC from cubicweb import ValidationError -from cubicweb.interfaces import IMileStone, IWorkflowable +from cubicweb.interfaces import IMileStone, ICalendarable from cubicweb.entities import AnyEntity @@ -106,7 +106,7 @@ def test_allowed_massmail_keys(self): e = self.execute('CWUser U WHERE U login "member"').get_entity(0, 0) # Bytes/Password attributes should be omited - self.assertEquals(e.allowed_massmail_keys(), + self.assertEquals(e.cw_adapt_to('IEmailable').allowed_massmail_keys(), set(('surname', 'firstname', 'login', 'last_login_time', 'creation_date', 'modification_date', 'cwuri', 'eid')) ) @@ -115,8 +115,9 @@ class InterfaceTC(CubicWebTC): def test_nonregr_subclasses_and_mixins_interfaces(self): + from cubicweb.entities.wfobjs import WorkflowableMixIn + WorkflowableMixIn.__implements__ = (ICalendarable,) CWUser = self.vreg['etypes'].etype_class('CWUser') - self.failUnless(implements(CWUser, IWorkflowable)) class MyUser(CWUser): __implements__ = (IMileStone,) self.vreg._loadedmods[__name__] = {} @@ -126,10 +127,10 @@ # a copy is done systematically self.failUnless(issubclass(MyUser_, MyUser)) self.failUnless(implements(MyUser_, IMileStone)) - self.failUnless(implements(MyUser_, IWorkflowable)) + self.failUnless(implements(MyUser_, ICalendarable)) # original class should not have beed modified, only the copy self.failUnless(implements(MyUser, IMileStone)) - self.failIf(implements(MyUser, IWorkflowable)) + self.failIf(implements(MyUser, ICalendarable)) class SpecializedEntityClassesTC(CubicWebTC): diff -r 00b1b6b906cf -r 97c55baefa0c entities/test/unittest_wfobjs.py --- a/entities/test/unittest_wfobjs.py Thu Jul 15 12:03:13 2010 +0200 +++ b/entities/test/unittest_wfobjs.py Mon Jul 19 15:37:02 2010 +0200 @@ -100,35 +100,38 @@ def test_workflow_base(self): e = self.create_user('toto') - self.assertEquals(e.state, 'activated') - e.change_state('deactivated', u'deactivate 1') + iworkflowable = e.cw_adapt_to('IWorkflowable') + self.assertEquals(iworkflowable.state, 'activated') + iworkflowable.change_state('deactivated', u'deactivate 1') self.commit() - e.change_state('activated', u'activate 1') + iworkflowable.change_state('activated', u'activate 1') self.commit() - e.change_state('deactivated', u'deactivate 2') + iworkflowable.change_state('deactivated', u'deactivate 2') self.commit() - e.clear_related_cache('wf_info_for', 'object') + e.cw_clear_relation_cache('wf_info_for', 'object') self.assertEquals([tr.comment for tr in e.reverse_wf_info_for], ['deactivate 1', 'activate 1', 'deactivate 2']) - self.assertEquals(e.latest_trinfo().comment, 'deactivate 2') + self.assertEquals(iworkflowable.latest_trinfo().comment, 'deactivate 2') def test_possible_transitions(self): user = self.execute('CWUser X').get_entity(0, 0) - trs = list(user.possible_transitions()) + iworkflowable = user.cw_adapt_to('IWorkflowable') + trs = list(iworkflowable.possible_transitions()) self.assertEquals(len(trs), 1) self.assertEquals(trs[0].name, u'deactivate') self.assertEquals(trs[0].destination(None).name, u'deactivated') # test a std user get no possible transition cnx = self.login('member') # fetch the entity using the new session 
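These test updates spell out the 3.9 calling convention: workflow operations are no longer called on the entity itself but on the 'IWorkflowable' adapter returned by cw_adapt_to(). A condensed sketch of the pattern used throughout the tests in this hunk (entity creation and test class boilerplate elided):

.. sourcecode:: python

    iworkflowable = user.cw_adapt_to('IWorkflowable')
    self.assertEquals(iworkflowable.state, 'activated')
    iworkflowable.fire_transition('deactivate', comment=u'deactivate user')
    self.commit()
    user.clear_all_caches()   # drop cached state/relations after the commit
    self.assertEquals(iworkflowable.state, 'deactivated')
    # transitions the current user may fire from the new state
    print [tr.name for tr in iworkflowable.possible_transitions()]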
- trs = list(cnx.user().possible_transitions()) + trs = list(cnx.user().cw_adapt_to('IWorkflowable').possible_transitions()) self.assertEquals(len(trs), 0) def _test_manager_deactivate(self, user): - user.clear_related_cache('in_state', 'subject') + iworkflowable = user.cw_adapt_to('IWorkflowable') + user.cw_clear_relation_cache('in_state', 'subject') self.assertEquals(len(user.in_state), 1) - self.assertEquals(user.state, 'deactivated') - trinfo = user.latest_trinfo() + self.assertEquals(iworkflowable.state, 'deactivated') + trinfo = iworkflowable.latest_trinfo() self.assertEquals(trinfo.previous_state.name, 'activated') self.assertEquals(trinfo.new_state.name, 'deactivated') self.assertEquals(trinfo.comment, 'deactivate user') @@ -137,7 +140,8 @@ def test_change_state(self): user = self.user() - user.change_state('deactivated', comment=u'deactivate user') + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.change_state('deactivated', comment=u'deactivate user') trinfo = self._test_manager_deactivate(user) self.assertEquals(trinfo.transition, None) @@ -154,33 +158,36 @@ def test_fire_transition(self): user = self.user() - user.fire_transition('deactivate', comment=u'deactivate user') + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate', comment=u'deactivate user') user.clear_all_caches() - self.assertEquals(user.state, 'deactivated') + self.assertEquals(iworkflowable.state, 'deactivated') self._test_manager_deactivate(user) trinfo = self._test_manager_deactivate(user) self.assertEquals(trinfo.transition.name, 'deactivate') def test_goback_transition(self): - wf = self.session.user.current_workflow + wf = self.session.user.cw_adapt_to('IWorkflowable').current_workflow asleep = wf.add_state('asleep') - wf.add_transition('rest', (wf.state_by_name('activated'), wf.state_by_name('deactivated')), - asleep) + wf.add_transition('rest', (wf.state_by_name('activated'), + wf.state_by_name('deactivated')), + asleep) wf.add_transition('wake up', asleep) user = self.create_user('stduser') - user.fire_transition('rest') + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('rest') self.commit() - user.fire_transition('wake up') + iworkflowable.fire_transition('wake up') self.commit() - self.assertEquals(user.state, 'activated') - user.fire_transition('deactivate') + self.assertEquals(iworkflowable.state, 'activated') + iworkflowable.fire_transition('deactivate') self.commit() - user.fire_transition('rest') + iworkflowable.fire_transition('rest') self.commit() - user.fire_transition('wake up') + iworkflowable.fire_transition('wake up') self.commit() user.clear_all_caches() - self.assertEquals(user.state, 'deactivated') + self.assertEquals(iworkflowable.state, 'deactivated') # XXX test managers can change state without matching transition @@ -189,18 +196,18 @@ self.create_user('tutu') cnx = self.login('tutu') req = self.request() - member = req.entity_from_eid(self.member.eid) + iworkflowable = req.entity_from_eid(self.member.eid).cw_adapt_to('IWorkflowable') ex = self.assertRaises(ValidationError, - member.fire_transition, 'deactivate') + iworkflowable.fire_transition, 'deactivate') self.assertEquals(ex.errors, {'by_transition-subject': "transition may not be fired"}) cnx.close() cnx = self.login('member') req = self.request() - member = req.entity_from_eid(self.member.eid) - member.fire_transition('deactivate') + iworkflowable = req.entity_from_eid(self.member.eid).cw_adapt_to('IWorkflowable') + 
iworkflowable.fire_transition('deactivate') cnx.commit() ex = self.assertRaises(ValidationError, - member.fire_transition, 'activate') + iworkflowable.fire_transition, 'activate') self.assertEquals(ex.errors, {'by_transition-subject': "transition may not be fired"}) def test_fire_transition_owned_by(self): @@ -250,43 +257,44 @@ [(swfstate2, state2), (swfstate3, state3)]) self.assertEquals(swftr1.destination(None).eid, swfstate1.eid) # workflows built, begin test - self.group = self.request().create_entity('CWGroup', name=u'grp1') + group = self.request().create_entity('CWGroup', name=u'grp1') self.commit() - self.assertEquals(self.group.current_state.eid, state1.eid) - self.assertEquals(self.group.current_workflow.eid, mwf.eid) - self.assertEquals(self.group.main_workflow.eid, mwf.eid) - self.assertEquals(self.group.subworkflow_input_transition(), None) - self.group.fire_transition('swftr1', u'go') + iworkflowable = group.cw_adapt_to('IWorkflowable') + self.assertEquals(iworkflowable.current_state.eid, state1.eid) + self.assertEquals(iworkflowable.current_workflow.eid, mwf.eid) + self.assertEquals(iworkflowable.main_workflow.eid, mwf.eid) + self.assertEquals(iworkflowable.subworkflow_input_transition(), None) + iworkflowable.fire_transition('swftr1', u'go') self.commit() - self.group.clear_all_caches() - self.assertEquals(self.group.current_state.eid, swfstate1.eid) - self.assertEquals(self.group.current_workflow.eid, swf.eid) - self.assertEquals(self.group.main_workflow.eid, mwf.eid) - self.assertEquals(self.group.subworkflow_input_transition().eid, swftr1.eid) - self.group.fire_transition('tr1', u'go') + group.clear_all_caches() + self.assertEquals(iworkflowable.current_state.eid, swfstate1.eid) + self.assertEquals(iworkflowable.current_workflow.eid, swf.eid) + self.assertEquals(iworkflowable.main_workflow.eid, mwf.eid) + self.assertEquals(iworkflowable.subworkflow_input_transition().eid, swftr1.eid) + iworkflowable.fire_transition('tr1', u'go') self.commit() - self.group.clear_all_caches() - self.assertEquals(self.group.current_state.eid, state2.eid) - self.assertEquals(self.group.current_workflow.eid, mwf.eid) - self.assertEquals(self.group.main_workflow.eid, mwf.eid) - self.assertEquals(self.group.subworkflow_input_transition(), None) + group.clear_all_caches() + self.assertEquals(iworkflowable.current_state.eid, state2.eid) + self.assertEquals(iworkflowable.current_workflow.eid, mwf.eid) + self.assertEquals(iworkflowable.main_workflow.eid, mwf.eid) + self.assertEquals(iworkflowable.subworkflow_input_transition(), None) # force back to swfstate1 is impossible since we can't any more find # subworkflow input transition ex = self.assertRaises(ValidationError, - self.group.change_state, swfstate1, u'gadget') + iworkflowable.change_state, swfstate1, u'gadget') self.assertEquals(ex.errors, {'to_state-subject': "state doesn't belong to entity's workflow"}) self.rollback() # force back to state1 - self.group.change_state('state1', u'gadget') - self.group.fire_transition('swftr1', u'au') - self.group.clear_all_caches() - self.group.fire_transition('tr2', u'chapeau') + iworkflowable.change_state('state1', u'gadget') + iworkflowable.fire_transition('swftr1', u'au') + group.clear_all_caches() + iworkflowable.fire_transition('tr2', u'chapeau') self.commit() - self.group.clear_all_caches() - self.assertEquals(self.group.current_state.eid, state3.eid) - self.assertEquals(self.group.current_workflow.eid, mwf.eid) - self.assertEquals(self.group.main_workflow.eid, mwf.eid) - 
self.assertListEquals(parse_hist(self.group.workflow_history), + group.clear_all_caches() + self.assertEquals(iworkflowable.current_state.eid, state3.eid) + self.assertEquals(iworkflowable.current_workflow.eid, mwf.eid) + self.assertEquals(iworkflowable.main_workflow.eid, mwf.eid) + self.assertListEquals(parse_hist(iworkflowable.workflow_history), [('state1', 'swfstate1', 'swftr1', 'go'), ('swfstate1', 'swfstate2', 'tr1', 'go'), ('swfstate2', 'state2', 'swftr1', 'exiting from subworkflow subworkflow'), @@ -337,8 +345,9 @@ self.commit() group = self.request().create_entity('CWGroup', name=u'grp1') self.commit() + iworkflowable = group.cw_adapt_to('IWorkflowable') for trans in ('identify', 'release', 'close'): - group.fire_transition(trans) + iworkflowable.fire_transition(trans) self.commit() @@ -362,6 +371,7 @@ self.commit() group = self.request().create_entity('CWGroup', name=u'grp1') self.commit() + iworkflowable = group.cw_adapt_to('IWorkflowable') for trans, nextstate in (('identify', 'xsigning'), ('xabort', 'created'), ('identify', 'xsigning'), @@ -369,10 +379,10 @@ ('release', 'xsigning'), ('xabort', 'identified') ): - group.fire_transition(trans) + iworkflowable.fire_transition(trans) self.commit() group.clear_all_caches() - self.assertEquals(group.state, nextstate) + self.assertEquals(iworkflowable.state, nextstate) class CustomWorkflowTC(CubicWebTC): @@ -389,35 +399,38 @@ self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': wf.eid, 'x': self.member.eid}) self.member.clear_all_caches() - self.assertEquals(self.member.state, 'activated')# no change before commit + iworkflowable = self.member.cw_adapt_to('IWorkflowable') + self.assertEquals(iworkflowable.state, 'activated')# no change before commit self.commit() self.member.clear_all_caches() - self.assertEquals(self.member.current_workflow.eid, wf.eid) - self.assertEquals(self.member.state, 'asleep') - self.assertEquals(self.member.workflow_history, ()) + self.assertEquals(iworkflowable.current_workflow.eid, wf.eid) + self.assertEquals(iworkflowable.state, 'asleep') + self.assertEquals(iworkflowable.workflow_history, ()) def test_custom_wf_replace_state_keep_history(self): """member in inital state with some history, state is redirected and state change is recorded to history """ - self.member.fire_transition('deactivate') - self.member.fire_transition('activate') + iworkflowable = self.member.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') + iworkflowable.fire_transition('activate') wf = add_wf(self, 'CWUser') wf.add_state('asleep', initial=True) self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': wf.eid, 'x': self.member.eid}) self.commit() self.member.clear_all_caches() - self.assertEquals(self.member.current_workflow.eid, wf.eid) - self.assertEquals(self.member.state, 'asleep') - self.assertEquals(parse_hist(self.member.workflow_history), + self.assertEquals(iworkflowable.current_workflow.eid, wf.eid) + self.assertEquals(iworkflowable.state, 'asleep') + self.assertEquals(parse_hist(iworkflowable.workflow_history), [('activated', 'deactivated', 'deactivate', None), ('deactivated', 'activated', 'activate', None), ('activated', 'asleep', None, 'workflow changed to "CWUser"')]) def test_custom_wf_no_initial_state(self): """try to set a custom workflow which has no initial state""" - self.member.fire_transition('deactivate') + iworkflowable = self.member.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') wf = add_wf(self, 
'CWUser') wf.add_state('asleep') self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', @@ -438,7 +451,8 @@ """member in some state shared by the new workflow, nothing has to be done """ - self.member.fire_transition('deactivate') + iworkflowable = self.member.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') wf = add_wf(self, 'CWUser') wf.add_state('asleep', initial=True) self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', @@ -447,12 +461,12 @@ self.execute('DELETE X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': wf.eid, 'x': self.member.eid}) self.member.clear_all_caches() - self.assertEquals(self.member.state, 'asleep')# no change before commit + self.assertEquals(iworkflowable.state, 'asleep')# no change before commit self.commit() self.member.clear_all_caches() - self.assertEquals(self.member.current_workflow.name, "default user workflow") - self.assertEquals(self.member.state, 'activated') - self.assertEquals(parse_hist(self.member.workflow_history), + self.assertEquals(iworkflowable.current_workflow.name, "default user workflow") + self.assertEquals(iworkflowable.state, 'activated') + self.assertEquals(parse_hist(iworkflowable.workflow_history), [('activated', 'deactivated', 'deactivate', None), ('deactivated', 'asleep', None, 'workflow changed to "CWUser"'), ('asleep', 'activated', None, 'workflow changed to "default user workflow"'),]) @@ -473,28 +487,29 @@ def test_auto_transition_fired(self): wf = self.setup_custom_wf() user = self.create_user('member') + iworkflowable = user.cw_adapt_to('IWorkflowable') self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': wf.eid, 'x': user.eid}) self.commit() user.clear_all_caches() - self.assertEquals(user.state, 'asleep') - self.assertEquals([t.name for t in user.possible_transitions()], + self.assertEquals(iworkflowable.state, 'asleep') + self.assertEquals([t.name for t in iworkflowable.possible_transitions()], ['rest']) - user.fire_transition('rest') + iworkflowable.fire_transition('rest') self.commit() user.clear_all_caches() - self.assertEquals(user.state, 'asleep') - self.assertEquals([t.name for t in user.possible_transitions()], + self.assertEquals(iworkflowable.state, 'asleep') + self.assertEquals([t.name for t in iworkflowable.possible_transitions()], ['rest']) - self.assertEquals(parse_hist(user.workflow_history), + self.assertEquals(parse_hist(iworkflowable.workflow_history), [('asleep', 'asleep', 'rest', None)]) user.set_attributes(surname=u'toto') # fulfill condition self.commit() - user.fire_transition('rest') + iworkflowable.fire_transition('rest') self.commit() user.clear_all_caches() - self.assertEquals(user.state, 'dead') - self.assertEquals(parse_hist(user.workflow_history), + self.assertEquals(iworkflowable.state, 'dead') + self.assertEquals(parse_hist(iworkflowable.workflow_history), [('asleep', 'asleep', 'rest', None), ('asleep', 'asleep', 'rest', None), ('asleep', 'dead', 'sick', None),]) @@ -505,7 +520,8 @@ self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': wf.eid, 'x': user.eid}) self.commit() - self.assertEquals(user.state, 'dead') + iworkflowable = user.cw_adapt_to('IWorkflowable') + self.assertEquals(iworkflowable.state, 'dead') def test_auto_transition_initial_state_fired(self): wf = self.execute('Any WF WHERE ET default_workflow WF, ' @@ -517,14 +533,15 @@ self.commit() user = self.create_user('member', surname=u'toto') self.commit() - self.assertEquals(user.state, 'dead') + 
iworkflowable = user.cw_adapt_to('IWorkflowable') + self.assertEquals(iworkflowable.state, 'dead') class WorkflowHooksTC(CubicWebTC): def setUp(self): CubicWebTC.setUp(self) - self.wf = self.session.user.current_workflow + self.wf = self.session.user.cw_adapt_to('IWorkflowable').current_workflow self.session.set_pool() self.s_activated = self.wf.state_by_name('activated').eid self.s_deactivated = self.wf.state_by_name('deactivated').eid @@ -572,8 +589,9 @@ def test_transition_checking1(self): cnx = self.login('stduser') user = cnx.user(self.session) + iworkflowable = user.cw_adapt_to('IWorkflowable') ex = self.assertRaises(ValidationError, - user.fire_transition, 'activate') + iworkflowable.fire_transition, 'activate') self.assertEquals(self._cleanup_msg(ex.errors['by_transition-subject']), u"transition isn't allowed from") cnx.close() @@ -581,8 +599,9 @@ def test_transition_checking2(self): cnx = self.login('stduser') user = cnx.user(self.session) + iworkflowable = user.cw_adapt_to('IWorkflowable') ex = self.assertRaises(ValidationError, - user.fire_transition, 'dummy') + iworkflowable.fire_transition, 'dummy') self.assertEquals(self._cleanup_msg(ex.errors['by_transition-subject']), u"transition isn't allowed from") cnx.close() @@ -591,15 +610,16 @@ cnx = self.login('stduser') session = self.session user = cnx.user(session) - user.fire_transition('deactivate') + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') cnx.commit() session.set_pool() ex = self.assertRaises(ValidationError, - user.fire_transition, 'deactivate') + iworkflowable.fire_transition, 'deactivate') self.assertEquals(self._cleanup_msg(ex.errors['by_transition-subject']), u"transition isn't allowed from") # get back now - user.fire_transition('activate') + iworkflowable.fire_transition('activate') cnx.commit() cnx.close() diff -r 00b1b6b906cf -r 97c55baefa0c entities/wfobjs.py --- a/entities/wfobjs.py Thu Jul 15 12:03:13 2010 +0200 +++ b/entities/wfobjs.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,13 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""workflow definition and history related entities +"""workflow handling: +* entity types defining workflow (Workflow, State, Transition...) +* workflow history (TrInfo) +* adapter for workflowable entities (IWorkflowableAdapter) """ + __docformat__ = "restructuredtext en" from warnings import warn @@ -27,7 +31,8 @@ from logilab.common.compat import any from cubicweb.entities import AnyEntity, fetch_config -from cubicweb.interfaces import IWorkflowable +from cubicweb.view import EntityAdapter +from cubicweb.selectors import relation_possible from cubicweb.mixins import MI_REL_TRIGGERS class WorkflowException(Exception): pass @@ -47,15 +52,6 @@ return any(et for et in self.reverse_default_workflow if et.name == etype) - # XXX define parent() instead? what if workflow of multiple types? 
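The new wfobjs module header advertises an adapter for workflowable entities, and the added imports (EntityAdapter, relation_possible) are exactly what such an adapter declaration needs; the full IWorkflowableAdapter appears further down in this changeset. A stripped-down sketch of the declaration pattern, using a hypothetical registry id and property (not part of the patch):

.. sourcecode:: python

    from cubicweb.view import EntityAdapter
    from cubicweb.selectors import relation_possible

    class MyStateAdapter(EntityAdapter):
        """selectable on any entity type whose schema supports in_state"""
        __regid__ = 'IMyState'                      # illustrative id, looked up by cw_adapt_to
        __select__ = relation_possible('in_state')

        @property
        def state(self):
            # the adapted entity is available as self.entity
            return self.entity.in_state and self.entity.in_state[0].name or None

Calling entity.cw_adapt_to('IMyState') then returns an instance of this class, or None when the selector does not match.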
- def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.workflow_of: - return self.workflow_of[0].rest_path(), {'vid': 'workflow'} - return super(Workflow, self).after_deletion_path() - def iter_workflows(self, _done=None): """return an iterator on actual workflows, eg this workflow and its subworkflows @@ -177,7 +173,7 @@ {'os': todelstate.eid, 'ns': replacement.eid}) execute('SET X to_state NS WHERE X to_state OS, OS eid %(os)s, NS eid %(ns)s', {'os': todelstate.eid, 'ns': replacement.eid}) - todelstate.delete() + todelstate.cw_delete() class BaseTransition(AnyEntity): @@ -226,14 +222,6 @@ return False return True - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.transition_of: - return self.transition_of[0].rest_path(), {} - return super(BaseTransition, self).after_deletion_path() - def set_permissions(self, requiredgroups=(), conditions=(), reset=True): """set or add (if `reset` is False) groups and conditions for this transition @@ -277,7 +265,7 @@ try: return self.destination_state[0] except IndexError: - return entity.latest_trinfo().previous_state + return entity.cw_adapt_to('IWorkflowable').latest_trinfo().previous_state def potential_destinations(self): try: @@ -288,9 +276,6 @@ for previousstate in tr.reverse_allowed_transition: yield previousstate - def parent(self): - return self.workflow - class WorkflowTransition(BaseTransition): """customized class for WorkflowTransition entities""" @@ -331,7 +316,7 @@ return None if tostateeid is None: # go back to state from which we've entered the subworkflow - return entity.subworkflow_input_trinfo().previous_state + return entity.cw_adapt_to('IWorkflowable').subworkflow_input_trinfo().previous_state return self._cw.entity_from_eid(tostateeid) @cached @@ -358,9 +343,6 @@ def destination(self): return self.destination_state and self.destination_state[0] or None - def parent(self): - return self.reverse_subworkflow_exit[0] - class State(AnyEntity): """customized class for State entities""" @@ -371,10 +353,7 @@ @property def workflow(self): # take care, may be missing in multi-sources configuration - return self.state_of and self.state_of[0] - - def parent(self): - return self.workflow + return self.state_of and self.state_of[0] or None class TrInfo(AnyEntity): @@ -399,22 +378,99 @@ def transition(self): return self.by_transition and self.by_transition[0] or None - def parent(self): - return self.for_entity - class WorkflowableMixIn(object): """base mixin providing workflow helper methods for workflowable entities. 
This mixin will be automatically set on class supporting the 'in_state' relation (which implies supporting 'wf_info_for' as well) """ - __implements__ = (IWorkflowable,) + + @property + @deprecated('[3.5] use printable_state') + def displayable_state(self): + return self._cw._(self.state) + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').main_workflow") + def main_workflow(self): + return self.cw_adapt_to('IWorkflowable').main_workflow + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').current_workflow") + def current_workflow(self): + return self.cw_adapt_to('IWorkflowable').current_workflow + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').current_state") + def current_state(self): + return self.cw_adapt_to('IWorkflowable').current_state + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').state") + def state(self): + return self.cw_adapt_to('IWorkflowable').state + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').printable_state") + def printable_state(self): + return self.cw_adapt_to('IWorkflowable').printable_state + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').workflow_history") + def workflow_history(self): + return self.cw_adapt_to('IWorkflowable').workflow_history + + @deprecated('[3.5] get transition from current workflow and use its may_be_fired method') + def can_pass_transition(self, trname): + """return the Transition instance if the current user can fire the + transition with the given name, else None + """ + tr = self.current_workflow and self.current_workflow.transition_by_name(trname) + if tr and tr.may_be_fired(self.eid): + return tr + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').cwetype_workflow()") + def cwetype_workflow(self): + return self.cw_adapt_to('IWorkflowable').main_workflow() + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').latest_trinfo()") + def latest_trinfo(self): + return self.cw_adapt_to('IWorkflowable').latest_trinfo() + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').possible_transitions()") + def possible_transitions(self, type='normal'): + return self.cw_adapt_to('IWorkflowable').possible_transitions(type) + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').fire_transition()") + def fire_transition(self, tr, comment=None, commentformat=None): + return self.cw_adapt_to('IWorkflowable').fire_transition(tr, comment, commentformat) + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').change_state()") + def change_state(self, statename, comment=None, commentformat=None, tr=None): + return self.cw_adapt_to('IWorkflowable').change_state(statename, comment, commentformat, tr) + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').subworkflow_input_trinfo()") + def subworkflow_input_trinfo(self): + return self.cw_adapt_to('IWorkflowable').subworkflow_input_trinfo() + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').subworkflow_input_transition()") + def subworkflow_input_transition(self): + return self.cw_adapt_to('IWorkflowable').subworkflow_input_transition() + + +MI_REL_TRIGGERS[('in_state', 'subject')] = WorkflowableMixIn + + + +class IWorkflowableAdapter(WorkflowableMixIn, EntityAdapter): + """base adapter providing workflow helper methods for workflowable entities. 
+ """ + __regid__ = 'IWorkflowable' + __select__ = relation_possible('in_state') + + @cached + def cwetype_workflow(self): + """return the default workflow for entities of this type""" + # XXX CWEType method + wfrset = self._cw.execute('Any WF WHERE ET default_workflow WF, ' + 'ET name %(et)s', {'et': self.entity.__regid__}) + if wfrset: + return wfrset.get_entity(0, 0) + self.warning("can't find any workflow for %s", self.entity.__regid__) + return None @property def main_workflow(self): """return current workflow applied to this entity""" - if self.custom_workflow: - return self.custom_workflow[0] + if self.entity.custom_workflow: + return self.entity.custom_workflow[0] return self.cwetype_workflow() @property @@ -425,14 +481,14 @@ @property def current_state(self): """return current state entity""" - return self.in_state and self.in_state[0] or None + return self.entity.in_state and self.entity.in_state[0] or None @property def state(self): """return current state name""" try: - return self.in_state[0].name - except IndexError: + return self.current_state.name + except AttributeError: self.warning('entity %s has no state', self) return None @@ -449,26 +505,15 @@ """return the workflow history for this entity (eg ordered list of TrInfo entities) """ - return self.reverse_wf_info_for + return self.entity.reverse_wf_info_for def latest_trinfo(self): """return the latest transition information for this entity""" try: - return self.reverse_wf_info_for[-1] + return self.workflow_history[-1] except IndexError: return None - @cached - def cwetype_workflow(self): - """return the default workflow for entities of this type""" - # XXX CWEType method - wfrset = self._cw.execute('Any WF WHERE ET default_workflow WF, ' - 'ET name %(et)s', {'et': self.__regid__}) - if wfrset: - return wfrset.get_entity(0, 0) - self.warning("can't find any workflow for %s", self.__regid__) - return None - def possible_transitions(self, type='normal'): """generates transition that MAY be fired for the given entity, expected to be in this state @@ -483,16 +528,44 @@ {'x': self.current_state.eid, 'type': type, 'wfeid': self.current_workflow.eid}) for tr in rset.entities(): - if tr.may_be_fired(self.eid): + if tr.may_be_fired(self.entity.eid): yield tr + def subworkflow_input_trinfo(self): + """return the TrInfo which has be recorded when this entity went into + the current sub-workflow + """ + if self.main_workflow.eid == self.current_workflow.eid: + return # doesn't make sense + subwfentries = [] + for trinfo in self.workflow_history: + if (trinfo.transition and + trinfo.previous_state.workflow.eid != trinfo.new_state.workflow.eid): + # entering or leaving a subworkflow + if (subwfentries and + subwfentries[-1].new_state.workflow.eid == trinfo.previous_state.workflow.eid and + subwfentries[-1].previous_state.workflow.eid == trinfo.new_state.workflow.eid): + # leave + del subwfentries[-1] + else: + # enter + subwfentries.append(trinfo) + if not subwfentries: + return None + return subwfentries[-1] + + def subworkflow_input_transition(self): + """return the transition which has went through the current sub-workflow + """ + return getattr(self.subworkflow_input_trinfo(), 'transition', None) + def _add_trinfo(self, comment, commentformat, treid=None, tseid=None): kwargs = {} if comment is not None: kwargs['comment'] = comment if commentformat is not None: kwargs['comment_format'] = commentformat - kwargs['wf_info_for'] = self + kwargs['wf_info_for'] = self.entity if treid is not None: kwargs['by_transition'] = 
self._cw.entity_from_eid(treid) if tseid is not None: @@ -532,51 +605,3 @@ stateeid = state.eid # XXX try to find matching transition? return self._add_trinfo(comment, commentformat, tr and tr.eid, stateeid) - - def subworkflow_input_trinfo(self): - """return the TrInfo which has be recorded when this entity went into - the current sub-workflow - """ - if self.main_workflow.eid == self.current_workflow.eid: - return # doesn't make sense - subwfentries = [] - for trinfo in self.workflow_history: - if (trinfo.transition and - trinfo.previous_state.workflow.eid != trinfo.new_state.workflow.eid): - # entering or leaving a subworkflow - if (subwfentries and - subwfentries[-1].new_state.workflow.eid == trinfo.previous_state.workflow.eid and - subwfentries[-1].previous_state.workflow.eid == trinfo.new_state.workflow.eid): - # leave - del subwfentries[-1] - else: - # enter - subwfentries.append(trinfo) - if not subwfentries: - return None - return subwfentries[-1] - - def subworkflow_input_transition(self): - """return the transition which has went through the current sub-workflow - """ - return getattr(self.subworkflow_input_trinfo(), 'transition', None) - - def clear_all_caches(self): - super(WorkflowableMixIn, self).clear_all_caches() - clear_cache(self, 'cwetype_workflow') - - @deprecated('[3.5] get transition from current workflow and use its may_be_fired method') - def can_pass_transition(self, trname): - """return the Transition instance if the current user can fire the - transition with the given name, else None - """ - tr = self.current_workflow and self.current_workflow.transition_by_name(trname) - if tr and tr.may_be_fired(self.eid): - return tr - - @property - @deprecated('[3.5] use printable_state') - def displayable_state(self): - return self._cw._(self.state) - -MI_REL_TRIGGERS[('in_state', 'subject')] = WorkflowableMixIn diff -r 00b1b6b906cf -r 97c55baefa0c entity.py --- a/entity.py Thu Jul 15 12:03:13 2010 +0200 +++ b/entity.py Mon Jul 19 15:37:02 2010 +0200 @@ -19,11 +19,12 @@ __docformat__ = "restructuredtext en" +from copy import copy from warnings import warn from logilab.common import interface -from logilab.common.compat import all from logilab.common.decorators import cached +from logilab.common.deprecation import deprecated from logilab.mtconverter import TransformData, TransformError, xml_escape from rql.utils import rqlvar_maker @@ -51,7 +52,7 @@ return '1' -class Entity(AppObject, dict): +class Entity(AppObject): """an entity instance has e_schema automagically set on the class and instances has access to their issuing cursor. 
@@ -106,10 +107,10 @@ if not interface.implements(cls, iface): interface.extend(cls, iface) if role == 'subject': - setattr(cls, rschema.type, SubjectRelation(rschema)) + attr = rschema.type else: attr = 'reverse_%s' % rschema.type - setattr(cls, attr, ObjectRelation(rschema)) + setattr(cls, attr, Relation(rschema, role)) if mixins: # see etype class instantation in cwvreg.ETypeRegistry.etype_class method: # due to class dumping, cls is the generated top level class with actual @@ -124,6 +125,24 @@ cls.__bases__ = tuple(mixins) cls.info('plugged %s mixins on %s', mixins, cls) + fetch_attrs = ('modification_date',) + @classmethod + def fetch_order(cls, attr, var): + """class method used to control sort order when multiple entities of + this type are fetched + """ + return cls.fetch_unrelated_order(attr, var) + + @classmethod + def fetch_unrelated_order(cls, attr, var): + """class method used to control sort order when multiple entities of + this type are fetched to use in edition (eg propose them to create a + new relation on an edited entity). + """ + if attr == 'modification_date': + return '%s DESC' % var + return None + @classmethod def fetch_rql(cls, user, restriction=None, fetchattrs=None, mainvar='X', settype=True, ordermethod='fetch_order'): @@ -269,17 +288,17 @@ def __init__(self, req, rset=None, row=None, col=0): AppObject.__init__(self, req, rset=rset, row=row, col=col) - dict.__init__(self) - self._related_cache = {} + self._cw_related_cache = {} if rset is not None: self.eid = rset[row][col] else: self.eid = None - self._is_saved = True + self._cw_is_saved = True + self.cw_attr_cache = {} def __repr__(self): return '' % ( - self.e_schema, self.eid, self.keys(), id(self)) + self.e_schema, self.eid, self.cw_attr_cache.keys(), id(self)) def __json_encode__(self): """custom json dumps hook to dump the entity's eid @@ -298,12 +317,18 @@ def __cmp__(self, other): raise NotImplementedError('comparison not implemented for %s' % self.__class__) + def __contains__(self, key): + return key in self.cw_attr_cache + + def __iter__(self): + return iter(self.cw_attr_cache) + def __getitem__(self, key): if key == 'eid': warn('[3.7] entity["eid"] is deprecated, use entity.eid instead', DeprecationWarning, stacklevel=2) return self.eid - return super(Entity, self).__getitem__(key) + return self.cw_attr_cache[key] def __setitem__(self, attr, value): """override __setitem__ to update self.edited_attributes. 
@@ -321,13 +346,13 @@ DeprecationWarning, stacklevel=2) self.eid = value else: - super(Entity, self).__setitem__(attr, value) + self.cw_attr_cache[attr] = value # don't add attribute into skip_security if already in edited # attributes, else we may accidentaly skip a desired security check if hasattr(self, 'edited_attributes') and \ attr not in self.edited_attributes: self.edited_attributes.add(attr) - self.skip_security_attributes.add(attr) + self._cw_skip_security_attributes.add(attr) def __delitem__(self, attr): """override __delitem__ to update self.edited_attributes on cleanup of @@ -345,28 +370,35 @@ del self.entity['load_left'] """ - super(Entity, self).__delitem__(attr) + del self.cw_attr_cache[attr] if hasattr(self, 'edited_attributes'): self.edited_attributes.remove(attr) + def clear(self): + self.cw_attr_cache.clear() + + def get(self, key, default=None): + return self.cw_attr_cache.get(key, default) + def setdefault(self, attr, default): """override setdefault to update self.edited_attributes""" - super(Entity, self).setdefault(attr, default) + value = self.cw_attr_cache.setdefault(attr, default) # don't add attribute into skip_security if already in edited # attributes, else we may accidentaly skip a desired security check if hasattr(self, 'edited_attributes') and \ attr not in self.edited_attributes: self.edited_attributes.add(attr) - self.skip_security_attributes.add(attr) + self._cw_skip_security_attributes.add(attr) + return value def pop(self, attr, default=_marker): """override pop to update self.edited_attributes on cleanup of undesired changes introduced in the entity's dict. See `__delitem__` """ if default is _marker: - value = super(Entity, self).pop(attr) + value = self.cw_attr_cache.pop(attr) else: - value = super(Entity, self).pop(attr, default) + value = self.cw_attr_cache.pop(attr, default) if hasattr(self, 'edited_attributes') and attr in self.edited_attributes: self.edited_attributes.remove(attr) return value @@ -377,27 +409,24 @@ for attr, value in values.items(): self[attr] = value # use self.__setitem__ implementation - def rql_set_value(self, attr, value): - """call by rql execution plan when some attribute is modified - - don't use dict api in such case since we don't want attribute to be - added to skip_security_attributes. - """ - super(Entity, self).__setitem__(attr, value) + def cw_adapt_to(self, interface): + """return an adapter the entity to the given interface name. - def pre_add_hook(self): - """hook called by the repository before doing anything to add the entity - (before_add entity hooks have not been called yet). This give the - occasion to do weird stuff such as autocast (File -> Image for instance). - - This method must return the actual entity to be added. + return None if it can not be adapted. 
""" - return self + try: + cache = self._cw_adapters_cache + except AttributeError: + self._cw_adapters_cache = cache = {} + try: + return cache[interface] + except KeyError: + adapter = self._cw.vreg['adapters'].select_or_none( + interface, self._cw, entity=self) + cache[interface] = adapter + return adapter - def set_eid(self, eid): - self.eid = eid - - def has_eid(self): + def has_eid(self): # XXX cw_has_eid """return True if the entity has an attributed eid (False meaning that the entity has to be created """ @@ -407,38 +436,34 @@ except (ValueError, TypeError): return False - def is_saved(self): + def cw_is_saved(self): """during entity creation, there is some time during which the entity - has an eid attributed though it's not saved (eg during before_add_entity - hooks). You can use this method to ensure the entity has an eid *and* is - saved in its source. + has an eid attributed though it's not saved (eg during + 'before_add_entity' hooks). You can use this method to ensure the entity + has an eid *and* is saved in its source. """ - return self.has_eid() and self._is_saved + return self.has_eid() and self._cw_is_saved @cached - def metainformation(self): + def cw_metainformation(self): res = dict(zip(('type', 'source', 'extid'), self._cw.describe(self.eid))) res['source'] = self._cw.source_defs()[res['source']] return res - def clear_local_perm_cache(self, action): - for rqlexpr in self.e_schema.get_rqlexprs(action): - self._cw.local_perm_cache.pop((rqlexpr.eid, (('x', self.eid),)), None) - - def check_perm(self, action): + def cw_check_perm(self, action): self.e_schema.check_perm(self._cw, action, eid=self.eid) - def has_perm(self, action): + def cw_has_perm(self, action): return self.e_schema.has_perm(self._cw, action, eid=self.eid) - def view(self, __vid, __registry='views', w=None, **kwargs): + def view(self, __vid, __registry='views', w=None, **kwargs): # XXX cw_view """shortcut to apply a view on this entity""" view = self._cw.vreg[__registry].select(__vid, self._cw, rset=self.cw_rset, row=self.cw_row, col=self.cw_col, **kwargs) return view.render(row=self.cw_row, col=self.cw_col, w=w, **kwargs) - def absolute_url(self, *args, **kwargs): + def absolute_url(self, *args, **kwargs): # XXX cw_url """return an absolute url to view this entity""" # use *args since we don't want first argument to be "anonymous" to # avoid potential clash with kwargs @@ -451,7 +476,7 @@ # the object for use in the relation is tricky # XXX search_state is web specific if getattr(self._cw, 'search_state', ('normal',))[0] == 'normal': - kwargs['base_url'] = self.metainformation()['source'].get('base-url') + kwargs['base_url'] = self.cw_metainformation()['source'].get('base-url') if method in (None, 'view'): try: kwargs['_restpath'] = self.rest_path(kwargs.get('base_url')) @@ -463,7 +488,7 @@ kwargs['rql'] = 'Any X WHERE X eid %s' % self.eid return self._cw.build_url(method, **kwargs) - def rest_path(self, use_ext_eid=False): + def rest_path(self, use_ext_eid=False): # XXX cw_rest_path """returns a REST-like (relative) path for this entity""" mainattr, needcheck = self._rest_attr_info() etype = str(self.e_schema) @@ -486,12 +511,12 @@ path += '/eid' if mainattr == 'eid': if use_ext_eid: - value = self.metainformation()['extid'] + value = self.cw_metainformation()['extid'] else: value = self.eid return '%s/%s' % (path, self._cw.url_quote(value)) - def attr_metadata(self, attr, metadata): + def cw_attr_metadata(self, attr, metadata): """return a metadata for an attribute (None if unspecified)""" value = 
getattr(self, '%s_%s' % (attr, metadata), None) if value is None and metadata == 'encoding': @@ -499,7 +524,7 @@ return value def printable_value(self, attr, value=_marker, attrtype=None, - format='text/html', displaytime=True): + format='text/html', displaytime=True): # XXX cw_printable_value """return a displayable value (i.e. unicode string) which may contains html tags """ @@ -518,16 +543,16 @@ # description... if props.internationalizable: value = self._cw._(value) - attrformat = self.attr_metadata(attr, 'format') + attrformat = self.cw_attr_metadata(attr, 'format') if attrformat: - return self.mtc_transform(value, attrformat, format, - self._cw.encoding) + return self._cw_mtc_transform(value, attrformat, format, + self._cw.encoding) elif attrtype == 'Bytes': - attrformat = self.attr_metadata(attr, 'format') + attrformat = self.cw_attr_metadata(attr, 'format') if attrformat: - encoding = self.attr_metadata(attr, 'encoding') - return self.mtc_transform(value.getvalue(), attrformat, format, - encoding) + encoding = self.cw_attr_metadata(attr, 'encoding') + return self._cw_mtc_transform(value.getvalue(), attrformat, format, + encoding) return u'' value = printable_value(self._cw, attrtype, value, props, displaytime=displaytime) @@ -535,8 +560,8 @@ value = xml_escape(value) return value - def mtc_transform(self, data, format, target_format, encoding, - _engine=ENGINE): + def _cw_mtc_transform(self, data, format, target_format, encoding, + _engine=ENGINE): trdata = TransformData(data, format, encoding, appobject=self) data = _engine.convert(trdata, target_format).decode() if format == 'text/html': @@ -545,7 +570,13 @@ # entity cloning ########################################################## - def copy_relations(self, ceid): + def cw_copy(self): + thecopy = copy(self) + thecopy.cw_attr_cache = copy(self.cw_attr_cache) + thecopy._cw_related_cache = {} + return thecopy + + def copy_relations(self, ceid): # XXX cw_copy_relations """copy relations of the object with the given eid on this object (this method is called on the newly created copy, and ceid designates the original entity). 
@@ -574,7 +605,7 @@ rql = 'SET X %s V WHERE X eid %%(x)s, Y eid %%(y)s, Y %s V' % ( rschema.type, rschema.type) execute(rql, {'x': self.eid, 'y': ceid}) - self.clear_related_cache(rschema.type, 'subject') + self.cw_clear_relation_cache(rschema.type, 'subject') for rschema in self.e_schema.object_relations(): if rschema.meta: continue @@ -592,36 +623,32 @@ rql = 'SET V %s X WHERE X eid %%(x)s, Y eid %%(y)s, V %s Y' % ( rschema.type, rschema.type) execute(rql, {'x': self.eid, 'y': ceid}) - self.clear_related_cache(rschema.type, 'object') + self.cw_clear_relation_cache(rschema.type, 'object') # data fetching methods ################################################### @cached - def as_rset(self): + def as_rset(self): # XXX .cw_as_rset """returns a resultset containing `self` information""" rset = ResultSet([(self.eid,)], 'Any X WHERE X eid %(x)s', {'x': self.eid}, [(self.__regid__,)]) rset.req = self._cw return rset - def to_complete_relations(self): + def _cw_to_complete_relations(self): """by default complete final relations to when calling .complete()""" for rschema in self.e_schema.subject_relations(): if rschema.final: continue targets = rschema.objects(self.e_schema) - if len(targets) > 1: - # ambigous relations, the querier doesn't handle - # outer join correctly in this case - continue if rschema.inlined: matching_groups = self._cw.user.matching_groups - rdef = rschema.rdef(self.e_schema, targets[0]) - if matching_groups(rdef.get_groups('read')) and \ - all(matching_groups(e.get_groups('read')) for e in targets): + if all(matching_groups(e.get_groups('read')) and + rschema.rdef(self.e_schema, e).get_groups('read') + for e in targets): yield rschema, 'subject' - def to_complete_attributes(self, skip_bytes=True, skip_pwd=True): + def _cw_to_complete_attributes(self, skip_bytes=True, skip_pwd=True): for rschema, attrschema in self.e_schema.attribute_definitions(): # skip binary data by default if skip_bytes and attrschema.type == 'Bytes': @@ -638,7 +665,7 @@ yield attr _cw_completed = False - def complete(self, attributes=None, skip_bytes=True, skip_pwd=True): + def complete(self, attributes=None, skip_bytes=True, skip_pwd=True): # XXX cw_complete """complete this entity by adding missing attributes (i.e. 
query the repository to fill the entity) @@ -655,9 +682,9 @@ V = varmaker.next() rql = ['WHERE %s eid %%(x)s' % V] selected = [] - for attr in (attributes or self.to_complete_attributes(skip_bytes, skip_pwd)): + for attr in (attributes or self._cw_to_complete_attributes(skip_bytes, skip_pwd)): # if attribute already in entity, nothing to do - if self.has_key(attr): + if self.cw_attr_cache.has_key(attr): continue # case where attribute must be completed, but is not yet in entity var = varmaker.next() @@ -667,26 +694,20 @@ lastattr = len(selected) + 1 if attributes is None: # fetch additional relations (restricted to 0..1 relations) - for rschema, role in self.to_complete_relations(): + for rschema, role in self._cw_to_complete_relations(): rtype = rschema.type - if self.relation_cached(rtype, role): + if self.cw_relation_cached(rtype, role): continue + # at this point we suppose that: + # * this is a inlined relation + # * entity (self) is the subject + # * user has read perm on the relation and on the target entity + assert rschema.inlined + assert role == 'subject' var = varmaker.next() - targettype = rschema.targets(self.e_schema, role)[0] - rdef = rschema.role_rdef(self.e_schema, targettype, role) - card = rdef.role_cardinality(role) - assert card in '1?', '%s %s %s %s' % (self.e_schema, rtype, - role, card) - if role == 'subject': - if card == '1': - rql.append('%s %s %s' % (V, rtype, var)) - else: - rql.append('%s %s %s?' % (V, rtype, var)) - else: - if card == '1': - rql.append('%s %s %s' % (var, rtype, V)) - else: - rql.append('%s? %s %s' % (var, rtype, V)) + # keep outer join anyway, we don't want .complete to crash on + # missing mandatory relation (see #1058267) + rql.append('%s %s %s?' % (V, rtype, var)) selected.append(((rtype, role), var)) if selected: # select V, we need it as the left most selected variable @@ -706,9 +727,9 @@ rrset.req = self._cw else: rrset = self._cw.eid_rset(value) - self.set_related_cache(rtype, role, rrset) + self.cw_set_relation_cache(rtype, role, rrset) - def get_value(self, name): + def cw_attr_value(self, name): """get value for the attribute relation , query the repository to get the value if necessary. 
@@ -716,9 +737,9 @@ :param name: name of the attribute to get """ try: - value = self[name] + value = self.cw_attr_cache[name] except KeyError: - if not self.is_saved(): + if not self.cw_is_saved(): return None rql = "Any A WHERE X eid %%(x)s, X %s A" % name try: @@ -740,7 +761,7 @@ self[name] = value = None return value - def related(self, rtype, role='subject', limit=None, entities=False): + def related(self, rtype, role='subject', limit=None, entities=False): # XXX .cw_related """returns a resultset of related entities :param role: is the role played by 'self' in the relation ('subject' or 'object') @@ -748,19 +769,19 @@ :param entities: if True, the entites are returned; if False, a result set is returned """ try: - return self.related_cache(rtype, role, entities, limit) + return self._cw_relation_cache(rtype, role, entities, limit) except KeyError: pass if not self.has_eid(): if entities: return [] return self.empty_rset() - rql = self.related_rql(rtype, role) + rql = self.cw_related_rql(rtype, role) rset = self._cw.execute(rql, {'x': self.eid}) - self.set_related_cache(rtype, role, rset) + self.cw_set_relation_cache(rtype, role, rset) return self.related(rtype, role, limit, entities) - def related_rql(self, rtype, role='subject', targettypes=None): + def cw_related_rql(self, rtype, role='subject', targettypes=None): rschema = self._cw.vreg.schema[rtype] if role == 'subject': restriction = 'E eid %%(x)s, E %s X' % rtype @@ -809,7 +830,7 @@ # generic vocabulary methods ############################################## - def unrelated_rql(self, rtype, targettype, role, ordermethod=None, + def cw_unrelated_rql(self, rtype, targettype, role, ordermethod=None, vocabconstraints=True): """build a rql to fetch `targettype` entities unrelated to this entity using (rtype, role) relation. @@ -871,12 +892,12 @@ return rql, args def unrelated(self, rtype, targettype, role='subject', limit=None, - ordermethod=None): + ordermethod=None): # XXX .cw_unrelated """return a result set of target type objects that may be related by a given relation, with self as subject or object """ try: - rql, args = self.unrelated_rql(rtype, targettype, role, ordermethod) + rql, args = self.cw_unrelated_rql(rtype, targettype, role, ordermethod) except Unauthorized: return self._cw.empty_rset() if limit is not None: @@ -884,18 +905,19 @@ rql = '%s LIMIT %s WHERE %s' % (before, limit, after) return self._cw.execute(rql, args) - # relations cache handling ################################################ + # relations cache handling ################################################# - def relation_cached(self, rtype, role): - """return true if the given relation is already cached on the instance + def cw_relation_cached(self, rtype, role): + """return None if the given relation isn't already cached on the + instance, else the content of the cache (a 2-uple (rset, entities)). 
""" - return self._related_cache.get('%s_%s' % (rtype, role)) + return self._cw_related_cache.get('%s_%s' % (rtype, role)) - def related_cache(self, rtype, role, entities=True, limit=None): + def _cw_relation_cache(self, rtype, role, entities=True, limit=None): """return values for the given relation if it's cached on the instance, else raise `KeyError` """ - res = self._related_cache['%s_%s' % (rtype, role)][entities] + res = self._cw_related_cache['%s_%s' % (rtype, role)][entities] if limit is not None and limit < len(res): if entities: res = res[:limit] @@ -903,10 +925,10 @@ res = res.limit(limit) return res - def set_related_cache(self, rtype, role, rset, col=0): + def cw_set_relation_cache(self, rtype, role, rset): """set cached values for the given relation""" if rset: - related = list(rset.entities(col)) + related = list(rset.entities(0)) rschema = self._cw.vreg.schema.rschema(rtype) if role == 'subject': rcard = rschema.rdef(self.e_schema, related[0].e_schema).cardinality[1] @@ -916,23 +938,24 @@ target = 'subject' if rcard in '?1': for rentity in related: - rentity._related_cache['%s_%s' % (rtype, target)] = ( + rentity._cw_related_cache['%s_%s' % (rtype, target)] = ( self.as_rset(), (self,)) else: related = () - self._related_cache['%s_%s' % (rtype, role)] = (rset, related) + self._cw_related_cache['%s_%s' % (rtype, role)] = (rset, related) - def clear_related_cache(self, rtype=None, role=None): + def cw_clear_relation_cache(self, rtype=None, role=None): """clear cached values for the given relation or the entire cache if no relation is given """ if rtype is None: - self._related_cache = {} + self._cw_related_cache = {} + self._cw_adapters_cache = {} else: assert role - self._related_cache.pop('%s_%s' % (rtype, role), None) + self._cw_related_cache.pop('%s_%s' % (rtype, role), None) - def clear_all_caches(self): + def clear_all_caches(self): # XXX cw_clear_all_caches """flush all caches on this entity. Further attributes/relations access will triggers new database queries to get back values. @@ -942,10 +965,9 @@ # clear attributes cache haseid = 'eid' in self self._cw_completed = False - self.clear() + self.cw_attr_cache.clear() # clear relations cache - for rschema, _, role in self.e_schema.relation_definitions(): - self.clear_related_cache(rschema.type, role) + self.cw_clear_relation_cache() # rest path unique cache try: del self.__unique @@ -954,10 +976,10 @@ # raw edition utilities ################################################### - def set_attributes(self, **kwargs): + def set_attributes(self, **kwargs): # XXX cw_set_attributes _check_cw_unsafe(kwargs) assert kwargs - assert self._is_saved, "should not call set_attributes while entity "\ + assert self.cw_is_saved(), "should not call set_attributes while entity "\ "hasn't been saved yet" relations = [] for key in kwargs: @@ -972,7 +994,7 @@ # edited_attributes / skip_security_attributes machinery self.update(kwargs) - def set_relations(self, **kwargs): + def set_relations(self, **kwargs): # XXX cw_set_relations """add relations to the given object. To set a relation where this entity is the object of the relation, use 'reverse_' as argument name. 
@@ -996,28 +1018,42 @@ restr, ','.join(str(r.eid) for r in values)), {'x': self.eid}) - def delete(self, **kwargs): + def cw_delete(self, **kwargs): assert self.has_eid(), self.eid self._cw.execute('DELETE %s X WHERE X eid %%(x)s' % self.e_schema, {'x': self.eid}, **kwargs) # server side utilities ################################################### + def _cw_rql_set_value(self, attr, value): + """call by rql execution plan when some attribute is modified + + don't use dict api in such case since we don't want attribute to be + added to skip_security_attributes. + + This method is for internal use, you should not use it. + """ + self.cw_attr_cache[attr] = value + + def _cw_clear_local_perm_cache(self, action): + for rqlexpr in self.e_schema.get_rqlexprs(action): + self._cw.local_perm_cache.pop((rqlexpr.eid, (('x', self.eid),)), None) + @property - def skip_security_attributes(self): + def _cw_skip_security_attributes(self): try: - return self._skip_security_attributes + return self.__cw_skip_security_attributes except: - self._skip_security_attributes = set() - return self._skip_security_attributes + self.__cw_skip_security_attributes = set() + return self.__cw_skip_security_attributes - def set_defaults(self): + def _cw_set_defaults(self): """set default values according to the schema""" for attr, value in self.e_schema.defaults(): - if not self.has_key(attr): + if not self.cw_attr_cache.has_key(attr): self[str(attr)] = value - def check(self, creation=False): + def _cw_check(self, creation=False): """check this entity against its schema. Only final relation are checked here, constraint on actual relations are checked in hooks """ @@ -1040,60 +1076,33 @@ self.e_schema.check(self, creation=creation, _=_, relations=relations) - def fti_containers(self, _done=None): - if _done is None: - _done = set() - _done.add(self.eid) - containers = tuple(self.e_schema.fulltext_containers()) - if containers: - for rschema, target in containers: - if target == 'object': - targets = getattr(self, rschema.type) - else: - targets = getattr(self, 'reverse_%s' % rschema) - for entity in targets: - if entity.eid in _done: - continue - for container in entity.fti_containers(_done): - yield container - yielded = True - else: - yield self + @deprecated('[3.9] use entity.cw_attr_value(attr)') + def get_value(self, name): + return self.cw_attr_value(name) - def get_words(self): - """used by the full text indexer to get words to index + @deprecated('[3.9] use entity.cw_delete()') + def delete(self, **kwargs): + return self.cw_delete(**kwargs) - this method should only be used on the repository side since it depends - on the logilab.database package + @deprecated('[3.9] use entity.cw_attr_metadata(attr, metadata)') + def attr_metadata(self, attr, metadata): + return self.cw_attr_metadata(attr, metadata) - :rtype: list - :return: the list of indexable word of this entity - """ - from logilab.database.fti import tokenize - # take care to cases where we're modyfying the schema - pending = self._cw.transaction_data.setdefault('pendingrdefs', set()) - words = [] - for rschema in self.e_schema.indexable_attributes(): - if (self.e_schema, rschema) in pending: - continue - try: - value = self.printable_value(rschema, format='text/plain') - except TransformError: - continue - except: - self.exception("can't add value of %s to text index for entity %s", - rschema, self.eid) - continue - if value: - words += tokenize(value) - for rschema, role in self.e_schema.fulltext_relations(): - if role == 'subject': - for entity in 
getattr(self, rschema.type): - words += entity.get_words() - else: # if role == 'object': - for entity in getattr(self, 'reverse_%s' % rschema.type): - words += entity.get_words() - return words + @deprecated('[3.9] use entity.cw_has_perm(action)') + def has_perm(self, action): + return self.cw_has_perm(action) + + @deprecated('[3.9] use entity.cw_set_relation_cache(rtype, role, rset)') + def set_related_cache(self, rtype, role, rset): + self.cw_set_relation_cache(rtype, role, rset) + + @deprecated('[3.9] use entity.cw_clear_relation_cache(rtype, role, rset)') + def clear_related_cache(self, rtype=None, role=None): + self.cw_clear_relation_cache(rtype, role) + + @deprecated('[3.9] use entity.cw_related_rql(rtype, [role, [targettypes]])') + def related_rql(self, rtype, role='subject', targettypes=None): + return self.cw_related_rql(rtype, role, targettypes) # attribute and relation descriptors ########################################## @@ -1108,18 +1117,18 @@ def __get__(self, eobj, eclass): if eobj is None: return self - return eobj.get_value(self._attrname) + return eobj.cw_attr_value(self._attrname) def __set__(self, eobj, value): eobj[self._attrname] = value + class Relation(object): """descriptor that controls schema relation access""" - _role = None # for pylint - def __init__(self, rschema): - self._rschema = rschema + def __init__(self, rschema, role): self._rtype = rschema.type + self._role = role def __get__(self, eobj, eclass): if eobj is None: @@ -1131,14 +1140,6 @@ raise NotImplementedError -class SubjectRelation(Relation): - """descriptor that controls schema relation access""" - _role = 'subject' - -class ObjectRelation(Relation): - """descriptor that controls schema relation access""" - _role = 'object' - from logging import getLogger from cubicweb import set_log_methods set_log_methods(Entity, getLogger('cubicweb.entity')) diff -r 00b1b6b906cf -r 97c55baefa0c etwist/request.py --- a/etwist/request.py Thu Jul 15 12:03:13 2010 +0200 +++ b/etwist/request.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
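The block of ``@deprecated`` wrappers above keeps the pre-3.9 entity API alive while pointing callers at the new ``cw_``-prefixed methods. A minimal sketch of how calling code might be updated, with ``title`` as an assumed attribute name:

.. sourcecode:: python

    # pre-3.9 spellings still work but emit a '[3.9] use entity.cw_*' DeprecationWarning
    if entity.has_perm('delete'):
        entity.delete()

    # 3.9 spellings
    if entity.cw_has_perm('delete'):
        entity.cw_delete()

    title = entity.cw_attr_value('title')   # was entity.get_value('title')
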
-"""Twisted request handler for CubicWeb +"""Twisted request handler for CubicWeb""" -""" __docformat__ = "restructuredtext en" from datetime import datetime @@ -55,9 +54,9 @@ return self._twreq.method def relative_path(self, includeparams=True): - """return the normalized path of the request (ie at least relative - to the instance's root, but some other normalization may be needed - so that the returned path may be used to compare to generated urls + """return the normalized path of the request (ie at least relative to + the instance's root, but some other normalization may be needed so that + the returned path may be used to compare to generated urls :param includeparams: boolean indicating if GET form parameters should be kept in the path @@ -68,8 +67,8 @@ return path def get_header(self, header, default=None, raw=True): - """return the value associated with the given input header, - raise KeyError if the header is not set + """return the value associated with the given input header, raise + KeyError if the header is not set """ if raw: return self._headers_in.getRawHeaders(header, [default])[0] diff -r 00b1b6b906cf -r 97c55baefa0c etwist/server.py --- a/etwist/server.py Thu Jul 15 12:03:13 2010 +0200 +++ b/etwist/server.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""twisted server for CubicWeb web instances +"""twisted server for CubicWeb web instances""" -""" __docformat__ = "restructuredtext en" import sys @@ -39,11 +38,11 @@ from twisted.web import static, resource from twisted.web.server import NOT_DONE_YET -from cubicweb.web import dumps from logilab.common.decorators import monkeypatch from cubicweb import AuthenticationError, ConfigurationError, CW_EVENT_MANAGER +from cubicweb.utils import json_dumps from cubicweb.web import Redirect, DirectResponse, StatusResponse, LogOut from cubicweb.web.application import CubicWebPublisher from cubicweb.web.http_headers import generateDateTime @@ -99,12 +98,11 @@ class CubicWebRootResource(resource.Resource): - def __init__(self, config, debug=None): - self.debugmode = debug + def __init__(self, config, vreg=None): self.config = config # instantiate publisher here and not in init_publisher to get some # checks done before daemonization (eg versions consistency) - self.appli = CubicWebPublisher(config, debug=self.debugmode) + self.appli = CubicWebPublisher(config, vreg=vreg) self.base_url = config['base-url'] self.https_url = config['https-url'] self.children = {} @@ -118,8 +116,6 @@ # when we have an in-memory repository, clean unused sessions every XX # seconds and properly shutdown the server if config.repo_method == 'inmemory': - reactor.addSystemEventTrigger('before', 'shutdown', - self.shutdown_event) if config.pyro_enabled(): # if pyro is enabled, we have to register to the pyro name # server, create a pyro daemon, and create a task to handle pyro @@ -127,7 +123,10 @@ self.pyro_daemon = self.appli.repo.pyro_register() self.pyro_listen_timeout = 0.02 self.appli.repo.looping_task(1, self.pyro_loop_event) - self.appli.repo.start_looping_tasks() + if config.mode != 'test': + reactor.addSystemEventTrigger('before', 'shutdown', + self.shutdown_event) + self.appli.repo.start_looping_tasks() self.set_url_rewriter() CW_EVENT_MANAGER.bind('after-registry-reload', self.set_url_rewriter) @@ -156,6 +155,9 @@ pre_path = request.path.split('/')[1:] if pre_path[0] == 'https': pre_path.pop(0) + uiprops = self.config.https_uiprops + 
else: + uiprops = self.config.uiprops directory = pre_path[0] # Anything in data/, static/, fckeditor/ and the generated versioned # data directory is treated as static files @@ -165,7 +167,7 @@ if directory == 'static': return File(self.config.static_directory) if directory == 'fckeditor': - return File(self.config.ext_resources['FCKEDITOR_PATH']) + return File(uiprops['FCKEDITOR_PATH']) if directory != 'data': # versioned directory, use specific file with http cache # headers so their are cached for a very long time @@ -173,10 +175,10 @@ else: cls = File if path == 'fckeditor': - return cls(self.config.ext_resources['FCKEDITOR_PATH']) + return cls(uiprops['FCKEDITOR_PATH']) if path == directory: # recurse return self - datadir = self.config.locate_resource(path) + datadir, path = self.config.locate_resource(path) if datadir is None: return self # recurse self.debug('static file %s from %s', path, datadir) @@ -187,7 +189,10 @@ def render(self, request): """Render a page from the root resource""" # reload modified files in debug mode - if self.debugmode: + if self.config.debugmode: + self.config.uiprops.reload_if_needed() + if self.https_url: + self.config.https_uiprops.reload_if_needed() self.appli.vreg.reload_if_needed() if self.config['profile']: # default profiler don't trace threads return self.render_request(request) @@ -312,12 +317,12 @@ self.setResponseCode(http.BAD_REQUEST) if path in JSON_PATHS: # XXX better json path detection self.setHeader('content-type',"application/json") - body = dumps({'reason': 'request max size exceeded'}) + body = json_dumps({'reason': 'request max size exceeded'}) elif path in FRAME_POST_PATHS: # XXX better frame post path detection self.setHeader('content-type',"text/html") body = ('' % dumps( (False, 'request max size exceeded', None) )) + '' % json_dumps( (False, 'request max size exceeded', None) )) else: self.setHeader('content-type',"text/html") body = ("Processing Failed" @@ -394,20 +399,22 @@ LOGGER = getLogger('cubicweb.twisted') set_log_methods(CubicWebRootResource, LOGGER) -def run(config, debug): +def run(config, vreg=None, debug=None): + if debug is not None: + config.debugmode = debug + config.check_writeable_uid_directory(config.appdatahome) # create the site - root_resource = CubicWebRootResource(config, debug) + root_resource = CubicWebRootResource(config, vreg=vreg) website = server.Site(root_resource) # serve it via standard HTTP on port set in the configuration port = config['port'] or 8080 reactor.listenTCP(port, website) - logger = getLogger('cubicweb.twisted') - if not debug: + if not config.debugmode: if sys.platform == 'win32': raise ConfigurationError("Under windows, you must use the service management " "commands (e.g : 'net start my_instance)'") from logilab.common.daemon import daemonize - print 'instance starting in the background' + LOGGER.info('instance started in the background on %s', root_resource.base_url) if daemonize(config['pid-file']): return # child process root_resource.init_publisher() # before changing uid @@ -419,7 +426,7 @@ uid = getpwnam(config['uid']).pw_uid os.setuid(uid) root_resource.start_service() - logger.info('instance started on %s', root_resource.base_url) + LOGGER.info('instance started on %s', root_resource.base_url) # avoid annoying warnign if not in Main Thread signals = threading.currentThread().getName() == 'MainThread' if config['profile']: diff -r 00b1b6b906cf -r 97c55baefa0c etwist/twctl.py --- a/etwist/twctl.py Thu Jul 15 12:03:13 2010 +0200 +++ b/etwist/twctl.py Mon Jul 19 15:37:02 
2010 +0200 @@ -15,9 +15,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""cubicweb-clt handlers for twisted - -""" +"""cubicweb-clt handlers for twisted""" from cubicweb.toolsutils import CommandHandler from cubicweb.web.webctl import WebCreateHandler @@ -32,9 +30,9 @@ cmdname = 'start' cfgname = 'twisted' - def start_server(self, config, debug): + def start_server(self, config): from cubicweb.etwist import server - server.run(config, debug) + server.run(config) class TWStopHandler(CommandHandler): cmdname = 'stop' diff -r 00b1b6b906cf -r 97c55baefa0c goa/appobjects/components.py --- a/goa/appobjects/components.py Thu Jul 15 12:03:13 2010 +0200 +++ b/goa/appobjects/components.py Mon Jul 19 15:37:02 2010 +0200 @@ -98,7 +98,7 @@ def sendmail(self, recipient, subject, body): sender = '%s <%s>' % ( self.req.user.dc_title() or self.config['sender-name'], - self.req.user.get_email() or self.config['sender-addr']) + self.req.user.cw_adapt_to('IEmailable').get_email() or self.config['sender-addr']) mail.send_mail(sender=sender, to=recipient, subject=subject, body=body) diff -r 00b1b6b906cf -r 97c55baefa0c goa/db.py --- a/goa/db.py Thu Jul 15 12:03:13 2010 +0200 +++ b/goa/db.py Mon Jul 19 15:37:02 2010 +0200 @@ -233,7 +233,7 @@ return self.req.datastore_get(self.eid) except AttributeError: # self.req is not a server session return Get(self.eid) - self.set_defaults() + self._cw_set_defaults() values = self._to_gae_dict(convert=False) parent = key_name = _app = None if self._gaeinitargs is not None: @@ -343,7 +343,7 @@ self.req = req dbmodel = self.to_gae_model() key = Put(dbmodel) - self.set_eid(str(key)) + self.eid = str(key) if self.req is not None and self.rset is None: self.rset = rset_from_objs(self.req, dbmodel, ('eid',), 'Any X WHERE X eid %(x)s', {'x': self.eid}) @@ -409,7 +409,7 @@ def dynamic_properties(self): raise NotImplementedError('use eschema') - def is_saved(self): + def cw_is_saved(self): return self.has_eid() def parent(self): diff -r 00b1b6b906cf -r 97c55baefa0c goa/gaesource.py --- a/goa/gaesource.py Thu Jul 15 12:03:13 2010 +0200 +++ b/goa/gaesource.py Mon Jul 19 15:37:02 2010 +0200 @@ -49,15 +49,15 @@ except KeyError: pass else: - entity.clear_related_cache(rtype, role) + entity.cw_clear_relation_cache(rtype, role) if gaesubject.kind() == 'CWUser': for asession in session.repo._sessions.itervalues(): if asession.user.eid == subject: - asession.user.clear_related_cache(rtype, 'subject') + asession.user.cw_clear_relation_cache(rtype, 'subject') if gaeobject.kind() == 'CWUser': for asession in session.repo._sessions.itervalues(): if asession.user.eid == object: - asession.user.clear_related_cache(rtype, 'object') + asession.user.cw_clear_relation_cache(rtype, 'object') def _mark_modified(session, gaeentity): modified = session.transaction_data.setdefault('modifiedentities', {}) diff -r 00b1b6b906cf -r 97c55baefa0c goa/skel/loader.py --- a/goa/skel/loader.py Thu Jul 15 12:03:13 2010 +0200 +++ b/goa/skel/loader.py Mon Jul 19 15:37:02 2010 +0200 @@ -30,7 +30,7 @@ # apply monkey patches first goa.do_monkey_patch() # get instance's configuration (will be loaded from app.conf file) - GAEConfiguration.ext_resources['JAVASCRIPTS'].append('DATADIR/goa.js') + GAEConfiguration.uiprops['JAVASCRIPTS'].append('DATADIR/goa.js') config = GAEConfiguration('toto', APPLROOT) # create default groups create_groups() diff -r 00b1b6b906cf -r 97c55baefa0c goa/skel/main.py --- a/goa/skel/main.py Thu Jul 15 12:03:13 2010 +0200 +++ 
b/goa/skel/main.py Mon Jul 19 15:37:02 2010 +0200 @@ -31,7 +31,7 @@ # get instance's configuration (will be loaded from app.conf file) from cubicweb.goa.goaconfig import GAEConfiguration -GAEConfiguration.ext_resources['JAVASCRIPTS'].append('DATADIR/goa.js') +GAEConfiguration.uiprops['JAVASCRIPTS'].append('DATADIR/goa.js') config = GAEConfiguration('toto', APPLROOT) # dynamic objects registry diff -r 00b1b6b906cf -r 97c55baefa0c goa/test/unittest_rql.py --- a/goa/test/unittest_rql.py Thu Jul 15 12:03:13 2010 +0200 +++ b/goa/test/unittest_rql.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,6 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -""" - -""" from cubicweb.goa.testlib import * from cubicweb import Binary @@ -612,7 +609,7 @@ def test_error_unknown_eid(self): rset = self.req.execute('Any X WHERE X eid %(x)s', {'x': '1234'}) self.assertEquals(len(rset), 0) - self.blog.delete() + self.blog.cw_delete() rset = self.req.execute('Any X WHERE X eid %(x)s', {'x': self.blog.eid}) self.assertEquals(len(rset), 0) diff -r 00b1b6b906cf -r 97c55baefa0c goa/tools/laxctl.py --- a/goa/tools/laxctl.py Thu Jul 15 12:03:13 2010 +0200 +++ b/goa/tools/laxctl.py Mon Jul 19 15:37:02 2010 +0200 @@ -43,7 +43,7 @@ do_monkey_patch() from cubicweb.goa.goavreg import GAEVregistry from cubicweb.goa.goaconfig import GAEConfiguration - #WebConfiguration.ext_resources['JAVASCRIPTS'].append('DATADIR/goa.js') + #WebConfiguration.uiprops['JAVASCRIPTS'].append('DATADIR/goa.js') config = GAEConfiguration('toto', applroot) vreg = GAEVregistry(config) vreg.set_schema(config.load_schema()) diff -r 00b1b6b906cf -r 97c55baefa0c hooks/bookmark.py --- a/hooks/bookmark.py Thu Jul 15 12:03:13 2010 +0200 +++ b/hooks/bookmark.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
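Several hunks in this patch replace the old ``ext_resources`` dictionary with the new ``uiprops`` property sheets (with a separate ``https_uiprops`` used for the https base url in etwist/server.py). A minimal sketch of the corresponding change in cube startup code, with the javascript file name as an assumption:

.. sourcecode:: python

    # pre-3.9
    # config.ext_resources['JAVASCRIPTS'].append('DATADIR/mycube.js')

    # 3.9: ui resources are looked up through the uiprops property sheet
    config.uiprops['JAVASCRIPTS'].append('DATADIR/mycube.js')
    fckeditor_path = config.uiprops['FCKEDITOR_PATH']
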
-"""bookmark related hooks +"""bookmark related hooks""" -""" __docformat__ = "restructuredtext en" from cubicweb.server import hook @@ -28,7 +27,7 @@ def precommit_event(self): if not self.session.deleted_in_transaction(self.bookmark.eid): if not self.bookmark.bookmarked_by: - self.bookmark.delete() + self.bookmark.cw_delete() class DelBookmarkedByHook(hook.Hook): diff -r 00b1b6b906cf -r 97c55baefa0c hooks/integrity.py --- a/hooks/integrity.py Thu Jul 15 12:03:13 2010 +0200 +++ b/hooks/integrity.py Mon Jul 19 15:37:02 2010 +0200 @@ -27,7 +27,7 @@ from cubicweb import ValidationError from cubicweb.schema import RQLConstraint, RQLUniqueConstraint -from cubicweb.selectors import implements +from cubicweb.selectors import is_instance from cubicweb.uilib import soup2xhtml from cubicweb.server import hook from cubicweb.server.hook import set_operation @@ -253,7 +253,7 @@ """delete the composed of a composite relation when this relation is deleted """ __regid__ = 'checkownersgroup' - __select__ = IntegrityHook.__select__ & implements('CWGroup') + __select__ = IntegrityHook.__select__ & is_instance('CWGroup') events = ('before_delete_entity', 'before_update_entity') def __call__(self): @@ -293,7 +293,7 @@ class StripCWUserLoginHook(IntegrityHook): """ensure user logins are stripped""" __regid__ = 'stripuserlogin' - __select__ = IntegrityHook.__select__ & implements('CWUser') + __select__ = IntegrityHook.__select__ & is_instance('CWUser') events = ('before_add_entity', 'before_update_entity',) def __call__(self): diff -r 00b1b6b906cf -r 97c55baefa0c hooks/metadata.py --- a/hooks/metadata.py Thu Jul 15 12:03:13 2010 +0200 +++ b/hooks/metadata.py Mon Jul 19 15:37:02 2010 +0200 @@ -21,7 +21,7 @@ from datetime import datetime -from cubicweb.selectors import implements +from cubicweb.selectors import is_instance from cubicweb.server import hook from cubicweb.server.utils import eschema_eid @@ -140,7 +140,7 @@ class FixUserOwnershipHook(MetaDataHook): """when a user has been created, add owned_by relation on itself""" __regid__ = 'fixuserowner' - __select__ = MetaDataHook.__select__ & implements('CWUser') + __select__ = MetaDataHook.__select__ & is_instance('CWUser') events = ('after_add_entity',) def __call__(self): diff -r 00b1b6b906cf -r 97c55baefa0c hooks/notification.py --- a/hooks/notification.py Thu Jul 15 12:03:13 2010 +0200 +++ b/hooks/notification.py Mon Jul 19 15:37:02 2010 +0200 @@ -22,7 +22,7 @@ from logilab.common.textutils import normalize_text -from cubicweb.selectors import implements +from cubicweb.selectors import is_instance from cubicweb.server import hook from cubicweb.sobjects.supervising import SupervisionMailOp @@ -49,7 +49,7 @@ class StatusChangeHook(NotificationHook): """notify when a workflowable entity has its state modified""" __regid__ = 'notifystatuschange' - __select__ = NotificationHook.__select__ & implements('TrInfo') + __select__ = NotificationHook.__select__ & is_instance('TrInfo') events = ('after_add_entity',) def __call__(self): diff -r 00b1b6b906cf -r 97c55baefa0c hooks/security.py --- a/hooks/security.py Thu Jul 15 12:03:13 2010 +0200 +++ b/hooks/security.py Mon Jul 19 15:37:02 2010 +0200 @@ -29,9 +29,9 @@ def check_entity_attributes(session, entity, editedattrs=None, creation=False): eid = entity.eid eschema = entity.e_schema - # .skip_security_attributes is there to bypass security for attributes + # ._cw_skip_security_attributes is there to bypass security for attributes # set by hooks by modifying the entity's dictionnary - dontcheck = 
entity.skip_security_attributes + dontcheck = entity._cw_skip_security_attributes if editedattrs is None: try: editedattrs = entity.edited_attributes @@ -59,7 +59,7 @@ for values in session.transaction_data.pop('check_entity_perm_op'): entity = session.entity_from_eid(values[0]) action = values[1] - entity.check_perm(action) + entity.cw_check_perm(action) check_entity_attributes(session, entity, values[2:], creation=self.creation) @@ -110,10 +110,10 @@ def __call__(self): try: # check user has permission right now, if not retry at commit time - self.entity.check_perm('update') + self.entity.cw_check_perm('update') check_entity_attributes(self._cw, self.entity) except Unauthorized: - self.entity.clear_local_perm_cache('update') + self.entity._cw_clear_local_perm_cache('update') # save back editedattrs in case the entity is reedited later in the # same transaction, which will lead to edited_attributes being # overwritten @@ -127,7 +127,7 @@ events = ('before_delete_entity',) def __call__(self): - self.entity.check_perm('delete') + self.entity.cw_check_perm('delete') class BeforeAddRelationSecurityHook(SecurityHook): diff -r 00b1b6b906cf -r 97c55baefa0c hooks/syncschema.py --- a/hooks/syncschema.py Thu Jul 15 12:03:13 2010 +0200 +++ b/hooks/syncschema.py Mon Jul 19 15:37:02 2010 +0200 @@ -33,8 +33,9 @@ from logilab.common.testlib import mock_object from cubicweb import ValidationError -from cubicweb.selectors import implements -from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES, CONSTRAINTS, display_name +from cubicweb.selectors import is_instance +from cubicweb.schema import (META_RTYPES, VIRTUAL_RTYPES, CONSTRAINTS, + ETYPE_NAME_MAP, display_name) from cubicweb.server import hook, schemaserial as ss from cubicweb.server.sqlutils import SQL_PREFIX @@ -80,6 +81,11 @@ def add_inline_relation_column(session, etype, rtype): """add necessary column and index for an inlined relation""" + attrkey = '%s.%s' % (etype, rtype) + createdattrs = session.transaction_data.setdefault('createdattrs', set()) + if attrkey in createdattrs: + return + createdattrs.add(attrkey) table = SQL_PREFIX + etype column = SQL_PREFIX + rtype try: @@ -96,8 +102,6 @@ # is done by the dbhelper) session.pool.source('system').create_index(session, table, column) session.info('added index on %s(%s)', table, column) - session.transaction_data.setdefault('createdattrs', []).append( - '%s.%s' % (etype, rtype)) def check_valid_changes(session, entity, ro_attrs=('name', 'final')): @@ -115,6 +119,14 @@ raise ValidationError(entity.eid, errors) +class SyncSchemaHook(hook.Hook): + """abstract class for schema synchronization hooks (in the `syncschema` + category) + """ + __abstract__ = True + category = 'syncschema' + + # operations for low-level database alteration ################################ class DropTable(hook.Operation): @@ -129,6 +141,8 @@ self.session.system_sql('DROP TABLE %s' % self.table) self.info('dropped table %s', self.table) + # XXX revertprecommit_event + class DropRelationTable(DropTable): def __init__(self, session, rtype): @@ -156,6 +170,8 @@ self.error('dropping column not supported by the backend, handle ' 'it yourself (%s.%s)', table, column) + # XXX revertprecommit_event + # base operations for in-memory schema synchronization ######################## @@ -175,7 +191,7 @@ if not eschema.final: clear_cache(eschema, 'ordered_relations') - def commit_event(self): + def postcommit_event(self): rebuildinfered = self.session.data.get('rebuild-infered', True) repo = self.session.repo # commit event should not 
raise error, while set_schema has chances to @@ -195,60 +211,88 @@ class MemSchemaOperation(hook.Operation): """base class for schema operations""" - def __init__(self, session, kobj=None, **kwargs): - self.kobj = kobj - # once Operation.__init__ has been called, event may be triggered, so - # do this last ! + def __init__(self, session, **kwargs): hook.Operation.__init__(self, session, **kwargs) # every schema operation is triggering a schema update MemSchemaNotifyChanges(session) - def prepare_constraints(self, rdef): - # if constraints is already a list, reuse it (we're updating multiple - # constraints of the same rdef in the same transactions) - if not isinstance(rdef.constraints, list): - rdef.constraints = list(rdef.constraints) - self.constraints = rdef.constraints - - -class MemSchemaEarlyOperation(MemSchemaOperation): - def insert_index(self): - """schema operation which are inserted at the begining of the queue - (typically to add/remove entity or relation types) - """ - i = -1 - for i, op in enumerate(self.session.pending_operations): - if not isinstance(op, MemSchemaEarlyOperation): - return i - return i + 1 - # operations for high-level source database alteration ######################## -class SourceDbCWETypeRename(hook.Operation): +class CWETypeAddOp(MemSchemaOperation): + """after adding a CWEType entity: + * add it to the instance's schema + * create the necessary table + * set creation_date and modification_date by creating the necessary + CWAttribute entities + * add owned_by relation by creating the necessary CWRelation entity + """ + + def precommit_event(self): + session = self.session + entity = self.entity + schema = session.vreg.schema + etype = ybo.EntityType(eid=entity.eid, name=entity.name, + description=entity.description) + eschema = schema.add_entity_type(etype) + # create the necessary table + tablesql = y2sql.eschema2sql(session.pool.source('system').dbhelper, + eschema, prefix=SQL_PREFIX) + for sql in tablesql.split(';'): + if sql.strip(): + session.system_sql(sql) + # add meta relations + gmap = group_mapping(session) + cmap = ss.cstrtype_mapping(session) + for rtype in (META_RTYPES - VIRTUAL_RTYPES): + rschema = schema[rtype] + sampletype = rschema.subjects()[0] + desttype = rschema.objects()[0] + rdef = copy(rschema.rdef(sampletype, desttype)) + rdef.subject = mock_object(eid=entity.eid) + mock = mock_object(eid=None) + ss.execschemarql(session.execute, mock, ss.rdef2rql(rdef, cmap, gmap)) + + def revertprecommit_event(self): + # revert changes on in memory schema + self.session.vreg.schema.del_entity_type(self.entity.name) + # revert changes on database + self.session.system_sql('DROP TABLE %s%s' % (SQL_PREFIX, self.entity.name)) + + +class CWETypeRenameOp(MemSchemaOperation): """this operation updates physical storage accordingly""" oldname = newname = None # make pylint happy - def precommit_event(self): + def rename(self, oldname, newname): + self.session.vreg.schema.rename_entity_type(oldname, newname) # we need sql to operate physical changes on the system database sqlexec = self.session.system_sql - sqlexec('ALTER TABLE %s%s RENAME TO %s%s' % (SQL_PREFIX, self.oldname, - SQL_PREFIX, self.newname)) - self.info('renamed table %s to %s', self.oldname, self.newname) + sqlexec('ALTER TABLE %s%s RENAME TO %s%s' % (SQL_PREFIX, oldname, + SQL_PREFIX, newname)) + self.info('renamed table %s to %s', oldname, newname) sqlexec('UPDATE entities SET type=%s WHERE type=%s', - (self.newname, self.oldname)) + (newname, oldname)) sqlexec('UPDATE deleted_entities 
SET type=%s WHERE type=%s', - (self.newname, self.oldname)) + (newname, oldname)) + # XXX transaction records + + def precommit_event(self): + self.rename(self.oldname, self.newname) + + def revertprecommit_event(self): + self.rename(self.newname, self.oldname) -class SourceDbCWRTypeUpdate(hook.Operation): +class CWRTypeUpdateOp(MemSchemaOperation): """actually update some properties of a relation definition""" rschema = entity = values = None # make pylint happy + oldvalus = None def precommit_event(self): rschema = self.rschema if rschema.final: - return + return # watched changes to final relation type are unexpected session = self.session if 'fulltext_container' in self.values: for subjtype, objtype in rschema.rdefs: @@ -256,10 +300,14 @@ UpdateFTIndexOp) hook.set_operation(session, 'fti_update_etypes', objtype, UpdateFTIndexOp) + # update the in-memory schema first + self.oldvalues = dict( (attr, getattr(rschema, attr)) for attr in self.values) + self.rschema.__dict__.update(self.values) + # then make necessary changes to the system source database if not 'inlined' in self.values: return # nothing to do inlined = self.values['inlined'] - # check in-lining is necessary / possible + # check in-lining is possible when inlined if inlined: self.entity.check_inlined_allowed() # inlined changed, make necessary physical changes! @@ -295,7 +343,7 @@ except Exception, ex: # the column probably already exists. this occurs when the # entity's type has just been added or if the column has not - # been previously dropped + # been previously dropped (eg sqlite) self.error('error while altering table %s: %s', etype, ex) # copy existant data. # XXX don't use, it's not supported by sqlite (at least at when i tried it) @@ -315,8 +363,13 @@ # drop existant table DropRelationTable(session, rtype) + def revertprecommit_event(self): + # revert changes on in memory schema + self.rschema.__dict__.update(self.oldvalues) + # XXX revert changes on database -class SourceDbCWAttributeAdd(hook.Operation): + +class CWAttributeAddOp(MemSchemaOperation): """an attribute relation (CWAttribute) has been added: * add the necessary column * set default on this column if any and possible @@ -330,24 +383,18 @@ def init_rdef(self, **kwargs): entity = self.entity fromentity = entity.stype + rdefdef = self.rdefdef = ybo.RelationDefinition( + str(fromentity.name), entity.rtype.name, str(entity.otype.name), + description=entity.description, cardinality=entity.cardinality, + constraints=get_constraints(self.session, entity), + order=entity.ordernum, eid=entity.eid, **kwargs) + self.session.vreg.schema.add_relation_def(rdefdef) self.session.execute('SET X ordernum Y+1 ' 'WHERE X from_entity SE, SE eid %(se)s, X ordernum Y, ' 'X ordernum >= %(order)s, NOT X eid %(x)s', {'x': entity.eid, 'se': fromentity.eid, 'order': entity.ordernum or 0}) - subj = str(fromentity.name) - rtype = entity.rtype.name - obj = str(entity.otype.name) - constraints = get_constraints(self.session, entity) - rdef = ybo.RelationDefinition(subj, rtype, obj, - description=entity.description, - cardinality=entity.cardinality, - constraints=constraints, - order=entity.ordernum, - eid=entity.eid, - **kwargs) - MemSchemaRDefAdd(self.session, rdef) - return rdef + return rdefdef def precommit_event(self): session = self.session @@ -361,22 +408,24 @@ 'indexed': entity.indexed, 'fulltextindexed': entity.fulltextindexed, 'internationalizable': entity.internationalizable} - rdef = self.init_rdef(**props) - sysource = session.pool.source('system') + # update the 
in-memory schema first + rdefdef = self.init_rdef(**props) + # then make necessary changes to the system source database + syssource = session.pool.source('system') attrtype = y2sql.type_from_constraints( - sysource.dbhelper, rdef.object, rdef.constraints) + syssource.dbhelper, rdefdef.object, rdefdef.constraints) # XXX should be moved somehow into lgdb: sqlite doesn't support to # add a new column with UNIQUE, it should be added after the ALTER TABLE # using ADD INDEX - if sysource.dbdriver == 'sqlite' and 'UNIQUE' in attrtype: + if syssource.dbdriver == 'sqlite' and 'UNIQUE' in attrtype: extra_unique_index = True attrtype = attrtype.replace(' UNIQUE', '') else: extra_unique_index = False # added some str() wrapping query since some backend (eg psycopg) don't # allow unicode queries - table = SQL_PREFIX + rdef.subject - column = SQL_PREFIX + rdef.name + table = SQL_PREFIX + rdefdef.subject + column = SQL_PREFIX + rdefdef.name try: session.system_sql(str('ALTER TABLE %s ADD %s %s' % (table, column, attrtype)), @@ -389,7 +438,7 @@ self.error('error while altering table %s: %s', table, ex) if extra_unique_index or entity.indexed: try: - sysource.create_index(session, table, column, + syssource.create_index(session, table, column, unique=extra_unique_index) except Exception, ex: self.error('error while creating index for %s.%s: %s', @@ -397,28 +446,28 @@ # final relations are not infered, propagate schema = session.vreg.schema try: - eschema = schema.eschema(rdef.subject) + eschema = schema.eschema(rdefdef.subject) except KeyError: return # entity type currently being added # propagate attribute to children classes - rschema = schema.rschema(rdef.name) + rschema = schema.rschema(rdefdef.name) # if relation type has been inserted in the same transaction, its final # attribute is still set to False, so we've to ensure it's False rschema.final = True # XXX 'infered': True/False, not clear actually - props.update({'constraints': rdef.constraints, - 'description': rdef.description, - 'cardinality': rdef.cardinality, - 'constraints': rdef.constraints, - 'permissions': rdef.get_permissions(), - 'order': rdef.order, + props.update({'constraints': rdefdef.constraints, + 'description': rdefdef.description, + 'cardinality': rdefdef.cardinality, + 'constraints': rdefdef.constraints, + 'permissions': rdefdef.get_permissions(), + 'order': rdefdef.order, 'infered': False, 'eid': None }) cstrtypemap = ss.cstrtype_mapping(session) groupmap = group_mapping(session) - object = schema.eschema(rdef.object) + object = schema.eschema(rdefdef.object) for specialization in eschema.specialized_by(False): - if (specialization, rdef.object) in rschema.rdefs: + if (specialization, rdefdef.object) in rschema.rdefs: continue sperdef = RelationDefinitionSchema(specialization, rschema, object, props) @@ -430,14 +479,21 @@ session.system_sql('UPDATE %s SET %s=%%(default)s' % (table, column), {'default': default}) + def revertprecommit_event(self): + # revert changes on in memory schema + self.session.vreg.schema.del_relation_def( + self.rdefdef.subject, self.rdefdef.name, self.rdefdef.object) + # XXX revert changes on database -class SourceDbCWRelationAdd(SourceDbCWAttributeAdd): + +class CWRelationAddOp(CWAttributeAddOp): """an actual relation has been added: - * if this is an inlined relation, add the necessary column - else if it's the first instance of this relation type, add the - necessary table and set default permissions - * register an operation to add the relation definition to the - instance's schema on commit + + 
* add the relation definition to the instance's schema + + * if this is an inlined relation, add the necessary column else if it's the + first instance of this relation type, add the necessary table and set + default permissions constraints are handled by specific hooks """ @@ -446,280 +502,229 @@ def precommit_event(self): session = self.session entity = self.entity - rdef = self.init_rdef(composite=entity.composite) + # update the in-memory schema first + rdefdef = self.init_rdef(composite=entity.composite) + # then make necessary changes to the system source database schema = session.vreg.schema - rtype = rdef.name + rtype = rdefdef.name rschema = schema.rschema(rtype) # this have to be done before permissions setting if rschema.inlined: # need to add a column if the relation is inlined and if this is the # first occurence of "Subject relation Something" whatever Something - # and if it has not been added during other event of the same - # transaction - key = '%s.%s' % (rdef.subject, rtype) - try: - alreadythere = bool(rschema.objects(rdef.subject)) - except KeyError: - alreadythere = False - if not (alreadythere or - key in session.transaction_data.get('createdattrs', ())): - add_inline_relation_column(session, rdef.subject, rtype) + if len(rschema.objects(rdefdef.subject)) == 1: + add_inline_relation_column(session, rdefdef.subject, rtype) else: # need to create the relation if no relation definition in the # schema and if it has not been added during other event of the same # transaction - if not (rschema.subjects() or + if not (len(rschema.rdefs) > 1 or rtype in session.transaction_data.get('createdtables', ())): - try: - rschema = schema.rschema(rtype) - tablesql = y2sql.rschema2sql(rschema) - except KeyError: - # fake we add it to the schema now to get a correctly - # initialized schema but remove it before doing anything - # more dangerous... 
- rschema = schema.add_relation_type(rdef) - tablesql = y2sql.rschema2sql(rschema) - schema.del_relation_type(rtype) + rschema = schema.rschema(rtype) # create the necessary table - for sql in tablesql.split(';'): + for sql in y2sql.rschema2sql(rschema).split(';'): if sql.strip(): session.system_sql(sql) session.transaction_data.setdefault('createdtables', []).append( rtype) + # XXX revertprecommit_event -class SourceDbRDefUpdate(hook.Operation): + +class RDefDelOp(MemSchemaOperation): + """an actual relation has been removed""" + rdef = None # make pylint happy + + def precommit_event(self): + session = self.session + rdef = self.rdef + rschema = rdef.rtype + # make necessary changes to the system source database first + rdeftype = rschema.final and 'CWAttribute' or 'CWRelation' + execute = session.execute + rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R,' + 'R eid %%(x)s' % rdeftype, {'x': rschema.eid}) + lastrel = rset[0][0] == 0 + # we have to update physical schema systematically for final and inlined + # relations, but only if it's the last instance for this relation type + # for other relations + if (rschema.final or rschema.inlined): + rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R, ' + 'R eid %%(r)s, X from_entity E, E eid %%(e)s' + % rdeftype, + {'r': rschema.eid, 'e': rdef.subject.eid}) + if rset[0][0] == 0 and not session.deleted_in_transaction(rdef.subject.eid): + ptypes = session.transaction_data.setdefault('pendingrtypes', set()) + ptypes.add(rschema.type) + DropColumn(session, table=SQL_PREFIX + str(rdef.subject), + column=SQL_PREFIX + str(rschema)) + elif lastrel: + DropRelationTable(session, str(rschema)) + # then update the in-memory schema + rschema.del_relation_def(rdef.subject, rdef.object) + # if this is the last relation definition of this type, drop associated + # relation type + if lastrel and not session.deleted_in_transaction(rschema.eid): + execute('DELETE CWRType X WHERE X eid %(x)s', {'x': rschema.eid}) + + def revertprecommit_event(self): + # revert changes on in memory schema + # + # Note: add_relation_def takes a RelationDefinition, not a + # RelationDefinitionSchema, needs to fake it + self.rdef.name = str(self.rdef.rtype) + self.session.vreg.schema.add_relation_def(self.rdef) + + + +class RDefUpdateOp(MemSchemaOperation): """actually update some properties of a relation definition""" - rschema = values = None # make pylint happy + rschema = rdefkey = values = None # make pylint happy + oldvalues = None + indexed_changed = null_allowed_changed = False def precommit_event(self): session = self.session - etype = self.kobj[0] - table = SQL_PREFIX + etype - column = SQL_PREFIX + self.rschema.type + rdef = self.rdef = self.rschema.rdefs[self.rdefkey] + # update the in-memory schema first + self.oldvalues = dict( (attr, getattr(rdef, attr)) for attr in self.values) + rdef.update(self.values) + # then make necessary changes to the system source database + syssource = session.pool.source('system') if 'indexed' in self.values: - sysource = session.pool.source('system') - if self.values['indexed']: - sysource.create_index(session, table, column) - else: - sysource.drop_index(session, table, column) - if 'cardinality' in self.values and self.rschema.final: - syssource = session.pool.source('system') - if not syssource.dbhelper.alter_column_support: - # not supported (and NOT NULL not set by yams in that case, so - # no worry) XXX (syt) then should we set NOT NULL below ?? 
- return - atype = self.rschema.objects(etype)[0] - constraints = self.rschema.rdef(etype, atype).constraints - coltype = y2sql.type_from_constraints(syssource.dbhelper, atype, constraints, - creating=False) - # XXX check self.values['cardinality'][0] actually changed? - syssource.set_null_allowed(self.session, table, column, coltype, - self.values['cardinality'][0] != '1') + syssource.update_rdef_indexed(session, rdef) + self.indexed_changed = True + if 'cardinality' in self.values and (rdef.rtype.final or + rdef.rtype.inlined) \ + and self.values['cardinality'][0] != self.oldvalues['cardinality'][0]: + syssource.update_rdef_null_allowed(self.session, rdef) + self.null_allowed_changed = True if 'fulltextindexed' in self.values: - hook.set_operation(session, 'fti_update_etypes', etype, + hook.set_operation(session, 'fti_update_etypes', rdef.subject, UpdateFTIndexOp) + def revertprecommit_event(self): + # revert changes on in memory schema + self.rdef.update(self.oldvalues) + # revert changes on database + syssource = self.session.pool.source('system') + if self.indexed_changed: + syssource.update_rdef_indexed(self.session, self.rdef) + if self.null_allowed_changed: + syssource.update_rdef_null_allowed(self.session, self.rdef) -class SourceDbCWConstraintAdd(hook.Operation): + +def _set_modifiable_constraints(rdef): + # for proper in-place modification of in-memory schema: if rdef.constraints + # is already a list, reuse it (we're updating multiple constraints of the + # same rdef in the same transactions) + if not isinstance(rdef.constraints, list): + rdef.constraints = list(rdef.constraints) + + +class CWConstraintDelOp(MemSchemaOperation): + """actually remove a constraint of a relation definition""" + rdef = oldcstr = newcstr = None # make pylint happy + size_cstr_changed = unique_changed = False + + def precommit_event(self): + session = self.session + rdef = self.rdef + # in-place modification of in-memory schema first + _set_modifiable_constraints(rdef) + rdef.constraints.remove(self.oldcstr) + # then update database: alter the physical schema on size/unique + # constraint changes + syssource = session.pool.source('system') + cstrtype = self.oldcstr.type() + if cstrtype == 'SizeConstraint': + syssource.update_rdef_column(session, rdef) + self.size_cstr_changed = True + elif cstrtype == 'UniqueConstraint': + syssource.update_rdef_unique(session, rdef) + self.unique_changed = True + + def revertprecommit_event(self): + # revert changes on in memory schema + if self.newcstr is not None: + self.rdef.constraints.remove(self.newcstr) + if self.oldcstr is not None: + self.rdef.constraints.append(self.oldcstr) + # revert changes on database + syssource = self.session.pool.source('system') + if self.size_cstr_changed: + syssource.update_rdef_column(self.session, self.rdef) + if self.unique_changed: + syssource.update_rdef_unique(self.session, self.rdef) + + +class CWConstraintAddOp(CWConstraintDelOp): """actually update constraint of a relation definition""" entity = None # make pylint happy - cancelled = False def precommit_event(self): - rdef = self.entity.reverse_constrained_by[0] session = self.session + rdefentity = self.entity.reverse_constrained_by[0] # when the relation is added in the same transaction, the constraint # object is created by the operation adding the attribute or relation, # so there is nothing to do here - if session.added_in_transaction(rdef.eid): + if session.added_in_transaction(rdefentity.eid): return - rdefschema = session.vreg.schema.schema_by_eid(rdef.eid) - 
subjtype, rtype, objtype = rdefschema.as_triple() + rdef = self.rdef = session.vreg.schema.schema_by_eid(rdefentity.eid) cstrtype = self.entity.type - oldcstr = rtype.rdef(subjtype, objtype).constraint_by_type(cstrtype) - newcstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value) - table = SQL_PREFIX + str(subjtype) - column = SQL_PREFIX + str(rtype) - # alter the physical schema on size constraint changes - if newcstr.type() == 'SizeConstraint' and ( - oldcstr is None or oldcstr.max != newcstr.max): - syssource = self.session.pool.source('system') - card = rtype.rdef(subjtype, objtype).cardinality - coltype = y2sql.type_from_constraints(syssource.dbhelper, objtype, - [newcstr], creating=False) - try: - syssource.change_col_type(session, table, column, coltype, card[0] != '1') - self.info('altered column %s of table %s: now %s', - column, table, coltype) - except Exception, ex: - # not supported by sqlite for instance - self.error('error while altering table %s: %s', table, ex) + oldcstr = self.oldcstr = rdef.constraint_by_type(cstrtype) + newcstr = self.newcstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value) + # in-place modification of in-memory schema first + _set_modifiable_constraints(rdef) + newcstr.eid = self.entity.eid + if oldcstr is not None: + rdef.constraints.remove(oldcstr) + rdef.constraints.append(newcstr) + # then update database: alter the physical schema on size/unique + # constraint changes + syssource = session.pool.source('system') + if cstrtype == 'SizeConstraint' and (oldcstr is None or + oldcstr.max != newcstr.max): + syssource.update_rdef_column(session, rdef) + self.size_cstr_changed = True elif cstrtype == 'UniqueConstraint' and oldcstr is None: - session.pool.source('system').create_index( - self.session, table, column, unique=True) - - -class SourceDbCWConstraintDel(hook.Operation): - """actually remove a constraint of a relation definition""" - rtype = subjtype = None # make pylint happy - - def precommit_event(self): - cstrtype = self.cstr.type() - table = SQL_PREFIX + str(self.rdef.subject) - column = SQL_PREFIX + str(self.rdef.rtype) - # alter the physical schema on size/unique constraint changes - if cstrtype == 'SizeConstraint': - syssource = self.session.pool.source('system') - coltype = y2sql.type_from_constraints(syssource.dbhelper, - self.rdef.object, [], - creating=False) - try: - syssource.change_col_type(session, table, column, coltype, - self.rdef.cardinality[0] != '1') - self.info('altered column %s of table %s: now %s', - column, table, coltype) - except Exception, ex: - # not supported by sqlite for instance - self.error('error while altering table %s: %s', table, ex) - elif cstrtype == 'UniqueConstraint': - self.session.pool.source('system').drop_index( - self.session, table, column, unique=True) + syssource.update_rdef_unique(session, rdef) + self.unique_changed = True # operations for in-memory schema synchronization ############################# -class MemSchemaCWETypeAdd(MemSchemaEarlyOperation): - """actually add the entity type to the instance's schema""" - eid = None # make pylint happy - def commit_event(self): - self.session.vreg.schema.add_entity_type(self.kobj) - - -class MemSchemaCWETypeRename(MemSchemaOperation): - """this operation updates physical storage accordingly""" - oldname = newname = None # make pylint happy - - def commit_event(self): - self.session.vreg.schema.rename_entity_type(self.oldname, self.newname) - - class MemSchemaCWETypeDel(MemSchemaOperation): """actually remove the entity type from the 
instance's schema""" - def commit_event(self): - try: - # del_entity_type also removes entity's relations - self.session.vreg.schema.del_entity_type(self.kobj) - except KeyError: - # s/o entity type have already been deleted - pass + def postcommit_event(self): + # del_entity_type also removes entity's relations + self.session.vreg.schema.del_entity_type(self.etype) -class MemSchemaCWRTypeAdd(MemSchemaEarlyOperation): +class MemSchemaCWRTypeAdd(MemSchemaOperation): """actually add the relation type to the instance's schema""" - eid = None # make pylint happy - def commit_event(self): - self.session.vreg.schema.add_relation_type(self.kobj) - + def precommit_event(self): + self.session.vreg.schema.add_relation_type(self.rtypedef) -class MemSchemaCWRTypeUpdate(MemSchemaOperation): - """actually update some properties of a relation definition""" - rschema = values = None # make pylint happy - - def commit_event(self): - # structure should be clean, not need to remove entity's relations - # at this point - self.rschema.__dict__.update(self.values) + def revertprecommit_event(self): + self.session.vreg.schema.del_relation_type(self.rtypedef.name) class MemSchemaCWRTypeDel(MemSchemaOperation): """actually remove the relation type from the instance's schema""" - def commit_event(self): + def postcommit_event(self): try: - self.session.vreg.schema.del_relation_type(self.kobj) + self.session.vreg.schema.del_relation_type(self.rtype) except KeyError: # s/o entity type have already been deleted pass -class MemSchemaRDefAdd(MemSchemaEarlyOperation): - """actually add the attribute relation definition to the instance's - schema - """ - def commit_event(self): - self.session.vreg.schema.add_relation_def(self.kobj) - - -class MemSchemaRDefUpdate(MemSchemaOperation): - """actually update some properties of a relation definition""" - rschema = values = None # make pylint happy - - def commit_event(self): - # structure should be clean, not need to remove entity's relations - # at this point - self.rschema.rdefs[self.kobj].update(self.values) - - -class MemSchemaRDefDel(MemSchemaOperation): - """actually remove the relation definition from the instance's schema""" - def commit_event(self): - subjtype, rtype, objtype = self.kobj - try: - self.session.vreg.schema.del_relation_def(subjtype, rtype, objtype) - except KeyError: - # relation type may have been already deleted - pass - - -class MemSchemaCWConstraintAdd(MemSchemaOperation): - """actually update constraint of a relation definition - - has to be called before SourceDbCWConstraintAdd - """ - cancelled = False - - def precommit_event(self): - rdef = self.entity.reverse_constrained_by[0] - # when the relation is added in the same transaction, the constraint - # object is created by the operation adding the attribute or relation, - # so there is nothing to do here - if self.session.added_in_transaction(rdef.eid): - self.cancelled = True - return - rdef = self.session.vreg.schema.schema_by_eid(rdef.eid) - self.prepare_constraints(rdef) - cstrtype = self.entity.type - self.cstr = rdef.constraint_by_type(cstrtype) - self.newcstr = CONSTRAINTS[cstrtype].deserialize(self.entity.value) - self.newcstr.eid = self.entity.eid - - def commit_event(self): - if self.cancelled: - return - # in-place modification - if not self.cstr is None: - self.constraints.remove(self.cstr) - self.constraints.append(self.newcstr) - - -class MemSchemaCWConstraintDel(MemSchemaOperation): - """actually remove a constraint of a relation definition - - has to be called before 
SourceDbCWConstraintDel - """ - rtype = subjtype = objtype = None # make pylint happy - def precommit_event(self): - self.prepare_constraints(self.rdef) - - def commit_event(self): - self.constraints.remove(self.cstr) - - class MemSchemaPermissionAdd(MemSchemaOperation): """synchronize schema when a *_permission relation has been added on a group """ - def commit_event(self): + def precommit_event(self): """the observed connections pool has been commited""" try: erschema = self.session.vreg.schema.schema_by_eid(self.eid) @@ -740,13 +745,15 @@ perms.append(perm) erschema.set_action_permissions(self.action, perms) + # XXX revertprecommit_event + class MemSchemaPermissionDel(MemSchemaPermissionAdd): """synchronize schema when a *_permission relation has been deleted from a group """ - def commit_event(self): + def precommit_event(self): """the observed connections pool has been commited""" try: erschema = self.session.vreg.schema.schema_by_eid(self.eid) @@ -771,19 +778,23 @@ self.error('can\'t remove permission %s for %s on %s', perm, self.action, erschema) + # XXX revertprecommit_event + class MemSchemaSpecializesAdd(MemSchemaOperation): - def commit_event(self): + def precommit_event(self): eschema = self.session.vreg.schema.schema_by_eid(self.etypeeid) parenteschema = self.session.vreg.schema.schema_by_eid(self.parentetypeeid) eschema._specialized_type = parenteschema.type parenteschema._specialized_by.append(eschema.type) + # XXX revertprecommit_event + class MemSchemaSpecializesDel(MemSchemaOperation): - def commit_event(self): + def precommit_event(self): try: eschema = self.session.vreg.schema.schema_by_eid(self.etypeeid) parenteschema = self.session.vreg.schema.schema_by_eid(self.parentetypeeid) @@ -793,10 +804,7 @@ eschema._specialized_type = None parenteschema._specialized_by.remove(eschema.type) - -class SyncSchemaHook(hook.Hook): - __abstract__ = True - category = 'syncschema' + # XXX revertprecommit_event # CWEType hooks ################################################################ @@ -808,7 +816,7 @@ * instantiate an operation to delete the entity type on commit """ __regid__ = 'syncdelcwetype' - __select__ = SyncSchemaHook.__select__ & implements('CWEType') + __select__ = SyncSchemaHook.__select__ & is_instance('CWEType') events = ('before_delete_entity',) def __call__(self): @@ -817,9 +825,10 @@ if name in CORE_ETYPES: raise ValidationError(self.entity.eid, {None: self._cw._('can\'t be deleted')}) # delete every entities of this type - self._cw.execute('DELETE %s X' % name) + if not name in ETYPE_NAME_MAP: + self._cw.execute('DELETE %s X' % name) + MemSchemaCWETypeDel(self._cw, etype=name) DropTable(self._cw, table=SQL_PREFIX + name) - MemSchemaCWETypeDel(self._cw, name) class AfterDelCWETypeHook(DelCWETypeHook): @@ -847,42 +856,7 @@ entity = self.entity if entity.get('final'): return - schema = self._cw.vreg.schema - name = entity['name'] - etype = ybo.EntityType(name=name, description=entity.get('description'), - meta=entity.get('meta')) # don't care about final - # fake we add it to the schema now to get a correctly initialized schema - # but remove it before doing anything more dangerous... 
- schema = self._cw.vreg.schema - eschema = schema.add_entity_type(etype) - # generate table sql and rql to add metadata - tablesql = y2sql.eschema2sql(self._cw.pool.source('system').dbhelper, - eschema, prefix=SQL_PREFIX) - rdefrqls = [] - gmap = group_mapping(self._cw) - cmap = ss.cstrtype_mapping(self._cw) - for rtype in (META_RTYPES - VIRTUAL_RTYPES): - rschema = schema[rtype] - sampletype = rschema.subjects()[0] - desttype = rschema.objects()[0] - rdef = copy(rschema.rdef(sampletype, desttype)) - rdef.subject = mock_object(eid=entity.eid) - mock = mock_object(eid=None) - rdefrqls.append( (mock, tuple(ss.rdef2rql(rdef, cmap, gmap))) ) - # now remove it ! - schema.del_entity_type(name) - # create the necessary table - for sql in tablesql.split(';'): - if sql.strip(): - self._cw.system_sql(sql) - # register operation to modify the schema on commit - # this have to be done before adding other relations definitions - # or permission settings - etype.eid = entity.eid - MemSchemaCWETypeAdd(self._cw, etype) - # add meta relations - for rdef, relrqls in rdefrqls: - ss.execschemarql(self._cw.execute, rdef, relrqls) + CWETypeAddOp(self._cw, entity=entity) class BeforeUpdateCWETypeHook(DelCWETypeHook): @@ -895,12 +869,9 @@ check_valid_changes(self._cw, entity, ro_attrs=('final',)) # don't use getattr(entity, attr), we would get the modified value if any if 'name' in entity.edited_attributes: - newname = entity.pop('name') - oldname = entity.name + oldname, newname = hook.entity_oldnewvalue(entity, 'name') if newname.lower() != oldname.lower(): - SourceDbCWETypeRename(self._cw, oldname=oldname, newname=newname) - MemSchemaCWETypeRename(self._cw, oldname=oldname, newname=newname) - entity['name'] = newname + CWETypeRenameOp(self._cw, oldname=oldname, newname=newname) # CWRType hooks ################################################################ @@ -912,7 +883,7 @@ * instantiate an operation to delete the relation type on commit """ __regid__ = 'syncdelcwrtype' - __select__ = SyncSchemaHook.__select__ & implements('CWRType') + __select__ = SyncSchemaHook.__select__ & is_instance('CWRType') events = ('before_delete_entity',) def __call__(self): @@ -924,7 +895,7 @@ {'x': self.entity.eid}) self._cw.execute('DELETE CWRelation X WHERE X relation_type Y, Y eid %(x)s', {'x': self.entity.eid}) - MemSchemaCWRTypeDel(self._cw, name) + MemSchemaCWRTypeDel(self._cw, rtype=name) class AfterAddCWRTypeHook(DelCWRTypeHook): @@ -939,13 +910,12 @@ def __call__(self): entity = self.entity - rtype = ybo.RelationType(name=entity.name, - description=entity.get('description'), - meta=entity.get('meta', False), - inlined=entity.get('inlined', False), - symmetric=entity.get('symmetric', False), - eid=entity.eid) - MemSchemaCWRTypeAdd(self._cw, rtype) + rtypedef = ybo.RelationType(name=entity.name, + description=entity.description, + inlined=entity.get('inlined', False), + symmetric=entity.get('symmetric', False), + eid=entity.eid) + MemSchemaCWRTypeAdd(self._cw, rtypedef=rtypedef) class BeforeUpdateCWRTypeHook(DelCWRTypeHook): @@ -964,9 +934,8 @@ newvalues[prop] = entity[prop] if newvalues: rschema = self._cw.vreg.schema.rschema(entity.name) - SourceDbCWRTypeUpdate(self._cw, rschema=rschema, entity=entity, - values=newvalues) - MemSchemaCWRTypeUpdate(self._cw, rschema=rschema, values=newvalues) + CWRTypeUpdateOp(self._cw, rschema=rschema, entity=entity, + values=newvalues) class AfterDelRelationTypeHook(SyncSchemaHook): @@ -984,9 +953,12 @@ def __call__(self): session = self._cw - rdef = 
session.vreg.schema.schema_by_eid(self.eidfrom) + try: + rdef = session.vreg.schema.schema_by_eid(self.eidfrom) + except KeyError: + self.critical('cant get schema rdef associated to %s', self.eidfrom) + return subjschema, rschema, objschema = rdef.as_triple() - pendings = session.transaction_data.get('pendingeids', ()) pendingrdefs = session.transaction_data.setdefault('pendingrdefs', set()) # first delete existing relation if necessary if rschema.final: @@ -995,93 +967,73 @@ else: rdeftype = 'CWRelation' pendingrdefs.add((subjschema, rschema, objschema)) - if not (subjschema.eid in pendings or objschema.eid in pendings): + if not (session.deleted_in_transaction(subjschema.eid) or + session.deleted_in_transaction(objschema.eid)): session.execute('DELETE X %s Y WHERE X is %s, Y is %s' % (rschema, subjschema, objschema)) - execute = session.execute - rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R,' - 'R eid %%(x)s' % rdeftype, {'x': self.eidto}) - lastrel = rset[0][0] == 0 - # we have to update physical schema systematically for final and inlined - # relations, but only if it's the last instance for this relation type - # for other relations - - if (rschema.final or rschema.inlined): - rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R, ' - 'R eid %%(x)s, X from_entity E, E name %%(name)s' - % rdeftype, {'x': self.eidto, 'name': str(subjschema)}) - if rset[0][0] == 0 and not subjschema.eid in pendings: - ptypes = session.transaction_data.setdefault('pendingrtypes', set()) - ptypes.add(rschema.type) - DropColumn(session, table=SQL_PREFIX + subjschema.type, - column=SQL_PREFIX + rschema.type) - elif lastrel: - DropRelationTable(session, rschema.type) - # if this is the last instance, drop associated relation type - if lastrel and not self.eidto in pendings: - execute('DELETE CWRType X WHERE X eid %(x)s', {'x': self.eidto}) - MemSchemaRDefDel(session, (subjschema, rschema, objschema)) + RDefDelOp(session, rdef=rdef) # CWAttribute / CWRelation hooks ############################################### class AfterAddCWAttributeHook(SyncSchemaHook): __regid__ = 'syncaddcwattribute' - __select__ = SyncSchemaHook.__select__ & implements('CWAttribute') + __select__ = SyncSchemaHook.__select__ & is_instance('CWAttribute') events = ('after_add_entity',) def __call__(self): - SourceDbCWAttributeAdd(self._cw, entity=self.entity) + CWAttributeAddOp(self._cw, entity=self.entity) class AfterAddCWRelationHook(AfterAddCWAttributeHook): __regid__ = 'syncaddcwrelation' - __select__ = SyncSchemaHook.__select__ & implements('CWRelation') + __select__ = SyncSchemaHook.__select__ & is_instance('CWRelation') def __call__(self): - SourceDbCWRelationAdd(self._cw, entity=self.entity) + CWRelationAddOp(self._cw, entity=self.entity) class AfterUpdateCWRDefHook(SyncSchemaHook): __regid__ = 'syncaddcwattribute' - __select__ = SyncSchemaHook.__select__ & implements('CWAttribute', - 'CWRelation') + __select__ = SyncSchemaHook.__select__ & is_instance('CWAttribute', + 'CWRelation') events = ('before_update_entity',) def __call__(self): entity = self.entity if self._cw.deleted_in_transaction(entity.eid): return - desttype = entity.otype.name + subjtype = entity.stype.name + objtype = entity.otype.name rschema = self._cw.vreg.schema[entity.rtype.name] + # note: do not access schema rdef here, it may be added later by an + # operation newvalues = {} - for prop in RelationDefinitionSchema.rproperty_defs(desttype): + for prop in RelationDefinitionSchema.rproperty_defs(objtype): if prop == 'constraints': 
continue if prop == 'order': - prop = 'ordernum' - if prop in entity.edited_attributes: - old, new = hook.entity_oldnewvalue(entity, prop) + attr = 'ordernum' + else: + attr = prop + if attr in entity.edited_attributes: + old, new = hook.entity_oldnewvalue(entity, attr) if old != new: - newvalues[prop] = entity[prop] + newvalues[prop] = new if newvalues: - subjtype = entity.stype.name - MemSchemaRDefUpdate(self._cw, kobj=(subjtype, desttype), - rschema=rschema, values=newvalues) - SourceDbRDefUpdate(self._cw, kobj=(subjtype, desttype), - rschema=rschema, values=newvalues) + RDefUpdateOp(self._cw, rschema=rschema, rdefkey=(subjtype, objtype), + values=newvalues) # constraints synchronization hooks ############################################ class AfterAddCWConstraintHook(SyncSchemaHook): __regid__ = 'syncaddcwconstraint' - __select__ = SyncSchemaHook.__select__ & implements('CWConstraint') + __select__ = SyncSchemaHook.__select__ & is_instance('CWConstraint') events = ('after_add_entity', 'after_update_entity') def __call__(self): - MemSchemaCWConstraintAdd(self._cw, entity=self.entity) - SourceDbCWConstraintAdd(self._cw, entity=self.entity) + CWConstraintAddOp(self._cw, entity=self.entity) class AfterAddConstrainedByHook(SyncSchemaHook): @@ -1109,8 +1061,7 @@ except IndexError: self._cw.critical('constraint type no more accessible') else: - SourceDbCWConstraintDel(self._cw, rdef=rdef, cstr=cstr) - MemSchemaCWConstraintDel(self._cw, rdef=rdef, cstr=cstr) + CWConstraintDelOp(self._cw, rdef=rdef, oldcstr=cstr) # permissions synchronization hooks ############################################ @@ -1176,7 +1127,7 @@ still_fti = list(schema[etype].indexable_attributes()) for entity in rset.entities(): source.fti_unindex_entity(session, entity.eid) - for container in entity.fti_containers(): + for container in entity.cw_adapt_to('IFTIndexable').fti_containers(): if still_fti or container is not entity: source.fti_unindex_entity(session, container.eid) source.fti_index_entity(session, container) diff -r 00b1b6b906cf -r 97c55baefa0c hooks/syncsession.py --- a/hooks/syncsession.py Thu Jul 15 12:03:13 2010 +0200 +++ b/hooks/syncsession.py Mon Jul 19 15:37:02 2010 +0200 @@ -22,7 +22,7 @@ from yams.schema import role_name from cubicweb import UnknownProperty, ValidationError, BadConnectionId -from cubicweb.selectors import implements +from cubicweb.selectors import is_instance from cubicweb.server import hook @@ -108,7 +108,7 @@ class CloseDeletedUserSessionsHook(SyncSessionHook): __regid__ = 'closession' - __select__ = SyncSessionHook.__select__ & implements('CWUser') + __select__ = SyncSessionHook.__select__ & is_instance('CWUser') events = ('after_delete_entity',) def __call__(self): @@ -152,7 +152,7 @@ class AddCWPropertyHook(SyncSessionHook): __regid__ = 'addcwprop' - __select__ = SyncSessionHook.__select__ & implements('CWProperty') + __select__ = SyncSessionHook.__select__ & is_instance('CWProperty') events = ('after_add_entity',) def __call__(self): diff -r 00b1b6b906cf -r 97c55baefa0c hooks/test/unittest_hooks.py --- a/hooks/test/unittest_hooks.py Thu Jul 15 12:03:13 2010 +0200 +++ b/hooks/test/unittest_hooks.py Mon Jul 19 15:37:02 2010 +0200 @@ -114,13 +114,10 @@ self.assertEquals(rset.get_entity(0, 0).reverse_parts[0].messageid, '<2345>') def test_unsatisfied_constraints(self): - releid = self.execute('INSERT CWRelation X: X from_entity FE, X relation_type RT, X to_entity TE ' - 'WHERE FE name "CWUser", RT name "in_group", TE name "String"')[0][0] - self.execute('SET X read_permission Y WHERE 
X eid %(x)s, Y name "managers"', - {'x': releid}, 'x') + releid = self.execute('SET U in_group G WHERE G name "owners", U login "admin"')[0][0] ex = self.assertRaises(ValidationError, self.commit) self.assertEquals(ex.errors, - {'to_entity-object': 'RQLConstraint O final FALSE failed'}) + {'in_group-object': u'RQLConstraint NOT O name "owners" failed'}) def test_html_tidy_hook(self): req = self.request() diff -r 00b1b6b906cf -r 97c55baefa0c hooks/test/unittest_syncschema.py --- a/hooks/test/unittest_syncschema.py Thu Jul 15 12:03:13 2010 +0200 +++ b/hooks/test/unittest_syncschema.py Mon Jul 19 15:37:02 2010 +0200 @@ -188,6 +188,9 @@ self.failIf(self.index_exists('State', 'state_of')) rset = self.execute('Any X, Y WHERE X state_of Y') self.assertEquals(len(rset), 2) # user states + except: + import traceback + traceback.print_exc() finally: self.execute('SET X inlined TRUE WHERE X name "state_of"') self.failIf(self.schema['state_of'].inlined) diff -r 00b1b6b906cf -r 97c55baefa0c hooks/workflow.py --- a/hooks/workflow.py Thu Jul 15 12:03:13 2010 +0200 +++ b/hooks/workflow.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""Core hooks: workflow related hooks +"""Core hooks: workflow related hooks""" -""" __docformat__ = "restructuredtext en" from datetime import datetime @@ -25,8 +24,7 @@ from yams.schema import role_name from cubicweb import RepositoryError, ValidationError -from cubicweb.interfaces import IWorkflowable -from cubicweb.selectors import implements +from cubicweb.selectors import is_instance, adaptable from cubicweb.server import hook @@ -51,11 +49,12 @@ def precommit_event(self): session = self.session entity = self.entity + iworkflowable = entity.cw_adapt_to('IWorkflowable') # if there is an initial state and the entity's state is not set, # use the initial state as a default state if not (session.deleted_in_transaction(entity.eid) or entity.in_state) \ - and entity.current_workflow: - state = entity.current_workflow.initial + and iworkflowable.current_workflow: + state = iworkflowable.current_workflow.initial if state: session.add_relation(entity.eid, 'in_state', state.eid) _FireAutotransitionOp(session, entity=entity) @@ -65,10 +64,11 @@ def precommit_event(self): entity = self.entity - autotrs = list(entity.possible_transitions('auto')) + iworkflowable = entity.cw_adapt_to('IWorkflowable') + autotrs = list(iworkflowable.possible_transitions('auto')) if autotrs: assert len(autotrs) == 1 - entity.fire_transition(autotrs[0]) + iworkflowable.fire_transition(autotrs[0]) class _WorkflowChangedOp(hook.Operation): @@ -82,29 +82,30 @@ if self.eid in pendingeids: return entity = session.entity_from_eid(self.eid) + iworkflowable = entity.cw_adapt_to('IWorkflowable') # check custom workflow has not been rechanged to another one in the same # transaction - mainwf = entity.main_workflow + mainwf = iworkflowable.main_workflow if mainwf.eid == self.wfeid: deststate = mainwf.initial if not deststate: qname = role_name('custom_workflow', 'subject') msg = session._('workflow has no initial state') raise ValidationError(entity.eid, {qname: msg}) - if mainwf.state_by_eid(entity.current_state.eid): + if mainwf.state_by_eid(iworkflowable.current_state.eid): # nothing to do return # if there are no history, simply go to new workflow's initial state - if not entity.workflow_history: - if entity.current_state.eid != deststate.eid: + if not iworkflowable.workflow_history: + if 
iworkflowable.current_state.eid != deststate.eid: _change_state(session, entity.eid, - entity.current_state.eid, deststate.eid) + iworkflowable.current_state.eid, deststate.eid) _FireAutotransitionOp(session, entity=entity) return msg = session._('workflow changed to "%s"') msg %= session._(mainwf.name) session.transaction_data[(entity.eid, 'customwf')] = self.wfeid - entity.change_state(deststate, msg, u'text/plain') + iworkflowable.change_state(deststate, msg, u'text/plain') class _CheckTrExitPoint(hook.Operation): @@ -125,9 +126,10 @@ def precommit_event(self): session = self.session forentity = self.forentity + iworkflowable = forentity.cw_adapt_to('IWorkflowable') trinfo = self.trinfo # we're in a subworkflow, check if we've reached an exit point - wftr = forentity.subworkflow_input_transition() + wftr = iworkflowable.subworkflow_input_transition() if wftr is None: # inconsistency detected qname = role_name('to_state', 'subject') @@ -137,9 +139,9 @@ if tostate is not None: # reached an exit point msg = session._('exiting from subworkflow %s') - msg %= session._(forentity.current_workflow.name) + msg %= session._(iworkflowable.current_workflow.name) session.transaction_data[(forentity.eid, 'subwfentrytr')] = True - forentity.change_state(tostate, msg, u'text/plain', tr=wftr) + iworkflowable.change_state(tostate, msg, u'text/plain', tr=wftr) # hooks ######################################################################## @@ -151,7 +153,7 @@ class SetInitialStateHook(WorkflowHook): __regid__ = 'wfsetinitial' - __select__ = WorkflowHook.__select__ & implements(IWorkflowable) + __select__ = WorkflowHook.__select__ & adaptable('IWorkflowable') events = ('after_add_entity',) def __call__(self): @@ -175,7 +177,7 @@ * by_transition or to_state (managers only) inlined relation is set """ __regid__ = 'wffiretransition' - __select__ = WorkflowHook.__select__ & implements('TrInfo') + __select__ = WorkflowHook.__select__ & is_instance('TrInfo') events = ('before_add_entity',) def __call__(self): @@ -189,18 +191,19 @@ msg = session._('mandatory relation') raise ValidationError(entity.eid, {qname: msg}) forentity = session.entity_from_eid(foreid) + iworkflowable = forentity.cw_adapt_to('IWorkflowable') # then check it has a workflow set, unless we're in the process of changing # entity's workflow if session.transaction_data.get((forentity.eid, 'customwf')): wfeid = session.transaction_data[(forentity.eid, 'customwf')] wf = session.entity_from_eid(wfeid) else: - wf = forentity.current_workflow + wf = iworkflowable.current_workflow if wf is None: msg = session._('related entity has no workflow set') raise ValidationError(entity.eid, {None: msg}) # then check it has a state set - fromstate = forentity.current_state + fromstate = iworkflowable.current_state if fromstate is None: msg = session._('related entity has no state') raise ValidationError(entity.eid, {None: msg}) @@ -270,7 +273,7 @@ class FiredTransitionHook(WorkflowHook): """change related entity state""" __regid__ = 'wffiretransition' - __select__ = WorkflowHook.__select__ & implements('TrInfo') + __select__ = WorkflowHook.__select__ & is_instance('TrInfo') events = ('after_add_entity',) def __call__(self): @@ -278,8 +281,9 @@ _change_state(self._cw, trinfo['wf_info_for'], trinfo['from_state'], trinfo['to_state']) forentity = self._cw.entity_from_eid(trinfo['wf_info_for']) - assert forentity.current_state.eid == trinfo['to_state'] - if forentity.main_workflow.eid != forentity.current_workflow.eid: + iworkflowable = 
forentity.cw_adapt_to('IWorkflowable') + assert iworkflowable.current_state.eid == trinfo['to_state'] + if iworkflowable.main_workflow.eid != iworkflowable.current_workflow.eid: _SubWorkflowExitOp(self._cw, forentity=forentity, trinfo=trinfo) @@ -297,7 +301,8 @@ # state changed through TrInfo insertion, so we already know it's ok return entity = session.entity_from_eid(self.eidfrom) - mainwf = entity.main_workflow + iworkflowable = entity.cw_adapt_to('IWorkflowable') + mainwf = iworkflowable.main_workflow if mainwf is None: msg = session._('entity has no workflow set') raise ValidationError(entity.eid, {None: msg}) @@ -309,7 +314,7 @@ msg = session._("state doesn't belong to entity's workflow. You may " "want to set a custom workflow for this entity first.") raise ValidationError(self.eidfrom, {qname: msg}) - if entity.current_workflow and wf.eid != entity.current_workflow.eid: + if iworkflowable.current_workflow and wf.eid != iworkflowable.current_workflow.eid: qname = role_name('in_state', 'subject') msg = session._("state doesn't belong to entity's current workflow") raise ValidationError(self.eidfrom, {qname: msg}) @@ -359,7 +364,7 @@ def __call__(self): entity = self._cw.entity_from_eid(self.eidfrom) - typewf = entity.cwetype_workflow() + typewf = entity.cw_adapt_to('IWorkflowable').cwetype_workflow() if typewf is not None: _WorkflowChangedOp(self._cw, eid=self.eidfrom, wfeid=typewf.eid) diff -r 00b1b6b906cf -r 97c55baefa0c i18n/en.po --- a/i18n/en.po Thu Jul 15 12:03:13 2010 +0200 +++ b/i18n/en.po Mon Jul 19 15:37:02 2010 +0200 @@ -235,6 +235,9 @@ msgid "Browse by category" msgstr "" +msgid "Browse by entity type" +msgstr "" + msgid "Bytes" msgstr "Bytes" @@ -419,6 +422,9 @@ msgid "Garbage collection information" msgstr "" +msgid "Got rhythm?" 
+msgstr "" + msgid "Help" msgstr "" @@ -624,9 +630,6 @@ msgid "Submit bug report by mail" msgstr "" -msgid "The repository holds the following entities" -msgstr "" - #, python-format msgid "The view %s can not be applied to this query" msgstr "" @@ -955,6 +958,9 @@ msgid "add_permission_object" msgstr "has permission to add" +msgid "add_relation" +msgstr "add" + #, python-format msgid "added %(etype)s #%(eid)s (%(title)s)" msgstr "" @@ -1282,6 +1288,12 @@ msgid "click on the box to cancel the deletion" msgstr "" +msgid "click to add a value" +msgstr "" + +msgid "click to delete this value" +msgstr "" + msgid "click to edit this field" msgstr "" @@ -2271,11 +2283,20 @@ msgid "granted to groups" msgstr "" -msgid "graphical representation of the instance'schema" +#, python-format +msgid "graphical representation of %(appid)s data model" msgstr "" #, python-format -msgid "graphical schema for %s" +msgid "" +"graphical representation of the %(etype)s entity type from %(appid)s data " +"model" +msgstr "" + +#, python-format +msgid "" +"graphical representation of the %(rtype)s relation type from %(appid)s data " +"model" msgstr "" #, python-format @@ -2801,6 +2822,9 @@ msgid "no edited fields specified for entity %s" msgstr "" +msgid "no related entity" +msgstr "" + msgid "no related project" msgstr "" @@ -3711,6 +3735,9 @@ msgid "update_permission_object" msgstr "has permission to update" +msgid "update_relation" +msgstr "update" + msgid "updated" msgstr "" diff -r 00b1b6b906cf -r 97c55baefa0c i18n/es.po --- a/i18n/es.po Thu Jul 15 12:03:13 2010 +0200 +++ b/i18n/es.po Mon Jul 19 15:37:02 2010 +0200 @@ -243,6 +243,9 @@ msgid "Browse by category" msgstr "Busca por categoría" +msgid "Browse by entity type" +msgstr "" + msgid "Bytes" msgstr "Bytes" @@ -427,6 +430,9 @@ msgid "Garbage collection information" msgstr "" +msgid "Got rhythm?" 
+msgstr "" + msgid "Help" msgstr "" @@ -632,9 +638,6 @@ msgid "Submit bug report by mail" msgstr "Enviar este reporte por email" -msgid "The repository holds the following entities" -msgstr "El repositorio contiene las entidades siguientes" - #, python-format msgid "The view %s can not be applied to this query" msgstr "La vista %s no puede ser aplicada a esta búsqueda" @@ -978,6 +981,9 @@ msgid "add_permission_object" msgstr "tiene la autorización para agregar" +msgid "add_relation" +msgstr "" + #, python-format msgid "added %(etype)s #%(eid)s (%(title)s)" msgstr "Agregado %(etype)s #%(eid)s (%(title)s)" @@ -1310,6 +1316,12 @@ msgid "click on the box to cancel the deletion" msgstr "Seleccione la zona de edición para cancelar la eliminación" +msgid "click to add a value" +msgstr "" + +msgid "click to delete this value" +msgstr "" + msgid "click to edit this field" msgstr "" @@ -2321,12 +2333,21 @@ msgid "granted to groups" msgstr "Otorgado a los grupos" -msgid "graphical representation of the instance'schema" +#, python-format +msgid "graphical representation of %(appid)s data model" msgstr "" #, python-format -msgid "graphical schema for %s" -msgstr "Gráfica del esquema por %s" +msgid "" +"graphical representation of the %(etype)s entity type from %(appid)s data " +"model" +msgstr "" + +#, python-format +msgid "" +"graphical representation of the %(rtype)s relation type from %(appid)s data " +"model" +msgstr "" #, python-format msgid "graphical workflow for %s" @@ -2875,6 +2896,9 @@ msgid "no edited fields specified for entity %s" msgstr "" +msgid "no related entity" +msgstr "" + msgid "no related project" msgstr "no hay proyecto relacionado" @@ -3792,6 +3816,9 @@ msgid "update_permission_object" msgstr "objeto de autorización de modificaciones" +msgid "update_relation" +msgstr "" + msgid "updated" msgstr "" @@ -4039,5 +4066,11 @@ msgid "you should probably delete that property" msgstr "deberia probablamente suprimir esta propriedad" +#~ msgid "The repository holds the following entities" +#~ msgstr "El repositorio contiene las entidades siguientes" + +#~ msgid "graphical schema for %s" +#~ msgstr "Gráfica del esquema por %s" + #~ msgid "schema-image" #~ msgstr "esquema imagen" diff -r 00b1b6b906cf -r 97c55baefa0c i18n/fr.po --- a/i18n/fr.po Thu Jul 15 12:03:13 2010 +0200 +++ b/i18n/fr.po Mon Jul 19 15:37:02 2010 +0200 @@ -242,6 +242,9 @@ msgid "Browse by category" msgstr "Naviguer par catégorie" +msgid "Browse by entity type" +msgstr "Naviguer par type d'entité" + msgid "Bytes" msgstr "Donnée binaires" @@ -438,6 +441,9 @@ msgid "Garbage collection information" msgstr "Information sur le ramasse-miette" +msgid "Got rhythm?" 
+msgstr "" + msgid "Help" msgstr "Aide" @@ -535,7 +541,7 @@ msgstr "Nouvelle transition workflow" msgid "No result matching query" -msgstr "aucun résultat" +msgstr "Aucun résultat ne correspond à la requête" msgid "Non exhaustive list of views that may apply to entities of this type" msgstr "Liste non exhausite des vues s'appliquant à ce type d'entité" @@ -643,9 +649,6 @@ msgid "Submit bug report by mail" msgstr "Soumettre ce rapport par email" -msgid "The repository holds the following entities" -msgstr "Le dépot contient les entités suivantes" - #, python-format msgid "The view %s can not be applied to this query" msgstr "La vue %s ne peut être appliquée à cette requête" @@ -995,6 +998,9 @@ msgid "add_permission_object" msgstr "a la permission d'ajouter" +msgid "add_relation" +msgstr "ajouter" + #, python-format msgid "added %(etype)s #%(eid)s (%(title)s)" msgstr "ajout de l'entité %(etype)s #%(eid)s (%(title)s)" @@ -1330,6 +1336,12 @@ msgid "click on the box to cancel the deletion" msgstr "cliquez dans la zone d'édition pour annuler la suppression" +msgid "click to add a value" +msgstr "cliquer pour ajouter une valeur" + +msgid "click to delete this value" +msgstr "cliquer pour supprimer cette valeur" + msgid "click to edit this field" msgstr "cliquez pour éditer ce champ" @@ -2357,12 +2369,25 @@ msgid "granted to groups" msgstr "accordée aux groupes" -msgid "graphical representation of the instance'schema" -msgstr "représentation graphique du schéma de l'instance" +#, python-format +msgid "graphical representation of %(appid)s data model" +msgstr "réprésentation graphique du modèle de données de %(appid)s" #, python-format -msgid "graphical schema for %s" -msgstr "graphique du schéma pour %s" +msgid "" +"graphical representation of the %(etype)s entity type from %(appid)s data " +"model" +msgstr "" +"réprésentation graphique du modèle de données pour le type d'entité %(etype)" +"s de %(appid)s" + +#, python-format +msgid "" +"graphical representation of the %(rtype)s relation type from %(appid)s data " +"model" +msgstr "" +"réprésentation graphique du modèle de données pour le type de relation %" +"(etype)s de %(appid)s" #, python-format msgid "graphical workflow for %s" @@ -2909,6 +2934,9 @@ msgid "no edited fields specified for entity %s" msgstr "aucun champ à éditer spécifié pour l'entité %s" +msgid "no related entity" +msgstr "pas d'entité liée" + msgid "no related project" msgstr "pas de projet rattaché" @@ -3834,6 +3862,9 @@ msgid "update_permission_object" msgstr "a la permission de modifier" +msgid "update_relation" +msgstr "modifier" + msgid "updated" msgstr "mis à jour" diff -r 00b1b6b906cf -r 97c55baefa0c interfaces.py --- a/interfaces.py Thu Jul 15 12:03:13 2010 +0200 +++ b/interfaces.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,68 +15,24 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -""" -Standard interfaces. +"""Standard interfaces. Deprecated in favor of adapters. .. note:: - The `implements` selector matches not only entity classes but also - their interfaces. Writing __select__ = implements('IGeocodable') is - a perfectly fine thing to do. + The `implements` selector used to match not only entity classes but also their + interfaces. This will disappear in a future version. You should define an + adapter for that interface and use `adaptable('MyIFace')` selector on appobjects + that require that interface. 
+ """ __docformat__ = "restructuredtext en" from logilab.common.interface import Interface -class IEmailable(Interface): - """interface for emailable entities""" - def get_email(self): - """return email address""" - - @classmethod - def allowed_massmail_keys(cls): - """returns a set of allowed email substitution keys - - The default is to return the entity's attribute list but an - entity class might override this method to allow extra keys. - For instance, the Person class might want to return a `companyname` - key. - """ - - def as_email_context(self): - """returns the dictionary as used by the sendmail controller to - build email bodies. - - NOTE: the dictionary keys should match the list returned by the - `allowed_massmail_keys` method. - """ - - -class IWorkflowable(Interface): - """interface for entities dealing with a specific workflow""" - # XXX to be completed, see cw.entities.wfobjs.WorkflowableMixIn - - @property - def state(self): - """return current state name""" - - def change_state(self, stateeid, trcomment=None, trcommentformat=None): - """change the entity's state to the state of the given name in entity's - workflow - """ - - def latest_trinfo(self): - """return the latest transition information for this entity - """ - - +# XXX deprecates in favor of IProgressAdapter class IProgress(Interface): - """something that has a cost, a state and a progression - - Take a look at cubicweb.mixins.ProgressMixIn for some - default implementations - """ + """something that has a cost, a state and a progression""" @property def cost(self): @@ -112,7 +68,7 @@ def progress(self): """returns the % progress of the task item""" - +# XXX deprecates in favor of IMileStoneAdapter class IMileStone(IProgress): """represents an ITask's item""" @@ -135,7 +91,132 @@ def contractors(self): """returns the list of persons supposed to work on this task""" +# XXX deprecates in favor of IEmbedableAdapter +class IEmbedable(Interface): + """interface for embedable entities""" + def embeded_url(self): + """embed action interface""" + +# XXX deprecates in favor of ICalendarViewsAdapter +class ICalendarViews(Interface): + """calendar views interface""" + def matching_dates(self, begin, end): + """ + :param begin: day considered as begin of the range (`DateTime`) + :param end: day considered as end of the range (`DateTime`) + + :return: + a list of dates (`DateTime`) in the range [`begin`, `end`] on which + this entity apply + """ + +# XXX deprecates in favor of ICalendarableAdapter +class ICalendarable(Interface): + """interface for items that do have a begin date 'start' and an end date 'stop' + """ + + @property + def start(self): + """return start date""" + + @property + def stop(self): + """return stop state""" + +# XXX deprecates in favor of ICalendarableAdapter +class ITimetableViews(Interface): + """timetable views interface""" + def timetable_date(self): + """XXX explain + + :return: date (`DateTime`) + """ + +# XXX deprecates in favor of IGeocodableAdapter +class IGeocodable(Interface): + """interface required by geocoding views such as gmap-view""" + + @property + def latitude(self): + """returns the latitude of the entity""" + + @property + def longitude(self): + """returns the longitude of the entity""" + + def marker_icon(self): + """returns the icon that should be used as the marker""" + +# XXX deprecates in favor of ISIOCItemAdapter +class ISiocItem(Interface): + """interface for entities which may be represented as an ISIOC item""" + + def isioc_content(self): + """return item's content""" + + 
def isioc_container(self): + """return container entity""" + + def isioc_type(self): + """return container type (post, BlogPost, MailMessage)""" + + def isioc_replies(self): + """return replies items""" + + def isioc_topics(self): + """return topics items""" + +# XXX deprecates in favor of ISIOCContainerAdapter +class ISiocContainer(Interface): + """interface for entities which may be represented as an ISIOC container""" + + def isioc_type(self): + """return container type (forum, Weblog, MailingList)""" + + def isioc_items(self): + """return contained items""" + +# XXX deprecates in favor of IEmailableAdapter +class IFeed(Interface): + """interface for entities with rss flux""" + + def rss_feed_url(self): + """""" + +# XXX deprecates in favor of IDownloadableAdapter +class IDownloadable(Interface): + """interface for downloadable entities""" + + def download_url(self): # XXX not really part of this interface + """return an url to download entity's content""" + def download_content_type(self): + """return MIME type of the downloadable content""" + def download_encoding(self): + """return encoding of the downloadable content""" + def download_file_name(self): + """return file name of the downloadable content""" + def download_data(self): + """return actual data of the downloadable content""" + +# XXX deprecates in favor of IPrevNextAdapter +class IPrevNext(Interface): + """interface for entities which can be linked to a previous and/or next + entity + """ + + def next_entity(self): + """return the 'next' entity""" + def previous_entity(self): + """return the 'previous' entity""" + +# XXX deprecates in favor of IBreadCrumbsAdapter +class IBreadCrumbs(Interface): + + def breadcrumbs(self, view, recurs=False): + pass + +# XXX deprecates in favor of ITreeAdapter class ITree(Interface): def parent(self): @@ -159,141 +240,3 @@ def root(self): """returns the root object""" - -## web specific interfaces #################################################### - - -class IPrevNext(Interface): - """interface for entities which can be linked to a previous and/or next - entity - """ - - def next_entity(self): - """return the 'next' entity""" - def previous_entity(self): - """return the 'previous' entity""" - - -class IBreadCrumbs(Interface): - """interface for entities which can be "located" on some path""" - - # XXX fix recurs ! - def breadcrumbs(self, view, recurs=False): - """return a list containing some: - - * tuple (url, label) - * entity - * simple label string - - defining path from a root to the current view - - the main view is given as argument so breadcrumbs may vary according - to displayed view (may be None). When recursing on a parent entity, - the `recurs` argument should be set to True. 
- """ - - -class IDownloadable(Interface): - """interface for downloadable entities""" - - def download_url(self): # XXX not really part of this interface - """return an url to download entity's content""" - def download_content_type(self): - """return MIME type of the downloadable content""" - def download_encoding(self): - """return encoding of the downloadable content""" - def download_file_name(self): - """return file name of the downloadable content""" - def download_data(self): - """return actual data of the downloadable content""" - - -class IEmbedable(Interface): - """interface for embedable entities""" - - def embeded_url(self): - """embed action interface""" - -class ICalendarable(Interface): - """interface for items that do have a begin date 'start' and an end date 'stop' - """ - - @property - def start(self): - """return start date""" - - @property - def stop(self): - """return stop state""" - -class ICalendarViews(Interface): - """calendar views interface""" - def matching_dates(self, begin, end): - """ - :param begin: day considered as begin of the range (`DateTime`) - :param end: day considered as end of the range (`DateTime`) - - :return: - a list of dates (`DateTime`) in the range [`begin`, `end`] on which - this entity apply - """ - -class ITimetableViews(Interface): - """timetable views interface""" - def timetable_date(self): - """XXX explain - - :return: date (`DateTime`) - """ - -class IGeocodable(Interface): - """interface required by geocoding views such as gmap-view""" - - @property - def latitude(self): - """returns the latitude of the entity""" - - @property - def longitude(self): - """returns the longitude of the entity""" - - def marker_icon(self): - """returns the icon that should be used as the marker - (returns None for default) - """ - -class IFeed(Interface): - """interface for entities with rss flux""" - - def rss_feed_url(self): - """return an url which layout sub-entities item - """ - -class ISiocItem(Interface): - """interface for entities (which are item - in sioc specification) with sioc views""" - - def isioc_content(self): - """return content entity""" - - def isioc_container(self): - """return container entity""" - - def isioc_type(self): - """return container type (post, BlogPost, MailMessage)""" - - def isioc_replies(self): - """return replies items""" - - def isioc_topics(self): - """return topics items""" - -class ISiocContainer(Interface): - """interface for entities (which are container - in sioc specification) with sioc views""" - - def isioc_type(self): - """return container type (forum, Weblog, MailingList)""" - - def isioc_items(self): - """return contained items""" diff -r 00b1b6b906cf -r 97c55baefa0c mail.py --- a/mail.py Thu Jul 15 12:03:13 2010 +0200 +++ b/mail.py Mon Jul 19 15:37:02 2010 +0200 @@ -184,7 +184,7 @@ # previous email if not self.msgid_timestamp: refs = [self.construct_message_id(eid) - for eid in entity.notification_references(self)] + for eid in entity.cw_adapt_to('INotifiable').notification_references(self)] else: refs = () msgid = self.construct_message_id(entity.eid) @@ -198,7 +198,7 @@ if isinstance(something, Entity): # hi-jack self._cw to get a session for the returned user self._cw = self._cw.hijack_user(something) - emailaddr = something.get_email() + emailaddr = something.cw_adapt_to('IEmailable').get_email() else: emailaddr, lang = something self._cw.set_language(lang) @@ -246,7 +246,8 @@ # email generation helpers ################################################# def construct_message_id(self, eid): - return 
construct_message_id(self._cw.vreg.config.appid, eid, self.msgid_timestamp) + return construct_message_id(self._cw.vreg.config.appid, eid, + self.msgid_timestamp) def format_field(self, attr, value): return ':%(attr)s: %(value)s' % {'attr': attr, 'value': value} diff -r 00b1b6b906cf -r 97c55baefa0c migration.py --- a/migration.py Thu Jul 15 12:03:13 2010 +0200 +++ b/migration.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""utilities for instances migration +"""utilities for instances migration""" -""" __docformat__ = "restructuredtext en" import sys @@ -111,7 +110,7 @@ self.config = config if config: # no config on shell to a remote instance - self.config.init_log(logthreshold=logging.ERROR, debug=True) + self.config.init_log(logthreshold=logging.ERROR) # 0: no confirmation, 1: only main commands confirmed, 2 ask for everything self.verbosity = verbosity self.need_wrap = True @@ -281,14 +280,25 @@ return context def cmd_process_script(self, migrscript, funcname=None, *args, **kwargs): - """execute a migration script - in interactive mode, display the migration script path, ask for - confirmation and execute it if confirmed + """execute a migration script in interactive mode + + Display the migration script path, ask for confirmation and execute it + if confirmed + + Context environment can have these variables defined: + - __name__ : will be determine by funcname parameter + - __file__ : is the name of the script if it exists + - __args__ : script arguments coming from command-line + + :param migrscript: name of the script + :param funcname: defines __name__ inside the shell (or use __main__) + :params args: optional arguments for funcname + :keyword scriptargs: optional arguments of the script """ migrscript = os.path.normpath(migrscript) if migrscript.endswith('.py'): script_mode = 'python' - elif migrscript.endswith('.txt') or migrscript.endswith('.rst'): + elif migrscript.endswith(('.txt', '.rst')): script_mode = 'doctest' else: raise Exception('This is not a valid cubicweb shell input') @@ -300,7 +310,8 @@ pyname = '__main__' else: pyname = splitext(basename(migrscript))[0] - scriptlocals.update({'__file__': migrscript, '__name__': pyname}) + scriptlocals.update({'__file__': migrscript, '__name__': pyname, + '__args__': kwargs.pop("scriptargs", [])}) execfile(migrscript, scriptlocals) if funcname is not None: try: diff -r 00b1b6b906cf -r 97c55baefa0c misc/migration/3.9.0_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/misc/migration/3.9.0_Any.py Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,3 @@ +if repo.system_source.dbdriver == 'postgres': + sql('ALTER TABLE appears ADD COLUMN weight float') + sql('UPDATE appears SET weight=1.0 ') diff -r 00b1b6b906cf -r 97c55baefa0c misc/scripts/ldap_change_base_dn.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/misc/scripts/ldap_change_base_dn.py Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,24 @@ +from base64 import b64decode, b64encode +try: + uri, newdn = __args__ +except ValueError: + print 'USAGE: cubicweb-ctl shell ldap_change_base_dn.py -- ' + print + print 'you should not have updated your sources file yet' + +olddn = repo.config.sources()[uri]['user-base-dn'] + +assert olddn != newdn + +raw_input("Ensure you've stopped the instance, type enter when done.") + +for eid, extid in sql("SELECT eid, extid FROM entities WHERE source='%s'" % uri): + olduserdn = b64decode(extid) + newuserdn = olduserdn.replace(olddn, 
newdn) + if newuserdn != olduserdn: + print olduserdn, '->', newuserdn + sql("UPDATE entities SET extid='%s' WHERE eid=%s" % (b64encode(newuserdn), eid)) + +commit() + +print 'you can now update the sources file to the new dn and restart the instance' diff -r 00b1b6b906cf -r 97c55baefa0c mixins.py --- a/mixins.py Thu Jul 15 12:03:13 2010 +0200 +++ b/mixins.py Mon Jul 19 15:37:02 2010 +0200 @@ -21,9 +21,10 @@ from itertools import chain from logilab.common.decorators import cached +from logilab.common.deprecation import deprecated, class_deprecated from cubicweb.selectors import implements -from cubicweb.interfaces import IEmailable, ITree +from cubicweb.interfaces import ITree class TreeMixIn(object): @@ -33,6 +34,9 @@ tree_attribute, parent_target and children_target class attribute to benefit from this default implementation """ + __metaclass__ = class_deprecated + __deprecation_warning__ = '[3.9] TreeMixIn is deprecated, use/override ITreeAdapter instead' + tree_attribute = None # XXX misnamed parent_target = 'subject' @@ -117,16 +121,6 @@ return chain([self], _uptoroot(self)) return _uptoroot(self) - def notification_references(self, view): - """used to control References field of email send on notification - for this entity. `view` is the notification view. - - Should return a list of eids which can be used to generate message ids - of previously sent email - """ - return self.path()[:-1] - - ## ITree interface ######################################################## def parent(self): """return the parent entity if any, else None (e.g. if we are on the @@ -151,7 +145,7 @@ entities=entities) def children_rql(self): - return self.related_rql(self.tree_attribute, self.children_target) + return self.cw_related_rql(self.tree_attribute, self.children_target) def is_leaf(self): return len(self.children()) == 0 @@ -171,8 +165,7 @@ NOTE: The default implementation is based on the primary_email / use_email scheme """ - __implements__ = (IEmailable,) - + @deprecated("[3.9] use entity.cw_adapt_to('IEmailable').get_email()") def get_email(self): if getattr(self, 'primary_email', None): return self.primary_email[0].address @@ -180,28 +173,6 @@ return self.use_email[0].address return None - @classmethod - def allowed_massmail_keys(cls): - """returns a set of allowed email substitution keys - - The default is to return the entity's attribute list but an - entity class might override this method to allow extra keys. - For instance, the Person class might want to return a `companyname` - key. - """ - return set(rschema.type - for rschema, attrtype in cls.e_schema.attribute_definitions() - if attrtype.type not in ('Password', 'Bytes')) - - def as_email_context(self): - """returns the dictionary as used by the sendmail controller to - build email bodies. - - NOTE: the dictionary keys should match the list returned by the - `allowed_massmail_keys` method. - """ - return dict( (attr, getattr(self, attr)) for attr in self.allowed_massmail_keys() ) - """pluggable mixins system: plug classes registered in MI_REL_TRIGGERS on entity classes which have the relation described by the dict's key. 
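As the deprecation warnings above suggest, the mixin methods now simply
delegate to adapters. A minimal sketch of the adapter-based call site
(``primary_address`` is a made-up helper; only ``cw_adapt_to`` and the
``IEmailable`` adapter referenced above are assumed):

.. sourcecode:: python

   def primary_address(entity):
       # cw_adapt_to returns None when no IEmailable adapter is selectable
       # for this entity
       ieml = entity.cw_adapt_to('IEmailable')
       return ieml.get_email() if ieml is not None else None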
@@ -215,7 +186,7 @@ } - +# XXX move to cubicweb.web.views.treeview once we delete usage from this file def _done_init(done, view, row, col): """handle an infinite recursion safety belt""" if done is None: @@ -223,7 +194,7 @@ entity = view.cw_rset.get_entity(row, col) if entity.eid in done: msg = entity._cw._('loop in %(rel)s relation (%(eid)s)') % { - 'rel': entity.tree_attribute, + 'rel': entity.cw_adapt_to('ITree').tree_relation, 'eid': entity.eid } return None, msg @@ -233,16 +204,20 @@ class TreeViewMixIn(object): """a recursive tree view""" + __metaclass__ = class_deprecated + __deprecation_warning__ = '[3.9] TreeViewMixIn is deprecated, use/override BaseTreeView instead' + __regid__ = 'tree' + __select__ = implements(ITree, warn=False) item_vid = 'treeitem' - __select__ = implements(ITree) def call(self, done=None, **kwargs): if done is None: done = set() super(TreeViewMixIn, self).call(done=done, **kwargs) - def cell_call(self, row, col=0, vid=None, done=None, **kwargs): + def cell_call(self, row, col=0, vid=None, done=None, maxlevel=None, **kwargs): + assert maxlevel is None or maxlevel > 0 done, entity = _done_init(done, self, row, col) if done is None: # entity is actually an error message @@ -250,8 +225,14 @@ return self.open_item(entity) entity.view(vid or self.item_vid, w=self.w, **kwargs) + if maxlevel is not None: + maxlevel -= 1 + if maxlevel == 0: + self.close_item(entity) + return relatedrset = entity.children(entities=False) - self.wview(self.__regid__, relatedrset, 'null', done=done, **kwargs) + self.wview(self.__regid__, relatedrset, 'null', done=done, + maxlevel=maxlevel, **kwargs) self.close_item(entity) def open_item(self, entity): @@ -262,6 +243,8 @@ class TreePathMixIn(object): """a recursive path view""" + __metaclass__ = class_deprecated + __deprecation_warning__ = '[3.9] TreePathMixIn is deprecated, use/override TreePathView instead' __regid__ = 'path' item_vid = 'oneline' separator = u' > ' @@ -286,6 +269,8 @@ class ProgressMixIn(object): """provide a default implementations for IProgress interface methods""" + __metaclass__ = class_deprecated + __deprecation_warning__ = '[3.9] ProgressMixIn is deprecated, use/override IProgressAdapter instead' @property def cost(self): diff -r 00b1b6b906cf -r 97c55baefa0c mttransforms.py --- a/mttransforms.py Thu Jul 15 12:03:13 2010 +0200 +++ b/mttransforms.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""mime type transformation engine for cubicweb, based on mtconverter +"""mime type transformation engine for cubicweb, based on mtconverter""" -""" __docformat__ = "restructuredtext en" from logilab import mtconverter diff -r 00b1b6b906cf -r 97c55baefa0c req.py --- a/req.py Thu Jul 15 12:03:13 2010 +0200 +++ b/req.py Mon Jul 19 15:37:02 2010 +0200 @@ -133,7 +133,7 @@ Example (in a shell session): >>> c = create_entity('Company', name=u'Logilab') - >>> create_entity('Person', firstname=u'John', lastname=u'Doe', + >>> create_entity('Person', firstname=u'John', surname=u'Doe', ... 
works_for=c) """ @@ -279,7 +279,7 @@ user = self.user userinfo['login'] = user.login userinfo['name'] = user.name() - userinfo['email'] = user.get_email() + userinfo['email'] = user.cw_adapt_to('IEmailable').get_email() return userinfo def is_internal_session(self): @@ -373,11 +373,11 @@ raise ValueError(self._('can\'t parse %(value)r (expected %(format)s)') % {'value': value, 'format': format}) - # abstract methods to override according to the web front-end ############# - def base_url(self): """return the root url of the instance""" - raise NotImplementedError + return self.vreg.config['base-url'] + + # abstract methods to override according to the web front-end ############# def describe(self, eid): """return a tuple (type, sourceuri, extid) for the entity with id """ diff -r 00b1b6b906cf -r 97c55baefa0c rqlrewrite.py --- a/rqlrewrite.py Thu Jul 15 12:03:13 2010 +0200 +++ b/rqlrewrite.py Mon Jul 19 15:37:02 2010 +0200 @@ -19,8 +19,8 @@ tree. This is used for instance for read security checking in the repository. +""" -""" __docformat__ = "restructuredtext en" from rql import nodes as n, stmts, TypeResolverException diff -r 00b1b6b906cf -r 97c55baefa0c rset.py --- a/rset.py Thu Jul 15 12:03:13 2010 +0200 +++ b/rset.py Mon Jul 19 15:37:02 2010 +0200 @@ -77,10 +77,16 @@ rows = self.rows if len(rows) > 10: rows = rows[:10] + ['...'] + if len(rows) > 1: + # add a line break before first entity if more that one. + pattern = '' + else: + pattern = '' + if not self.description: - return '' % (self.rql, len(self.rows), + return pattern % (self.rql, len(self.rows), '\n'.join(str(r) for r in rows)) - return '' % (self.rql, len(self.rows), + return pattern % (self.rql, len(self.rows), '\n'.join('%s (%s)' % (r, d) for r, d in zip(rows, self.description))) @@ -453,7 +459,7 @@ etype = self.description[row][col] entity = self.req.vreg['etypes'].etype_class(etype)(req, rset=self, row=row, col=col) - entity.set_eid(eid) + entity.eid = eid # cache entity req.set_entity_cache(entity) eschema = entity.e_schema @@ -494,7 +500,7 @@ rrset.req = req else: rrset = self._build_entity(row, outerselidx).as_rset() - entity.set_related_cache(attr, role, rrset) + entity.cw_set_relation_cache(attr, role, rrset) return entity @cached diff -r 00b1b6b906cf -r 97c55baefa0c schema.py --- a/schema.py Thu Jul 15 12:03:13 2010 +0200 +++ b/schema.py Mon Jul 19 15:37:02 2010 +0200 @@ -417,7 +417,7 @@ # avoid deleting the relation type accidentally... 
self.schema['has_text'].del_relation_def(self, self.schema['String']) - def schema_entity(self): + def schema_entity(self): # XXX @property for consistency with meta """return True if this entity type is used to build the schema""" return self.type in SCHEMA_TYPES @@ -441,7 +441,7 @@ def meta(self): return self.type in META_RTYPES - def schema_relation(self): + def schema_relation(self): # XXX @property for consistency with meta """return True if this relation type is used to build the schema""" return self.type in SCHEMA_TYPES @@ -572,7 +572,13 @@ rdef.name = rdef.name.lower() rdef.subject = bw_normalize_etype(rdef.subject) rdef.object = bw_normalize_etype(rdef.object) - rdefs = super(CubicWebSchema, self).add_relation_def(rdef) + try: + rdefs = super(CubicWebSchema, self).add_relation_def(rdef) + except BadSchemaDefinition: + reversed_etype_map = dict( (v, k) for k, v in ETYPE_NAME_MAP.iteritems() ) + if rdef.subject in reversed_etype_map or rdef.object in reversed_etype_map: + return + raise if rdefs: try: self._eid_index[rdef.eid] = rdefs diff -r 00b1b6b906cf -r 97c55baefa0c schemas/base.py --- a/schemas/base.py Thu Jul 15 12:03:13 2010 +0200 +++ b/schemas/base.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""core CubicWeb schema, but not necessary at bootstrap time +"""core CubicWeb schema, but not necessary at bootstrap time""" -""" __docformat__ = "restructuredtext en" _ = unicode diff -r 00b1b6b906cf -r 97c55baefa0c selectors.py --- a/selectors.py Thu Jul 15 12:03:13 2010 +0200 +++ b/selectors.py Mon Jul 19 15:37:02 2010 +0200 @@ -169,7 +169,7 @@ or below the :func:`objectify_selector` decorator of your selector function so it gets traceable when :class:`traced_selection` is activated (see :ref:`DebuggingSelectors`). -.. autofunction:: cubicweb.selectors.lltrace +.. autofunction:: cubicweb.appobject.lltrace .. note:: Selectors __call__ should *always* return a positive integer, and shall never @@ -183,10 +183,10 @@ Once in a while, one needs to understand why a view (or any application object) is, or is not selected appropriately. Looking at which selectors fired (or did -not) is the way. The :class:`cubicweb.selectors.traced_selection` context +not) is the way. The :class:`cubicweb.appobject.traced_selection` context manager to help with that, *if you're running your instance in debug mode*. -.. autoclass:: cubicweb.selectors.traced_selection +.. autoclass:: cubicweb.appobject.traced_selection .. 
|cubicweb| replace:: *CubicWeb* @@ -202,90 +202,15 @@ from logilab.common.interface import implements as implements_iface from yams import BASE_TYPES +from rql.nodes import Function -from cubicweb import Unauthorized, NoSelectableObject, NotAnEntity, role +from cubicweb import (Unauthorized, NoSelectableObject, NotAnEntity, + CW_EVENT_MANAGER, role) # even if not used, let yes here so it's importable through this module -from cubicweb.appobject import Selector, objectify_selector, yes -from cubicweb.vregistry import class_regid -from cubicweb.cwconfig import CubicWebConfiguration +from cubicweb.appobject import Selector, objectify_selector, lltrace, yes from cubicweb.schema import split_expression -# helpers for debugging selectors -SELECTOR_LOGGER = logging.getLogger('cubicweb.selectors') -TRACED_OIDS = None - -def _trace_selector(cls, selector, args, ret): - # /!\ lltrace decorates pure function or __call__ method, this - # means argument order may be different - if isinstance(cls, Selector): - selname = str(cls) - vobj = args[0] - else: - selname = selector.__name__ - vobj = cls - if TRACED_OIDS == 'all' or class_regid(vobj) in TRACED_OIDS: - #SELECTOR_LOGGER.warning('selector %s returned %s for %s', selname, ret, cls) - print '%s -> %s for %s(%s)' % (selname, ret, vobj, vobj.__regid__) - -def lltrace(selector): - """use this decorator on your selectors so the becomes traceable with - :class:`traced_selection` - """ - # don't wrap selectors if not in development mode - if CubicWebConfiguration.mode == 'system': # XXX config.debug - return selector - def traced(cls, *args, **kwargs): - ret = selector(cls, *args, **kwargs) - if TRACED_OIDS is not None: - _trace_selector(cls, selector, args, ret) - return ret - traced.__name__ = selector.__name__ - traced.__doc__ = selector.__doc__ - return traced - -class traced_selection(object): - """ - Typical usage is : - - .. sourcecode:: python - - >>> from cubicweb.selectors import traced_selection - >>> with traced_selection(): - ... # some code in which you want to debug selectors - ... # for all objects - - Don't forget the 'from __future__ import with_statement' at the module top-level - if you're using python prior to 2.6. - - This will yield lines like this in the logs:: - - selector one_line_rset returned 0 for - - You can also give to :class:`traced_selection` the identifiers of objects on - which you want to debug selection ('oid1' and 'oid2' in the example above). - - .. sourcecode:: python - - >>> with traced_selection( ('regid1', 'regid2') ): - ... # some code in which you want to debug selectors - ... # for objects with __regid__ 'regid1' and 'regid2' - - A potentially usefull point to set up such a tracing function is - the `cubicweb.vregistry.Registry.select` method body. 
- """ - - def __init__(self, traced='all'): - self.traced = traced - - def __enter__(self): - global TRACED_OIDS - TRACED_OIDS = self.traced - - def __exit__(self, exctype, exc, traceback): - global TRACED_OIDS - TRACED_OIDS = None - return traceback is None - +from cubicweb.appobject import traced_selection # XXX for bw compat def score_interface(etypesreg, cls_or_inst, cls, iface): """Return XXX if the give object (maybe an instance or class) implements @@ -302,6 +227,7 @@ if iface is basecls: return index + 3 return 0 + # XXX iface in implements deprecated in 3.9 if implements_iface(cls_or_inst, iface): # implenting an interface takes precedence other special Any interface return 2 @@ -321,31 +247,6 @@ return super(PartialSelectorMixIn, self).__call__(cls, *args, **kwargs) -class ImplementsMixIn(object): - """mix-in class for selectors checking implemented interfaces of something - """ - def __init__(self, *expected_ifaces, **kwargs): - super(ImplementsMixIn, self).__init__(**kwargs) - self.expected_ifaces = expected_ifaces - - def __str__(self): - return '%s(%s)' % (self.__class__.__name__, - ','.join(str(s) for s in self.expected_ifaces)) - - def score_interfaces(self, req, cls_or_inst, cls): - score = 0 - etypesreg = req.vreg['etypes'] - for iface in self.expected_ifaces: - if isinstance(iface, basestring): - # entity type - try: - iface = etypesreg.etype_class(iface) - except KeyError: - continue # entity type not in the schema - score += score_interface(etypesreg, cls_or_inst, cls, iface) - return score - - class EClassSelector(Selector): """abstract class for selectors working on *entity class(es)* specified explicitly or found of the result set. @@ -375,14 +276,17 @@ self.accept_none = accept_none @lltrace - def __call__(self, cls, req, rset=None, row=None, col=0, **kwargs): + def __call__(self, cls, req, rset=None, row=None, col=0, accept_none=None, + **kwargs): if kwargs.get('entity'): return self.score_class(kwargs['entity'].__class__, req) if not rset: return 0 score = 0 if row is None: - if not self.accept_none: + if accept_none is None: + accept_none = self.accept_none + if not accept_none: if any(rset[i][col] is None for i in xrange(len(rset))): return 0 for etype in rset.column_types(col): @@ -442,7 +346,8 @@ """ @lltrace - def __call__(self, cls, req, rset=None, row=None, col=0, **kwargs): + def __call__(self, cls, req, rset=None, row=None, col=0, accept_none=None, + **kwargs): if not rset and not kwargs.get('entity'): return 0 score = 0 @@ -450,9 +355,11 @@ score = self.score_entity(kwargs['entity']) elif row is None: col = col or 0 + if accept_none is None: + accept_none = self.accept_none for row, rowvalue in enumerate(rset.rows): if rowvalue[col] is None: # outer join - if not self.accept_none: + if not accept_none: return 0 continue escore = self.score(req, rset, row, col) @@ -482,7 +389,7 @@ """Take a list of expected values as initializer argument and store them into the :attr:`expected` set attribute. - You should implements the :meth:`_get_value(cls, req, **kwargs)` method + You should implement the :meth:`_get_value(cls, req, **kwargs)` method which should return the value for the given context. The selector will then return 1 if the value is expected, else 0. """ @@ -528,19 +435,42 @@ * `registry`, a registry name - * `regid`, an object identifier in this registry + * `regids`, object identifiers in this registry, one of them should be + selectable. 
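+
+    For instance (a sketch with made-up identifiers: ``dependent`` and
+    ``other`` are not real registry ids), a view that should only be
+    selectable when some other view is itself selectable in the same
+    context:
+
+    .. sourcecode:: python
+
+       from cubicweb.view import View
+       from cubicweb.selectors import appobject_selectable
+
+       class DependentView(View):
+           __regid__ = 'dependent'
+           __select__ = View.__select__ & appobject_selectable('views',
+                                                               'other')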
""" - def __init__(self, registry, regid): + selectable_score = 1 + def __init__(self, registry, *regids): self.registry = registry - self.regid = regid + self.regids = regids + + @lltrace + def __call__(self, cls, req, **kwargs): + for regid in self.regids: + try: + req.vreg[self.registry].select(regid, req, **kwargs) + return self.selectable_score + except NoSelectableObject: + return 0 + + +class adaptable(appobject_selectable): + """Return 1 if another appobject is selectable using the same input context. + + Initializer arguments: + + * `regids`, adapter identifiers (e.g. interface names) to which the context + (usually entities) should be adaptable. One of them should be selectable + when multiple identifiers are given. + """ + # being adaptable to an interface takes precedence other is_instance('Any'), + # hence return 2 (is_instance('Any') score is 1) + selectable_score = 2 + def __init__(self, *regids): + super(adaptable, self).__init__('adapters', *regids) def __call__(self, cls, req, **kwargs): - try: - req.vreg[self.registry].select(self.regid, req, **kwargs) - return 1 - except NoSelectableObject: - return 0 - + kwargs.setdefault('accept_none', False) + return super(adaptable, self).__call__(cls, req, **kwargs) # rset selectors ############################################################## @@ -586,8 +516,8 @@ @objectify_selector @lltrace def one_line_rset(cls, req, rset=None, row=None, **kwargs): - """Return 1 if the result set is of size 1 or if a specific row in the - result set is specified ('row' argument). + """Return 1 if the result set is of size 1, or greater but a specific row in + the result set is specified ('row' argument). """ if rset is not None and (row is not None or rset.rowcount == 1): return 1 @@ -595,7 +525,7 @@ class multi_lines_rset(Selector): - """If `nb`is specified, return 1 if the result set has exactly `nb` row of + """If `nb` is specified, return 1 if the result set has exactly `nb` row of result. Else (`nb` is None), return 1 if the result set contains *at least* two rows. """ @@ -609,11 +539,11 @@ @lltrace def __call__(self, cls, req, rset=None, **kwargs): - return rset is not None and self.match_expected(rset.rowcount) + return int(rset is not None and self.match_expected(rset.rowcount)) class multi_columns_rset(multi_lines_rset): - """If `nb`is specified, return 1 if the result set has exactly `nb` column + """If `nb` is specified, return 1 if the result set has exactly `nb` column per row. Else (`nb` is None), return 1 if the result set contains *at least* two columns per row. Return 0 for empty result set. """ @@ -659,12 +589,17 @@ @lltrace def sorted_rset(cls, req, rset=None, **kwargs): """Return 1 for sorted result set (e.g. from an RQL query containing an - :ref:ORDERBY clause. + :ref:ORDERBY clause), with exception that it will return 0 if the rset is + 'ORDERBY FTIRANK(VAR)' (eg sorted by rank value of the has_text index). """ if rset is None: return 0 - rqlst = rset.syntax_tree() - if len(rqlst.children) > 1 or not rqlst.children[0].orderby: + selects = rset.syntax_tree().children + if (len(selects) > 1 or + not selects[0].orderby or + (isinstance(selects[0].orderby[0].term, Function) and + selects[0].orderby[0].term.name == 'FTIRANK') + ): return 0 return 2 @@ -712,7 +647,7 @@ class non_final_entity(EClassSelector): """Return 1 for entity of a non final entity type(s). Remember, "final" entity types are String, Int, etc... This is equivalent to - `implements('Any')` but more optimized. + `is_instance('Any')` but more optimized. 
See :class:`~cubicweb.selectors.EClassSelector` documentation for entity class lookup / score rules according to the input context. @@ -726,7 +661,7 @@ return 1 # necessarily true if we're there -class implements(ImplementsMixIn, EClassSelector): +class implements(EClassSelector): """Return non-zero score for entity that are of the given type(s) or implements at least one of the given interface(s). If multiple arguments are given, matching one of them is enough. @@ -739,10 +674,104 @@ .. note:: when interface is an entity class, the score will reflect class proximity so the most specific object will be selected. + + .. note:: deprecated in cubicweb >= 3.9, use either + :class:`~cubicweb.selectors.is_instance` or + :class:`~cubicweb.selectors.adaptable`. """ + + def __init__(self, *expected_ifaces, **kwargs): + emit_warn = kwargs.pop('warn', True) + super(implements, self).__init__(**kwargs) + self.expected_ifaces = expected_ifaces + if emit_warn: + warn('[3.9] implements selector is deprecated, use either ' + 'is_instance or adaptable', DeprecationWarning, stacklevel=2) + + def __str__(self): + return '%s(%s)' % (self.__class__.__name__, + ','.join(str(s) for s in self.expected_ifaces)) + def score_class(self, eclass, req): return self.score_interfaces(req, eclass, eclass) + def score_interfaces(self, req, cls_or_inst, cls): + score = 0 + etypesreg = req.vreg['etypes'] + for iface in self.expected_ifaces: + if isinstance(iface, basestring): + # entity type + try: + iface = etypesreg.etype_class(iface) + except KeyError: + continue # entity type not in the schema + score += score_interface(etypesreg, cls_or_inst, cls, iface) + return score + +def _reset_is_instance_cache(vreg): + vreg._is_instance_selector_cache = {} + +CW_EVENT_MANAGER.bind('before-registry-reset', _reset_is_instance_cache) + +class is_instance(EClassSelector): + """Return non-zero score for entity that is an instance of the one of given + type(s). If multiple arguments are given, matching one of them is enough. + + Entity types should be given as string, the corresponding class will be + fetched from the registry at selection time. + + See :class:`~cubicweb.selectors.EClassSelector` documentation for entity + class lookup / score rules according to the input context. + + .. note:: the score will reflect class proximity so the most specific object + will be selected. 
+ """ + + def __init__(self, *expected_etypes, **kwargs): + super(is_instance, self).__init__(**kwargs) + self.expected_etypes = expected_etypes + for etype in self.expected_etypes: + assert isinstance(etype, basestring), etype + + def __str__(self): + return '%s(%s)' % (self.__class__.__name__, + ','.join(str(s) for s in self.expected_etypes)) + + def score_class(self, eclass, req): + return self.score_etypes(req, eclass, eclass) + + def score_etypes(self, req, cls_or_inst, cls): + # cache on vreg to avoid reloading issues + cache = req.vreg._is_instance_selector_cache + try: + expected_eclasses = cache[self] + except KeyError: + # turn list of entity types as string into a list of + # (entity class, parent classes) + etypesreg = req.vreg['etypes'] + expected_eclasses = cache[self] = [] + for etype in self.expected_etypes: + try: + expected_eclasses.append( + (etypesreg.etype_class(etype), + etypesreg.parent_classes(etype)) + ) + except KeyError: + continue # entity type not in the schema + score = 0 + for iface, parents in expected_eclasses: + # adjust score according to class proximity + if iface is cls: + score += len(parents) + 4 + elif iface is parents[-1]: # Any + score += 1 + else: + for index, basecls in enumerate(reversed(parents[:-1])): + if iface is basecls: + score += index + 3 + break + return score + class score_entity(EntitySelector): """Return score according to an arbitrary function given as argument which @@ -766,6 +795,26 @@ self.score_entity = intscore +class has_mimetype(EntitySelector): + """Return 1 if the entity adapt to IDownloadable and has the given MIME type. + + You can give 'image/' to match any image for instance, or 'image/png' to match + only PNG images. + """ + def __init__(self, mimetype, once_is_enough=False): + super(has_mimetype, self).__init__(once_is_enough) + self.mimetype = mimetype + + def score_entity(self, entity): + idownloadable = entity.cw_adapt_to('IDownloadable') + if idownloadable is None: + return 0 + mt = idownloadable.download_content_type() + if not (mt and mt.startswith(self.mimetype)): + return 0 + return 1 + + class relation_possible(EntitySelector): """Return 1 for entity that supports the relation, provided that the request's user may do some `action` on it (see below). @@ -1009,7 +1058,7 @@ return self.score(req, rset, row, col) def score_entity(self, entity): - if entity.has_perm(self.action): + if entity.cw_has_perm(self.action): return 1 return 0 @@ -1233,18 +1282,15 @@ return len(self.expected) -class specified_etype_implements(implements): +class specified_etype_implements(is_instance): """Return non-zero score if the entity type specified by an 'etype' key searched in (by priority) input context kwargs and request form parameters match a known entity type (case insensitivly), and it's associated entity - class is of one of the type(s) given to the initializer or implements at - least one of the given interfaces. If multiple arguments are given, matching - one of them is enough. + class is of one of the type(s) given to the initializer. If multiple + arguments are given, matching one of them is enough. - Entity types should be given as string, the corresponding class will be - fetched from the entity types registry at selection time. - - .. note:: when interface is an entity class, the score will reflect class + .. note:: as with :class:`~cubicweb.selectors.is_instance`, entity types + should be given as string and the score will reflect class proximity so the most specific object will be selected. 
This selector is usually used by views holding entity creation forms (since @@ -1300,25 +1346,30 @@ class is_in_state(score_entity): """return 1 if entity is in one of the states given as argument list - you should use this instead of your own score_entity x: x.state == 'bla' - selector to avoid some gotchas: + you should use this instead of your own :class:`score_entity` selector to + avoid some gotchas: * possible views gives a fake entity with no state - * you must use the latest tr info, not entity.state for repository side + * you must use the latest tr info, not entity.in_state for repository side checking of the current state """ def __init__(self, *states): def score(entity, states=set(states)): + trinfo = entity.cw_adapt_to('IWorkflowable').latest_trinfo() try: - return entity.latest_trinfo().new_state.name in states + return trinfo.new_state.name in states except AttributeError: return None super(is_in_state, self).__init__(score) +@objectify_selector +def debug_mode(cls, req, rset=None, **kwargs): + """Return 1 if running in debug mode""" + return req.vreg.config.debugmode and 1 or 0 ## deprecated stuff ############################################################ -entity_implements = class_renamed('entity_implements', implements) +entity_implements = class_renamed('entity_implements', is_instance) class _but_etype(EntitySelector): """accept if the given entity types are not found in the result set. @@ -1336,7 +1387,7 @@ return 0 return 1 -but_etype = class_renamed('but_etype', _but_etype, 'use ~implements(*etypes) instead') +but_etype = class_renamed('but_etype', _but_etype, 'use ~is_instance(*etypes) instead') # XXX deprecated the one_* variants of selectors below w/ multi_xxx(nb=1)? diff -r 00b1b6b906cf -r 97c55baefa0c server/hook.py --- a/server/hook.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/hook.py Mon Jul 19 15:37:02 2010 +0200 @@ -63,7 +63,7 @@ from cubicweb import RegistryNotFound from cubicweb.cwvreg import CWRegistry, VRegistry from cubicweb.selectors import (objectify_selector, lltrace, ExpectedValueSelector, - implements) + is_instance) from cubicweb.appobject import AppObject from cubicweb.server.session import security_enabled @@ -246,7 +246,7 @@ if ertype.islower(): rtypes.append(ertype) else: - cls.__select__ = cls.__select__ & implements(ertype) + cls.__select__ = cls.__select__ & is_instance(ertype) if rtypes: cls.__select__ = cls.__select__ & match_rtype(*rtypes) return cls @@ -262,7 +262,7 @@ def __call__(self): if hasattr(self, 'call'): cls = self.__class__ - warn('[3.6] %s.%s: call is deprecated, implements __call__' + warn('[3.6] %s.%s: call is deprecated, implement __call__' % (cls.__module__, cls.__name__), DeprecationWarning) if self.event.endswith('_relation'): self.call(self._cw, self.eidfrom, self.rtype, self.eidto) diff -r 00b1b6b906cf -r 97c55baefa0c server/migractions.py --- a/server/migractions.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/migractions.py Mon Jul 19 15:37:02 2010 +0200 @@ -50,7 +50,8 @@ from yams.schema2sql import eschema2sql, rschema2sql from cubicweb import AuthenticationError -from cubicweb.schema import (META_RTYPES, VIRTUAL_RTYPES, +from cubicweb.schema import (ETYPE_NAME_MAP, META_RTYPES, VIRTUAL_RTYPES, + PURE_VIRTUAL_RTYPES, CubicWebRelationSchema, order_eschemas) from cubicweb.dbapi import get_repository, repo_connect from cubicweb.migration import MigrationHelper, yes @@ -855,9 +856,39 @@ `oldname` is a string giving the name of the existing entity type `newname` is a string giving the name of the renamed entity 
type """ - self.rqlexec('SET ET name %(newname)s WHERE ET is CWEType, ET name %(oldname)s', - {'newname' : unicode(newname), 'oldname' : oldname}, - ask_confirm=False) + schema = self.repo.schema + if newname in schema: + assert oldname in ETYPE_NAME_MAP, \ + '%s should be mappend to %s in ETYPE_NAME_MAP' % (oldname, newname) + attrs = ','.join([SQL_PREFIX + rschema.type + for rschema in schema[newname].subject_relations() + if (rschema.final or rschema.inlined) + and not rschema in PURE_VIRTUAL_RTYPES]) + self.sqlexec('INSERT INTO %s%s(%s) SELECT %s FROM %s%s' % ( + SQL_PREFIX, newname, attrs, attrs, SQL_PREFIX, oldname)) + # old entity type has not been added to the schema, can't gather it + new = schema.eschema(newname) + oldeid = self.rqlexec('CWEType ET WHERE ET name %(on)s', {'on': oldname}, + ask_confirm=False)[0][0] + # backport old type relations to new type + # XXX workflows, other relations? + self.rqlexec('SET X from_entity NET WHERE X from_entity OET, ' + 'NOT EXISTS(X2 from_entity NET, X relation_type XRT, X2 relation_type XRT, ' + 'X to_entity XTE, X2 to_entity XTE), ' + 'OET eid %(o)s, NET eid %(n)s', + {'o': oldeid, 'n': new.eid}, ask_confirm=False) + self.rqlexec('SET X to_entity NET WHERE X to_entity OET, ' + 'NOT EXISTS(X2 to_entity NET, X relation_type XRT, X2 relation_type XRT, ' + 'X from_entity XTE, X2 from_entity XTE), ' + 'OET eid %(o)s, NET eid %(n)s', + {'o': oldeid, 'n': new.eid}, ask_confirm=False) + # remove the old type: use rql to propagate deletion + self.rqlexec('DELETE CWEType ET WHERE ET name %(on)s', {'on': oldname}, + ask_confirm=False) + else: + self.rqlexec('SET ET name %(newname)s WHERE ET is CWEType, ET name %(on)s', + {'newname' : unicode(newname), 'on' : oldname}, + ask_confirm=False) if commit: self.commit() @@ -1152,10 +1183,10 @@ if commit: self.commit() - @deprecated('[3.5] use entity.fire_transition("transition") or entity.change_state("state")', - stacklevel=3) + @deprecated('[3.5] use iworkflowable.fire_transition("transition") or ' + 'iworkflowable.change_state("state")', stacklevel=3) def cmd_set_state(self, eid, statename, commit=False): - self._cw.entity_from_eid(eid).change_state(statename) + self._cw.entity_from_eid(eid).cw_adapt_to('IWorkflowable').change_state(statename) if commit: self.commit() @@ -1215,6 +1246,13 @@ self.commit() return entity + def cmd_update_etype_fti_weight(self, etype, weight): + if self.repo.system_source.dbdriver == 'postgres': + self.sqlexec('UPDATE appears SET weight=%(weight)s ' + 'FROM entities as X ' + 'WHERE X.eid=appears.uid AND X.type=%(type)s', + {'type': etype, 'weight': weight}, ask_confirm=False) + def cmd_reindex_entities(self, etypes=None): """force reindexaction of entities of the given types or of all indexable entity types diff -r 00b1b6b906cf -r 97c55baefa0c server/msplanner.py --- a/server/msplanner.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/msplanner.py Mon Jul 19 15:37:02 2010 +0200 @@ -96,7 +96,7 @@ from rql.stmts import Union, Select from rql.nodes import (VariableRef, Comparison, Relation, Constant, Variable, - Not, Exists) + Not, Exists, SortTerm, Function) from cubicweb import server from cubicweb.utils import make_uid @@ -1330,6 +1330,12 @@ orderby.append) if orderby: newroot.set_orderby(orderby) + elif rqlst.orderby: + for sortterm in rqlst.orderby: + if any(f for f in sortterm.iget_nodes(Function) if f.name == 'FTIRANK'): + newnode, oldnode = sortterm.accept(self, newroot, terms) + if newnode is not None: + newroot.add_sort_term(newnode) self.process_selection(newroot, 
terms, rqlst) elif not newroot.where: # no restrictions have been copied, just select terms and add @@ -1530,12 +1536,38 @@ copy.operator = '=' return copy, node + def visit_function(self, node, newroot, terms): + if node.name == 'FTIRANK': + # FTIRANK is somewhat special... Rank function should be included in + # the same query has the has_text relation, potentially added to + # selection for latter usage + if not self.hasaggrstep and self.final and node not in self.skip: + return self.visit_default(node, newroot, terms) + elif any(s for s in self.sources if s.uri != 'system'): + return None, node + # p = node.parent + # while p is not None and not isinstance(p, SortTerm): + # p = p.parent + # if isinstance(p, SortTerm): + if not self.hasaggrstep and self.final and node in self.skip: + return Constant(self.skip[node], 'Int'), node + # XXX only if not yet selected + newroot.append_selected(node.copy(newroot)) + self.skip[node] = len(newroot.selection) + return None, node + return self.visit_default(node, newroot, terms) + def visit_default(self, node, newroot, terms): subparts, node = self._visit_children(node, newroot, terms) return copy_node(newroot, node, subparts), node - visit_mathexpression = visit_constant = visit_function = visit_default - visit_sort = visit_sortterm = visit_default + visit_mathexpression = visit_constant = visit_default + + def visit_sortterm(self, node, newroot, terms): + subparts, node = self._visit_children(node, newroot, terms) + if not subparts: + return None, node + return copy_node(newroot, node, subparts), node def _visit_children(self, node, newroot, terms): subparts = [] diff -r 00b1b6b906cf -r 97c55baefa0c server/mssteps.py --- a/server/mssteps.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/mssteps.py Mon Jul 19 15:37:02 2010 +0200 @@ -140,13 +140,6 @@ def mytest_repr(self): """return a representation of this step suitable for test""" - sel = self.select.selection - restr = self.select.where - self.select.selection = self.selection - self.select.where = None - rql = self.select.as_string(kwargs=self.plan.args) - self.select.selection = sel - self.select.where = restr try: # rely on a monkey patch (cf unittest_querier) table = self.plan.tablesinorder[self.table] @@ -155,12 +148,19 @@ # not monkey patched table = self.table outputtable = self.outputtable - return (self.__class__.__name__, rql, self.limit, self.offset, table, - outputtable) + sql = self.get_sql().replace(self.table, table) + return (self.__class__.__name__, sql, outputtable) def execute(self): """execute this step""" self.execute_children() + sql = self.get_sql() + if self.outputtable: + self.plan.create_temp_table(self.outputtable) + sql = 'INSERT INTO %s %s' % (self.outputtable, sql) + return self.plan.sqlexec(sql, self.plan.args) + + def get_sql(self): self.inputmap = inputmap = self.children[-1].outputmap # get the select clause clause = [] @@ -223,17 +223,15 @@ sql.append('LIMIT %s' % self.limit) if self.offset: sql.append('OFFSET %s' % self.offset) - #print 'DATA', plan.sqlexec('SELECT * FROM %s' % self.table, None) - sql = ' '.join(sql) - if self.outputtable: - self.plan.create_temp_table(self.outputtable) - sql = 'INSERT INTO %s %s' % (self.outputtable, sql) - return self.plan.sqlexec(sql, self.plan.args) + return ' '.join(sql) def visit_function(self, function): """generate SQL name for a function""" - return '%s(%s)' % (function.name, - ','.join(c.accept(self) for c in function.children)) + try: + return self.children[0].outputmap[str(function)] + except KeyError: + return 
'%s(%s)' % (function.name, + ','.join(c.accept(self) for c in function.children)) def visit_variableref(self, variableref): """get the sql name for a variable reference""" diff -r 00b1b6b906cf -r 97c55baefa0c server/querier.py --- a/server/querier.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/querier.py Mon Jul 19 15:37:02 2010 +0200 @@ -29,7 +29,8 @@ from logilab.common.compat import any from rql import RQLSyntaxError from rql.stmts import Union, Select -from rql.nodes import Relation, VariableRef, Constant, SubQuery, Exists, Not +from rql.nodes import (Relation, VariableRef, Constant, SubQuery, Function, + Exists, Not) from cubicweb import Unauthorized, QueryError, UnknownEid, typed_eid from cubicweb import server @@ -50,7 +51,8 @@ key = term.as_string() value = '%s.C%s' % (table, i) if varmap.get(key, value) != value: - raise Exception('variable name conflict on %s' % key) + raise Exception('variable name conflict on %s: got %s / %s' + % (key, value, varmap)) varmap[key] = value # permission utilities ######################################################## @@ -294,7 +296,26 @@ for term in origselection: newselect.append_selected(term.copy(newselect)) if select.orderby: - newselect.set_orderby([s.copy(newselect) for s in select.orderby]) + sortterms = [] + for sortterm in select.orderby: + sortterms.append(sortterm.copy(newselect)) + for fnode in sortterm.get_nodes(Function): + if fnode.name == 'FTIRANK': + # we've to fetch the has_text relation as well + var = fnode.children[0].variable + rel = iter(var.stinfo['ftirels']).next() + assert not rel.ored(), 'unsupported' + newselect.add_restriction(rel.copy(newselect)) + # remove relation from the orig select and + # cleanup variable stinfo + rel.parent.remove(rel) + var.stinfo['ftirels'].remove(rel) + var.stinfo['relations'].remove(rel) + # XXX not properly re-annotated after security insertion? + newvar = newselect.get_variable(var.name) + newvar.stinfo.setdefault('ftirels', set()).add(rel) + newvar.stinfo.setdefault('relations', set()).add(rel) + newselect.set_orderby(sortterms) _expand_selection(select.orderby, selected, aliases, select, newselect) select.orderby = () # XXX dereference? if select.groupby: @@ -339,6 +360,7 @@ select.set_possible_types(localchecks[()]) add_types_restriction(self.schema, select) add_noinvariant(noinvariant, restricted, select, nbtrees) + self.rqlhelper.annotate(union) def _check_permissions(self, rqlst): """return a dict defining "local checks", e.g. 
RQLExpression defined in @@ -571,6 +593,8 @@ # rql parsing / analysing helper self.solutions = repo.vreg.solutions rqlhelper = repo.vreg.rqlhelper + # set backend on the rql helper, will be used for function checking + rqlhelper.backend = repo.config.sources()['system']['db-driver'] self._parse = rqlhelper.parse self._annotate = rqlhelper.annotate # rql planner diff -r 00b1b6b906cf -r 97c55baefa0c server/repository.py --- a/server/repository.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/repository.py Mon Jul 19 15:37:02 2010 +0200 @@ -104,10 +104,10 @@ XXX protect pyro access """ - def __init__(self, config, vreg=None, debug=False): + def __init__(self, config, vreg=None): self.config = config if vreg is None: - vreg = cwvreg.CubicWebVRegistry(config, debug) + vreg = cwvreg.CubicWebVRegistry(config) self.vreg = vreg self.pyro_registered = False self.info('starting repository from %s', self.config.apphome) @@ -154,13 +154,6 @@ if not isinstance(session.user, InternalManager): session.user.__class__ = usercls - def _bootstrap_hook_registry(self): - """called during bootstrap since we need the metadata hooks""" - hooksdirectory = join(CW_SOFTWARE_ROOT, 'hooks') - self.vreg.init_registration([hooksdirectory]) - self.vreg.load_file(join(hooksdirectory, 'metadata.py'), - 'cubicweb.hooks.metadata') - def open_connections_pools(self): config = self.config self._available_pools = Queue.Queue() @@ -186,7 +179,9 @@ for modname in ('__init__', 'authobjs', 'wfobjs'): self.vreg.load_file(join(etdirectory, '%s.py' % modname), 'cubicweb.entities.%s' % modname) - self._bootstrap_hook_registry() + hooksdirectory = join(CW_SOFTWARE_ROOT, 'hooks') + self.vreg.load_file(join(hooksdirectory, 'metadata.py'), + 'cubicweb.hooks.metadata') elif config.read_instance_schema: # normal start: load the instance schema from the database self.fill_schema() @@ -234,8 +229,7 @@ if resetvreg: if self.config._cubes is None: self.config.init_cubes(self.get_cubes()) - # full reload of all appobjects - self.vreg.reset() + # trigger full reload of all appobjects self.vreg.set_schema(schema) else: self.vreg._set_schema(schema) @@ -392,7 +386,7 @@ raise AuthenticationError('authentication failed with all sources') cwuser = self._build_user(session, eid) if self.config.consider_user_state and \ - not cwuser.state in cwuser.AUTHENTICABLE_STATES: + not cwuser.cw_adapt_to('IWorkflowable').state in cwuser.AUTHENTICABLE_STATES: raise AuthenticationError('user is not in authenticable state') return cwuser @@ -573,7 +567,7 @@ session.close() session = Session(user, self, cnxprops) user._cw = user.cw_rset.req = session - user.clear_related_cache() + user.cw_clear_relation_cache() self._sessions[session.id] = session self.info('opened session %s for user %s', session.id, login) self.hm.call_hooks('session_open', session) @@ -932,7 +926,7 @@ self._extid_cache[cachekey] = eid self._type_source_cache[eid] = (etype, source.uri, extid) entity = source.before_entity_insertion(session, extid, etype, eid) - entity.edited_attributes = set(entity) + entity.edited_attributes = set(entity.cw_attr_cache) if source.should_call_hooks: self.hm.call_hooks('before_add_entity', session, entity=entity) # XXX call add_info with complete=False ? @@ -1042,37 +1036,32 @@ the entity instance """ # init edited_attributes before calling before_add_entity hooks - entity._is_saved = False # entity has an eid but is not yet saved - entity.edited_attributes = set(entity) - entity_ = entity.pre_add_hook() - # XXX kill that transmutation feature ! 
- if not entity_ is entity: - entity.__class__ = entity_.__class__ - entity.__dict__.update(entity_.__dict__) + entity._cw_is_saved = False # entity has an eid but is not yet saved + entity.edited_attributes = set(entity.cw_attr_cache) # XXX cw_edited_attributes eschema = entity.e_schema source = self.locate_etype_source(entity.__regid__) # allocate an eid to the entity before calling hooks - entity.set_eid(self.system_source.create_eid(session)) + entity.eid = self.system_source.create_eid(session) # set caches asap extid = self.init_entity_caches(session, entity, source) if server.DEBUG & server.DBG_REPO: - print 'ADD entity', entity.__regid__, entity.eid, dict(entity) + print 'ADD entity', self, entity.__regid__, entity.eid, entity.cw_attr_cache relations = [] if source.should_call_hooks: self.hm.call_hooks('before_add_entity', session, entity=entity) # XXX use entity.keys here since edited_attributes is not updated for # inline relations XXX not true, right? (see edited_attributes # affectation above) - for attr in entity.iterkeys(): + for attr in entity.cw_attr_cache.iterkeys(): rschema = eschema.subjrels[attr] if not rschema.final: # inlined relation relations.append((attr, entity[attr])) - entity.set_defaults() + entity._cw_set_defaults() if session.is_hook_category_activated('integrity'): - entity.check(creation=True) + entity._cw_check(creation=True) source.add_entity(session, entity) self.add_info(session, entity, source, extid, complete=False) - entity._is_saved = True # entity has an eid and is saved + entity._cw_is_saved = True # entity has an eid and is saved # prefill entity relation caches for rschema in eschema.subject_relations(): rtype = str(rschema) @@ -1081,12 +1070,13 @@ if rschema.final: entity.setdefault(rtype, None) else: - entity.set_related_cache(rtype, 'subject', session.empty_rset()) + entity.cw_set_relation_cache(rtype, 'subject', + session.empty_rset()) for rschema in eschema.object_relations(): rtype = str(rschema) if rtype in schema.VIRTUAL_RTYPES: continue - entity.set_related_cache(rtype, 'object', session.empty_rset()) + entity.cw_set_relation_cache(rtype, 'object', session.empty_rset()) # set inline relation cache before call to after_add_entity for attr, value in relations: session.update_rel_cache_add(entity.eid, attr, value) @@ -1107,7 +1097,7 @@ """ if server.DEBUG & server.DBG_REPO: print 'UPDATE entity', entity.__regid__, entity.eid, \ - dict(entity), edited_attributes + entity.cw_attr_cache, edited_attributes hm = self.hm eschema = entity.e_schema session.set_entity_cache(entity) @@ -1145,7 +1135,7 @@ if not only_inline_rels: hm.call_hooks('before_update_entity', session, entity=entity) if session.is_hook_category_activated('integrity'): - entity.check() + entity._cw_check() source.update_entity(session, entity) self.system_source.update_info(session, entity, need_fti_update) if source.should_call_hooks: @@ -1153,7 +1143,7 @@ hm.call_hooks('after_update_entity', session, entity=entity) for attr, value, prevvalue in relations: # if the relation is already cached, update existant cache - relcache = entity.relation_cached(attr, 'subject') + relcache = entity.cw_relation_cached(attr, 'subject') if prevvalue is not None: hm.call_hooks('after_delete_relation', session, eidfrom=entity.eid, rtype=attr, eidto=prevvalue) @@ -1163,8 +1153,8 @@ if relcache is not None: session.update_rel_cache_add(entity.eid, attr, value) else: - entity.set_related_cache(attr, 'subject', - session.eid_rset(value)) + entity.cw_set_relation_cache(attr, 'subject', + 
session.eid_rset(value)) hm.call_hooks('after_add_relation', session, eidfrom=entity.eid, rtype=attr, eidto=value) finally: diff -r 00b1b6b906cf -r 97c55baefa0c server/schemaserial.py --- a/server/schemaserial.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/schemaserial.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""functions for schema / permissions (de)serialization using RQL +"""functions for schema / permissions (de)serialization using RQL""" -""" __docformat__ = "restructuredtext en" import os @@ -27,7 +26,9 @@ from yams import schema as schemamod, buildobjs as ybo -from cubicweb.schema import CONSTRAINTS, ETYPE_NAME_MAP, VIRTUAL_RTYPES +from cubicweb import CW_SOFTWARE_ROOT, typed_eid +from cubicweb.schema import (CONSTRAINTS, ETYPE_NAME_MAP, + VIRTUAL_RTYPES, PURE_VIRTUAL_RTYPES) from cubicweb.server import sqlutils def group_mapping(cursor, interactive=True): @@ -57,10 +58,18 @@ if not value: continue try: - res[group] = int(value) + eid = typed_eid(value) except ValueError: print 'eid should be an integer' continue + for eid_ in res.values(): + if eid == eid_: + break + else: + print 'eid is not a group eid' + continue + res[name] = eid + break return res def cstrtype_mapping(cursor): @@ -100,17 +109,28 @@ sidx[eid] = eschema continue if etype in ETYPE_NAME_MAP: + needcopy = False netype = ETYPE_NAME_MAP[etype] # can't use write rql queries at this point, use raw sql - session.system_sql('UPDATE %(p)sCWEType SET %(p)sname=%%(n)s WHERE %(p)seid=%%(x)s' - % {'p': sqlutils.SQL_PREFIX}, - {'x': eid, 'n': netype}) - session.system_sql('UPDATE entities SET type=%(n)s WHERE type=%(x)s', - {'x': etype, 'n': netype}) + sqlexec = session.system_sql + if sqlexec('SELECT 1 FROM %(p)sCWEType WHERE %(p)sname=%%(n)s' + % {'p': sqlutils.SQL_PREFIX}, {'n': netype}).fetchone(): + # the new type already exists, we should merge + assert etype.lower() != netype.lower() + needcopy = True + else: + # the new type doesn't exist, we should rename + sqlexec('UPDATE %(p)sCWEType SET %(p)sname=%%(n)s WHERE %(p)seid=%%(x)s' + % {'p': sqlutils.SQL_PREFIX}, {'x': eid, 'n': netype}) + if etype.lower() != netype.lower(): + sqlexec('ALTER TABLE %s%s RENAME TO %s%s' % ( + sqlutils.SQL_PREFIX, etype, sqlutils.SQL_PREFIX, netype)) + sqlexec('UPDATE entities SET type=%(n)s WHERE type=%(x)s', + {'x': etype, 'n': netype}) session.commit(False) try: - session.system_sql('UPDATE deleted_entities SET type=%(n)s WHERE type=%(x)s', - {'x': etype, 'n': netype}) + sqlexec('UPDATE deleted_entities SET type=%(n)s WHERE type=%(x)s', + {'x': etype, 'n': netype}) except: pass tocleanup = [eid] @@ -118,6 +138,12 @@ if etype == eidetype) repo.clear_caches(tocleanup) session.commit(False) + if needcopy: + from logilab.common.testlib import mock_object + sidx[eid] = mock_object(type=netype) + # copy / CWEType entity removal expected to be done through + # rename_entity_type in a migration script + continue etype = netype etype = ybo.EntityType(name=etype, description=desc, eid=eid) eschema = schema.add_entity_type(etype) diff -r 00b1b6b906cf -r 97c55baefa0c server/server.py --- a/server/server.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/server.py Mon Jul 19 15:37:02 2010 +0200 @@ -74,10 +74,10 @@ class RepositoryServer(object): - def __init__(self, config, debug=False): + def __init__(self, config): """make the repository available as a PyRO object""" self.config = config - self.repo = Repository(config, 
debug=debug) + self.repo = Repository(config) self.ns = None self.quiting = None # event queue diff -r 00b1b6b906cf -r 97c55baefa0c server/serverconfig.py --- a/server/serverconfig.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/serverconfig.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""server.serverconfig definition +"""server.serverconfig definition""" -""" __docformat__ = "restructuredtext en" from os.path import join, exists diff -r 00b1b6b906cf -r 97c55baefa0c server/serverctl.py --- a/server/serverctl.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/serverctl.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""cubicweb-ctl commands and command handlers specific to the server.serverconfig +"""cubicweb-ctl commands and command handlers specific to the repository""" -""" __docformat__ = 'restructuredtext en' # *ctl module should limit the number of import to be imported as quickly as @@ -48,14 +47,16 @@ if dbname is None: dbname = source['db-name'] driver = source['db-driver'] - print '-> connecting to %s database' % driver, - if dbhost: - print '%s@%s' % (dbname, dbhost), - else: - print dbname, + if verbose: + print '-> connecting to %s database' % driver, + if dbhost: + print '%s@%s' % (dbname, dbhost), + else: + print dbname, if not verbose or (not special_privs and source.get('db-user')): user = source['db-user'] - print 'as', user + if verbose: + print 'as', user if source.get('db-password'): password = source['db-password'] else: @@ -152,8 +153,8 @@ cfgname = 'repository' def bootstrap(self, cubes, inputlevel=0): - """create an instance by copying files from the given cube and by - asking information necessary to build required configuration files + """create an instance by copying files from the given cube and by asking + information necessary to build required configuration files """ from cubicweb.server.utils import ask_source_config config = self.config @@ -249,11 +250,12 @@ cmdname = 'start' cfgname = 'repository' - def start_server(self, ctlconf, debug): + def start_server(self, config): command = ['cubicweb-ctl start-repository '] - if debug: + if config.debugmode: command.append('--debug') - command.append(self.config.appid) + command.append('--loglevel %s' % config['log-threshold'].lower()) + command.append(config.appid) os.system(' '.join(command)) @@ -262,8 +264,7 @@ cfgname = 'repository' def poststop(self): - """if pyro is enabled, ensure the repository is correctly - unregistered + """if pyro is enabled, ensure the repository is correctly unregistered """ if self.config.pyro_enabled(): from cubicweb.server.repository import pyro_unregister @@ -272,6 +273,14 @@ # repository specific commands ################################################ +def createdb(helper, source, dbcnx, cursor, **kwargs): + if dbcnx.logged_user != source['db-user']: + helper.create_database(cursor, source['db-name'], source['db-user'], + source['db-encoding'], **kwargs) + else: + helper.create_database(cursor, source['db-name'], + dbencoding=source['db-encoding'], **kwargs) + class CreateInstanceDBCommand(Command): """Create the system database of an instance (run after 'create'). 
@@ -314,14 +323,13 @@ source = config.sources()['system'] dbname = source['db-name'] driver = source['db-driver'] - create_db = self.config.create_db helper = get_db_helper(driver) if driver == 'sqlite': if os.path.exists(dbname) and ( automatic or ASK.confirm('Database %s already exists. Drop it?' % dbname)): os.unlink(dbname) - elif create_db: + elif self.config.create_db: print '\n'+underline_title('Creating the system database') # connect on the dbms system base to create our base dbcnx = _db_sys_cnx(source, 'CREATE DATABASE and / or USER', verbose=verbose) @@ -338,12 +346,7 @@ cursor.execute('DROP DATABASE %s' % dbname) else: return - if dbcnx.logged_user != source['db-user']: - helper.create_database(cursor, dbname, source['db-user'], - source['db-encoding']) - else: - helper.create_database(cursor, dbname, - dbencoding=source['db-encoding']) + createdb(helper, source, dbcnx, cursor) dbcnx.commit() print '-> database %s created.' % dbname except: @@ -523,22 +526,28 @@ ('debug', {'short': 'D', 'action' : 'store_true', 'help': 'start server in debug mode.'}), + ('loglevel', + {'short': 'l', 'type' : 'choice', 'metavar': '', + 'default': None, 'choices': ('debug', 'info', 'warning', 'error'), + 'help': 'debug if -D is set, error otherwise', + }), ) def run(self, args): from logilab.common.daemon import daemonize + from cubicweb.cwctl import init_cmdline_log_threshold from cubicweb.server.server import RepositoryServer appid = pop_arg(args, msg='No instance specified !') - config = ServerConfiguration.config_for(appid) - if sys.platform == 'win32': - if not self.config.debug: - from logging import getLogger - logger = getLogger('cubicweb.ctl') - logger.info('Forcing debug mode on win32 platform') - self.config.debug = True - debug = self.config.debug + debug = self['debug'] + if sys.platform == 'win32' and not debug: + from logging import getLogger + logger = getLogger('cubicweb.ctl') + logger.info('Forcing debug mode on win32 platform') + debug = True + config = ServerConfiguration.config_for(appid, debugmode=debug) + init_cmdline_log_threshold(config, self['loglevel']) # create the server - server = RepositoryServer(config, debug) + server = RepositoryServer(config) # ensure the directory where the pid-file should be set exists (for # instance /var/run/cubicweb may be deleted on computer restart) pidfile = config['pid-file'] diff -r 00b1b6b906cf -r 97c55baefa0c server/session.py --- a/server/session.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/session.py Mon Jul 19 15:37:02 2010 +0200 @@ -250,7 +250,7 @@ entity = self.entity_cache(eid) except KeyError: return - rcache = entity.relation_cached(rtype, role) + rcache = entity.cw_relation_cached(rtype, role) if rcache is not None: rset, entities = rcache rset = rset.copy() @@ -266,14 +266,15 @@ targetentity.cw_col = 0 rset.rowcount += 1 entities.append(targetentity) - entity._related_cache['%s_%s' % (rtype, role)] = (rset, tuple(entities)) + entity._cw_related_cache['%s_%s' % (rtype, role)] = ( + rset, tuple(entities)) def _update_entity_rel_cache_del(self, eid, rtype, role, targeteid): try: entity = self.entity_cache(eid) except KeyError: return - rcache = entity.relation_cached(rtype, role) + rcache = entity.cw_relation_cached(rtype, role) if rcache is not None: rset, entities = rcache for idx, row in enumerate(rset.rows): @@ -292,7 +293,8 @@ del rset.description[idx] del entities[idx] rset.rowcount -= 1 - entity._related_cache['%s_%s' % (rtype, role)] = (rset, tuple(entities)) + entity._cw_related_cache['%s_%s' % (rtype, role)] = ( + 
rset, tuple(entities)) # resource accessors ###################################################### @@ -312,16 +314,15 @@ def set_language(self, language): """i18n configuration for translation""" - vreg = self.vreg language = language or self.user.property_value('ui.language') try: - gettext, pgettext = vreg.config.translations[language] + gettext, pgettext = self.vreg.config.translations[language] self._ = self.__ = gettext self.pgettext = pgettext except KeyError: - language = vreg.property_value('ui.language') + language = self.vreg.property_value('ui.language') try: - gettext, pgettext = vreg.config.translations[language] + gettext, pgettext = self.vreg.config.translations[language] self._ = self.__ = gettext self.pgettext = pgettext except KeyError: @@ -661,16 +662,6 @@ else: del self.transaction_data['ecache'][eid] - def base_url(self): - url = self.repo.config['base-url'] - if not url: - try: - url = self.repo.config.default_base_url() - except AttributeError: # default_base_url() might not be available - self.warning('missing base-url definition in server config') - url = u'' - return url - def from_controller(self): """return the id (string) of the controller issuing the request (no sense here, always return 'view') @@ -756,7 +747,6 @@ self.pending_operations[:] = processed self.debug('%s session %s done', trstate, self.id) except: - self.exception('error while %sing', trstate) # if error on [pre]commit: # # * set .failed = True on the operation causing the failure @@ -768,8 +758,12 @@ # instead of having to implements rollback, revertprecommit # and revertcommit, that will be enough in mont case. operation.failed = True - for operation in processed: - operation.handle_event('revert%s_event' % trstate) + for operation in reversed(processed): + try: + operation.handle_event('revert%s_event' % trstate) + except: + self.critical('error while reverting %sing', trstate, + exc_info=True) # XXX use slice notation since self.pending_operations is a # read-only property. self.pending_operations[:] = processed + self.pending_operations diff -r 00b1b6b906cf -r 97c55baefa0c server/sources/__init__.py --- a/server/sources/__init__.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/sources/__init__.py Mon Jul 19 15:37:02 2010 +0200 @@ -342,7 +342,7 @@ entity. 
""" entity = self.repo.vreg['etypes'].etype_class(etype)(session) - entity.set_eid(eid) + entity.eid = eid return entity def after_entity_insertion(self, session, lid, entity): diff -r 00b1b6b906cf -r 97c55baefa0c server/sources/ldapuser.py --- a/server/sources/ldapuser.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/sources/ldapuser.py Mon Jul 19 15:37:02 2010 +0200 @@ -232,6 +232,8 @@ if res: ldapemailaddr = res[0].get(ldap_emailattr) if ldapemailaddr: + if isinstance(ldapemailaddr, list): + ldapemailaddr = ldapemailaddr[0] # XXX consider only the first email in the list rset = execute('Any X,A WHERE ' 'X address A, U use_email X, U eid %(u)s', {'u': eid}) @@ -522,7 +524,7 @@ eid, base) entity = session.entity_from_eid(eid, 'CWUser') self.repo.delete_info(session, entity, self.uri, base) - self.reset_cache() + self.reset_caches() return [] # except ldap.REFERRAL, e: # cnx = self.handle_referral(e) @@ -589,6 +591,8 @@ emailaddr = self._cache[dn][self.user_rev_attrs['email']] except KeyError: return + if isinstance(emailaddr, list): + emailaddr = emailaddr[0] # XXX consider only the first email in the list rset = session.execute('EmailAddress X WHERE X address %(addr)s', {'addr': emailaddr}) if rset: diff -r 00b1b6b906cf -r 97c55baefa0c server/sources/native.py --- a/server/sources/native.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/sources/native.py Mon Jul 19 15:37:02 2010 +0200 @@ -42,6 +42,8 @@ from logilab.common.shellutils import getlogin from logilab.database import get_db_helper +from yams import schema2sql as y2sql + from cubicweb import UnknownEid, AuthenticationError, ValidationError, Binary from cubicweb import transaction as tx, server, neg_role from cubicweb.schema import VIRTUAL_RTYPES @@ -127,6 +129,21 @@ restr = '(%s)' % ' OR '.join(clauses) return '%s WHERE %s' % (select, restr) +def rdef_table_column(rdef): + """return table and column used to store the given relation definition in + the database + """ + return (SQL_PREFIX + str(rdef.subject), + SQL_PREFIX + str(rdef.rtype)) + +def rdef_physical_info(dbhelper, rdef): + """return backend type and a boolean flag if NULL values should be allowed + for a given relation definition + """ + coltype = y2sql.type_from_constraints(dbhelper, rdef.object, + rdef.constraints, creating=False) + allownull = rdef.cardinality[0] != '1' + return coltype, allownull class UndoException(Exception): """something went wrong during undoing""" @@ -678,6 +695,47 @@ # short cut to method requiring advanced db helper usage ################## + def update_rdef_column(self, session, rdef): + """update physical column for a relation definition (final or inlined) + """ + table, column = rdef_table_column(rdef) + coltype, allownull = rdef_physical_info(self.dbhelper, rdef) + if not self.dbhelper.alter_column_support: + self.error("backend can't alter %s.%s to %s%s", table, column, coltype, + not allownull and 'NOT NULL' or '') + return + self.dbhelper.change_col_type(LogCursor(session.pool[self.uri]), + table, column, coltype, allownull) + self.info('altered %s.%s: now %s%s', table, column, coltype, + not allownull and 'NOT NULL' or '') + + def update_rdef_null_allowed(self, session, rdef): + """update NULL / NOT NULL of physical column for a relation definition + (final or inlined) + """ + if not self.dbhelper.alter_column_support: + # not supported (and NOT NULL not set by yams in that case, so no + # worry) + return + table, column = rdef_table_column(rdef) + coltype, allownull = rdef_physical_info(self.dbhelper, rdef) + 
self.dbhelper.set_null_allowed(LogCursor(session.pool[self.uri]), + table, column, coltype, allownull) + + def update_rdef_indexed(self, session, rdef): + table, column = rdef_table_column(rdef) + if rdef.indexed: + self.create_index(session, table, column) + else: + self.drop_index(session, table, column) + + def update_rdef_unique(self, session, rdef): + table, column = rdef_table_column(rdef) + if rdef.constraint_by_type('UniqueConstraint'): + self.create_index(session, table, column, unique=True) + else: + self.drop_index(session, table, column, unique=True) + def create_index(self, session, table, column, unique=False): cursor = LogCursor(session.pool[self.uri]) self.dbhelper.create_index(cursor, table, column, unique) @@ -686,14 +744,6 @@ cursor = LogCursor(session.pool[self.uri]) self.dbhelper.drop_index(cursor, table, column, unique) - def change_col_type(self, session, table, column, coltype, null_allowed): - cursor = LogCursor(session.pool[self.uri]) - self.dbhelper.change_col_type(cursor, table, column, coltype, null_allowed) - - def set_null_allowed(self, session, table, column, coltype, null_allowed): - cursor = LogCursor(session.pool[self.uri]) - self.dbhelper.set_null_allowed(cursor, table, column, coltype, null_allowed) - # system source interface ################################################# def eid_type_source(self, session, eid): @@ -1079,10 +1129,10 @@ entity[rtype] = unicode(value, session.encoding, 'replace') else: entity[rtype] = value - entity.set_eid(eid) + entity.eid = eid session.repo.init_entity_caches(session, entity, self) entity.edited_attributes = set(entity) - entity.check() + entity._cw_check() self.repo.hm.call_hooks('before_add_entity', session, entity=entity) # restore the entity action.changes['cw_eid'] = eid @@ -1149,7 +1199,7 @@ return [session._( "Can't undo creation of entity %(eid)s of type %(etype)s, type " "no more supported" % {'eid': eid, 'etype': etype})] - entity.set_eid(eid) + entity.eid = eid # for proper eid/type cache update hook.set_operation(session, 'pendingeids', eid, CleanupDeletedEidsCacheOp) @@ -1237,7 +1287,8 @@ try: # use cursor_index_object, not cursor_reindex_object since # unindexing done in the FTIndexEntityOp - self.dbhelper.cursor_index_object(entity.eid, entity, + self.dbhelper.cursor_index_object(entity.eid, + entity.cw_adapt_to('IFTIndexable'), session.pool['system']) except Exception: # let KeyboardInterrupt / SystemExit propagate self.exception('error while reindexing %s', entity) @@ -1262,7 +1313,8 @@ # processed return done.add(eid) - for container in session.entity_from_eid(eid).fti_containers(): + iftindexable = session.entity_from_eid(eid).cw_adapt_to('IFTIndexable') + for container in iftindexable.fti_containers(): source.fti_unindex_entity(session, container.eid) source.fti_index_entity(session, container) diff -r 00b1b6b906cf -r 97c55baefa0c server/sources/rql2sql.py --- a/server/sources/rql2sql.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/sources/rql2sql.py Mon Jul 19 15:37:02 2010 +0200 @@ -611,12 +611,14 @@ sql += '\nHAVING %s' % having # sort if sorts: - sql += '\nORDER BY %s' % ','.join(self._sortterm_sql(sortterm, - fselectidx) - for sortterm in sorts) - if fneedwrap: - selection = ['T1.C%s' % i for i in xrange(len(origselection))] - sql = 'SELECT %s FROM (%s) AS T1' % (','.join(selection), sql) + sqlsortterms = [self._sortterm_sql(sortterm, fselectidx) + for sortterm in sorts] + sqlsortterms = [x for x in sqlsortterms if x is not None] + if sqlsortterms: + sql += '\nORDER BY %s' % 
','.join(sqlsortterms) + if sorts and fneedwrap: + selection = ['T1.C%s' % i for i in xrange(len(origselection))] + sql = 'SELECT %s FROM (%s) AS T1' % (','.join(selection), sql) state.finalize_source_cbs() finally: select.selection = origselection @@ -696,12 +698,14 @@ def _sortterm_sql(self, sortterm, selectidx): term = sortterm.term try: - sqlterm = str(selectidx.index(str(term)) + 1) + sqlterm = selectidx.index(str(term)) + 1 except ValueError: # Constant node or non selected term - sqlterm = str(term.accept(self)) + sqlterm = term.accept(self) + if sqlterm is None: + return None if sortterm.asc: - return sqlterm + return str(sqlterm) else: return '%s DESC' % sqlterm @@ -1060,7 +1064,8 @@ not_ = True else: not_ = False - return self.dbhelper.fti_restriction_sql(alias, const.eval(self._args), + query = const.eval(self._args) + return self.dbhelper.fti_restriction_sql(alias, query, jointo, not_) + restriction def visit_comparison(self, cmp): @@ -1104,6 +1109,15 @@ def visit_function(self, func): """generate SQL name for a function""" + if func.name == 'FTIRANK': + try: + rel = iter(func.children[0].variable.stinfo['ftirels']).next() + except KeyError: + raise BadRQLQuery("can't use FTIRANK on variable not used in an" + " 'has_text' relation (eg full-text search)") + const = rel.get_parts()[1].children[0] + return self.dbhelper.fti_rank_order(self._fti_table(rel), + const.eval(self._args)) args = [c.accept(self) for c in func.children] if func in self._state.source_cb_funcs: # function executed as a callback on the source @@ -1132,8 +1146,6 @@ _id = _id.encode() else: _id = str(id(constant)).replace('-', '', 1) - if isinstance(value, unicode): - value = value.encode(self.dbencoding) self._query_attrs[_id] = value return '%%(%s)s' % _id diff -r 00b1b6b906cf -r 97c55baefa0c server/sources/storages.py --- a/server/sources/storages.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/sources/storages.py Mon Jul 19 15:37:02 2010 +0200 @@ -174,7 +174,7 @@ # PIL processing that use filename extension to detect content-type, as # well as providing more understandable file names on the fs. basename = [str(entity.eid), attr] - name = entity.attr_metadata(attr, 'name') + name = entity.cw_attr_metadata(attr, 'name') if name is not None: basename.append(name.encode(self.fsencoding)) fspath = uniquify_path(self.default_directory, '_'.join(basename)) diff -r 00b1b6b906cf -r 97c55baefa0c server/sqlutils.py --- a/server/sqlutils.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/sqlutils.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""SQL utilities functions and classes. +"""SQL utilities functions and classes.""" -""" __docformat__ = "restructuredtext en" import os diff -r 00b1b6b906cf -r 97c55baefa0c server/ssplanner.py --- a/server/ssplanner.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/ssplanner.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,15 +15,12 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""plan execution of rql queries on a single source +"""plan execution of rql queries on a single source""" -""" from __future__ import with_statement __docformat__ = "restructuredtext en" -from copy import copy - from rql.stmts import Union, Select from rql.nodes import Constant, Relation @@ -479,7 +476,7 @@ result = [[]] for row in result: # get a new entity definition for this row - edef = copy(base_edef) + edef = base_edef.cw_copy() # complete this entity def using row values index = 0 for rtype, rorder, value in self.rdefs: @@ -487,7 +484,7 @@ value = row[index] index += 1 if rorder == InsertRelationsStep.FINAL: - edef.rql_set_value(rtype, value) + edef._cw_rql_set_value(rtype, value) elif rorder == InsertRelationsStep.RELATION: self.plan.add_relation_def( (edef, rtype, value) ) edef.querier_pending_relations[(rtype, 'subject')] = value @@ -584,7 +581,7 @@ edef = edefs[eid] except KeyError: edefs[eid] = edef = session.entity_from_eid(eid) - edef.rql_set_value(str(rschema), rhsval) + edef._cw_rql_set_value(str(rschema), rhsval) else: repo.glob_add_relation(session, lhsval, str(rschema), rhsval) result[i] = newrow diff -r 00b1b6b906cf -r 97c55baefa0c server/test/data/migratedapp/schema.py --- a/server/test/data/migratedapp/schema.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/test/data/migratedapp/schema.py Mon Jul 19 15:37:02 2010 +0200 @@ -69,7 +69,7 @@ mydate = Date(default='TODAY') shortpara = String(maxsize=64) ecrit_par = SubjectRelation('Personne', constraints=[RQLConstraint('S concerne A, O concerne A')]) - attachment = SubjectRelation(('File', 'Image')) + attachment = SubjectRelation('File') class Text(Para): __specializes_schema__ = True diff -r 00b1b6b906cf -r 97c55baefa0c server/test/data/schema.py --- a/server/test/data/schema.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/test/data/schema.py Mon Jul 19 15:37:02 2010 +0200 @@ -92,7 +92,7 @@ }) migrated_from = SubjectRelation('Note') - attachment = SubjectRelation(('File', 'Image')) + attachment = SubjectRelation('File') inline1 = SubjectRelation('Affaire', inlined=True, cardinality='?*') todo_by = SubjectRelation('CWUser') diff -r 00b1b6b906cf -r 97c55baefa0c server/test/data/site_cubicweb.py --- a/server/test/data/site_cubicweb.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/test/data/site_cubicweb.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,6 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-""" - -""" from logilab.database import FunctionDescr from logilab.database.sqlite import register_sqlite_pyfunc @@ -25,7 +22,7 @@ try: class DUMB_SORT(FunctionDescr): - supported_backends = ('sqlite',) + pass register_function(DUMB_SORT) def dumb_sort(something): diff -r 00b1b6b906cf -r 97c55baefa0c server/test/data/sources_fti --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/data/sources_fti Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,14 @@ +[system] + +db-driver = postgres +db-host = localhost +db-port = +adapter = native +db-name = cw_fti_test +db-encoding = UTF-8 +db-user = syt +db-password = syt + +[admin] +login = admin +password = gingkow diff -r 00b1b6b906cf -r 97c55baefa0c server/test/unittest_fti.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/unittest_fti.py Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,52 @@ +from __future__ import with_statement + +from cubicweb.devtools import ApptestConfiguration +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.selectors import is_instance +from cubicweb.entities.adapters import IFTIndexableAdapter + +class PostgresFTITC(CubicWebTC): + config = ApptestConfiguration('data', sourcefile='sources_fti') + + def test_occurence_count(self): + req = self.request() + c1 = req.create_entity('Card', title=u'c1', + content=u'cubicweb cubicweb cubicweb') + c2 = req.create_entity('Card', title=u'c3', + content=u'cubicweb') + c3 = req.create_entity('Card', title=u'c2', + content=u'cubicweb cubicweb') + self.commit() + self.assertEquals(req.execute('Card X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"').rows, + [[c1.eid], [c3.eid], [c2.eid]]) + + + def test_attr_weight(self): + class CardIFTIndexableAdapter(IFTIndexableAdapter): + __select__ = is_instance('Card') + attr_weight = {'title': 'A'} + with self.temporary_appobjects(CardIFTIndexableAdapter): + req = self.request() + c1 = req.create_entity('Card', title=u'c1', + content=u'cubicweb cubicweb cubicweb') + c2 = req.create_entity('Card', title=u'c2', + content=u'cubicweb cubicweb') + c3 = req.create_entity('Card', title=u'cubicweb', + content=u'autre chose') + self.commit() + self.assertEquals(req.execute('Card X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"').rows, + [[c3.eid], [c1.eid], [c2.eid]]) + + + def test_entity_weight(self): + class PersonneIFTIndexableAdapter(IFTIndexableAdapter): + __select__ = is_instance('Personne') + entity_weight = 2.0 + with self.temporary_appobjects(PersonneIFTIndexableAdapter): + req = self.request() + c1 = req.create_entity('Personne', nom=u'c1', prenom=u'cubicweb') + c2 = req.create_entity('Comment', content=u'cubicweb cubicweb', comments=c1) + c3 = req.create_entity('Comment', content=u'cubicweb cubicweb cubicweb', comments=c1) + self.commit() + self.assertEquals(req.execute('Any X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"').rows, + [[c1.eid], [c3.eid], [c2.eid]]) diff -r 00b1b6b906cf -r 97c55baefa0c server/test/unittest_hook.py --- a/server/test/unittest_hook.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/test/unittest_hook.py Mon Jul 19 15:37:02 2010 +0200 @@ -23,7 +23,6 @@ from cubicweb.devtools import TestServerConfiguration from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.selectors import implements from cubicweb.server import hook from cubicweb.hooks import integrity, syncschema @@ -65,7 +64,7 @@ def test_global_operation_order(self): session = self.session op1 = integrity._DelayedDeleteOp(session) - op2 = syncschema.MemSchemaRDefDel(session) + op2 = syncschema.RDefDelOp(session) 
# equivalent operation generated by op2 but replace it here by op3 so we # can check the result... op3 = syncschema.MemSchemaNotifyChanges(session) diff -r 00b1b6b906cf -r 97c55baefa0c server/test/unittest_ldapuser.py --- a/server/test/unittest_ldapuser.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/test/unittest_ldapuser.py Mon Jul 19 15:37:02 2010 +0200 @@ -178,12 +178,13 @@ cnx = self.login(SYT, password='dummypassword') cu = cnx.cursor() adim = cu.execute('CWUser X WHERE X login %(login)s', {'login': ADIM}).get_entity(0, 0) - adim.fire_transition('deactivate') + iworkflowable = adim.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') try: cnx.commit() adim.clear_all_caches() self.assertEquals(adim.in_state[0].name, 'deactivated') - trinfo = adim.latest_trinfo() + trinfo = iworkflowable.latest_trinfo() self.assertEquals(trinfo.owned_by[0].login, SYT) # select from_state to skip the user's creation TrInfo rset = self.sexecute('Any U ORDERBY D DESC WHERE WF wf_info_for X,' @@ -195,7 +196,7 @@ # restore db state self.restore_connection() adim = self.sexecute('CWUser X WHERE X login %(login)s', {'login': ADIM}).get_entity(0, 0) - adim.fire_transition('activate') + adim.cw_adapt_to('IWorkflowable').fire_transition('activate') self.sexecute('DELETE X in_group G WHERE X login %(syt)s, G name "managers"', {'syt': SYT}) def test_same_column_names(self): diff -r 00b1b6b906cf -r 97c55baefa0c server/test/unittest_migractions.py --- a/server/test/unittest_migractions.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/test/unittest_migractions.py Mon Jul 19 15:37:02 2010 +0200 @@ -425,7 +425,7 @@ self.failIf(self.config.cube_dir('email') in self.config.cubes_path()) self.failIf('file' in self.config.cubes()) self.failIf(self.config.cube_dir('file') in self.config.cubes_path()) - for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'Image', + for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'sender', 'in_thread', 'reply_to', 'data_format'): self.failIf(ertype in schema, ertype) self.assertEquals(sorted(schema['see_also'].rdefs.keys()), @@ -448,7 +448,7 @@ self.failUnless(self.config.cube_dir('email') in self.config.cubes_path()) self.failUnless('file' in self.config.cubes()) self.failUnless(self.config.cube_dir('file') in self.config.cubes_path()) - for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'Image', + for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'sender', 'in_thread', 'reply_to', 'data_format'): self.failUnless(ertype in schema, ertype) self.assertEquals(sorted(schema['see_also'].rdefs.keys()), diff -r 00b1b6b906cf -r 97c55baefa0c server/test/unittest_msplanner.py --- a/server/test/unittest_msplanner.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/test/unittest_msplanner.py Mon Jul 19 15:37:02 2010 +0200 @@ -60,7 +60,7 @@ {'X': 'Card'}, {'X': 'Comment'}, {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailAddress'}, {'X': 'EmailPart'}, {'X': 'EmailThread'}, {'X': 'ExternalUri'}, {'X': 'File'}, - {'X': 'Folder'}, {'X': 'Image'}, {'X': 'Note'}, + {'X': 'Folder'}, {'X': 'Note'}, {'X': 'Personne'}, {'X': 'RQLExpression'}, {'X': 'Societe'}, {'X': 'State'}, {'X': 'SubDivision'}, {'X': 'SubWorkflowExitPoint'}, {'X': 'Tag'}, {'X': 'TrInfo'}, {'X': 'Transition'}, @@ -413,7 +413,7 @@ """retrieve CWUser X from both sources and return concatenation of results """ self._test('CWUser X ORDERBY X LIMIT 10 OFFSET 10', - [('AggrStep', 'Any X ORDERBY X', 10, 10, 'table0', None, [ + [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY table0.C0 LIMIT 10 
OFFSET 10', None, [ ('FetchStep', [('Any X WHERE X is CWUser', [{'X': 'CWUser'}])], [self.ldap, self.system], {}, {'X': 'table0.C0'}, []), ]), @@ -423,7 +423,7 @@ """ # COUNT(X) is kept in sub-step and transformed into SUM(X) in the AggrStep self._test('Any COUNT(X) WHERE X is CWUser', - [('AggrStep', 'Any COUNT(X)', None, None, 'table0', None, [ + [('AggrStep', 'SELECT SUM(table0.C0) FROM table0', None, [ ('FetchStep', [('Any COUNT(X) WHERE X is CWUser', [{'X': 'CWUser'}])], [self.ldap, self.system], {}, {'COUNT(X)': 'table0.C0'}, []), ]), @@ -498,7 +498,7 @@ def test_complex_ordered(self): self._test('Any L ORDERBY L WHERE X login L', - [('AggrStep', 'Any L ORDERBY L', None, None, 'table0', None, + [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY table0.C0', None, [('FetchStep', [('Any L WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], [self.ldap, self.system], {}, {'X.login': 'table0.C0', 'L': 'table0.C0'}, []), @@ -507,7 +507,7 @@ def test_complex_ordered_limit_offset(self): self._test('Any L ORDERBY L LIMIT 10 OFFSET 10 WHERE X login L', - [('AggrStep', 'Any L ORDERBY L', 10, 10, 'table0', None, + [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY table0.C0 LIMIT 10 OFFSET 10', None, [('FetchStep', [('Any L WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], [self.ldap, self.system], {}, {'X.login': 'table0.C0', 'L': 'table0.C0'}, []), @@ -593,7 +593,7 @@ 2. return content of the table sorted """ self._test('Any X,F ORDERBY F WHERE X firstname F', - [('AggrStep', 'Any X,F ORDERBY F', None, None, 'table0', None, + [('AggrStep', 'SELECT table0.C0, table0.C1 FROM table0 ORDER BY table0.C1', None, [('FetchStep', [('Any X,F WHERE X firstname F, X is CWUser', [{'X': 'CWUser', 'F': 'String'}])], [self.ldap, self.system], {}, @@ -657,7 +657,7 @@ def test_complex_typed_aggregat(self): self._test('Any MAX(X) WHERE X is Card', - [('AggrStep', 'Any MAX(X)', None, None, 'table0', None, + [('AggrStep', 'SELECT MAX(table0.C0) FROM table0', None, [('FetchStep', [('Any MAX(X) WHERE X is Card', [{'X': 'Card'}])], [self.cards, self.system], {}, {'MAX(X)': 'table0.C0'}, []) @@ -784,10 +784,10 @@ [{'X': 'Basket'}]), ('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}]), - ('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Image, Note, Personne, Societe, SubDivision, Tag)', + ('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Note, Personne, Societe, SubDivision, Tag)', [{'X': 'Card'}, {'X': 'Comment'}, {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailThread'}, - {'X': 'File'}, {'X': 'Folder'}, {'X': 'Image'}, + {'X': 'File'}, {'X': 'Folder'}, {'X': 'Note'}, {'X': 'Personne'}, {'X': 'Societe'}, {'X': 'SubDivision'}, {'X': 'Tag'}]),], None, None, [self.system], {}, []), @@ -810,10 +810,10 @@ [{'X': 'Basket'}]), ('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}]), - ('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Image, Note, Personne, Societe, SubDivision, Tag)', + ('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Note, Personne, Societe, SubDivision, Tag)', [{'X': 'Card'}, {'X': 'Comment'}, {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailThread'}, - {'X': 'File'}, {'X': 'Folder'}, {'X': 'Image'}, + {'X': 'File'}, {'X': 'Folder'}, {'X': 'Note'}, {'X': 'Personne'}, {'X': 'Societe'}, {'X': 'SubDivision'}, {'X': 
'Tag'}])], [self.system], {}, {'X': 'table0.C0'}, []), @@ -823,7 +823,7 @@ [{'X': 'Affaire'}, {'X': 'Basket'}, {'X': 'CWUser'}, {'X': 'Card'}, {'X': 'Comment'}, {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailThread'}, - {'X': 'File'}, {'X': 'Folder'}, {'X': 'Image'}, + {'X': 'File'}, {'X': 'Folder'}, {'X': 'Note'}, {'X': 'Personne'}, {'X': 'Societe'}, {'X': 'SubDivision'}, {'X': 'Tag'}])], 10, 10, [self.system], {'X': 'table0.C0'}, []) @@ -888,7 +888,7 @@ [{'X': 'Card'}, {'X': 'Note'}, {'X': 'State'}])], [self.cards, self.system], {}, {'X': 'table0.C0'}, []), ('FetchStep', - [('Any X WHERE X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Image, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', + [('Any X WHERE X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', [{'X': 'BaseTransition'}, {'X': 'Bookmark'}, {'X': 'CWAttribute'}, {'X': 'CWCache'}, {'X': 'CWConstraint'}, {'X': 'CWConstraintType'}, @@ -899,7 +899,7 @@ {'X': 'Email'}, {'X': 'EmailAddress'}, {'X': 'EmailPart'}, {'X': 'EmailThread'}, {'X': 'ExternalUri'}, {'X': 'File'}, - {'X': 'Folder'}, {'X': 'Image'}, + {'X': 'Folder'}, {'X': 'Personne'}, {'X': 'RQLExpression'}, {'X': 'Societe'}, {'X': 'SubDivision'}, {'X': 'SubWorkflowExitPoint'}, {'X': 'Tag'}, @@ -949,7 +949,7 @@ [self.system], {'X': 'table3.C0'}, {'ET': 'table0.C0', 'X': 'table0.C1'}, []), # extra UnionFetchStep could be avoided but has no cost, so don't care ('UnionFetchStep', - [('FetchStep', [('Any ET,X WHERE X is ET, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Image, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', + [('FetchStep', [('Any ET,X WHERE X is ET, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', [{'X': 'BaseTransition', 'ET': 'CWEType'}, {'X': 'Bookmark', 'ET': 'CWEType'}, {'X': 'CWAttribute', 'ET': 'CWEType'}, {'X': 'CWCache', 'ET': 'CWEType'}, {'X': 'CWConstraint', 'ET': 'CWEType'}, @@ -961,7 +961,7 @@ {'X': 'EmailAddress', 'ET': 'CWEType'}, {'X': 'EmailPart', 'ET': 'CWEType'}, {'X': 'EmailThread', 'ET': 'CWEType'}, {'X': 'ExternalUri', 'ET': 'CWEType'}, {'X': 'File', 'ET': 'CWEType'}, {'X': 'Folder', 'ET': 'CWEType'}, - {'X': 'Image', 'ET': 'CWEType'}, {'X': 'Personne', 'ET': 'CWEType'}, + {'X': 'Personne', 'ET': 'CWEType'}, {'X': 'RQLExpression', 'ET': 'CWEType'}, {'X': 'Societe', 'ET': 'CWEType'}, {'X': 'SubDivision', 'ET': 'CWEType'}, {'X': 
'SubWorkflowExitPoint', 'ET': 'CWEType'}, {'X': 'Tag', 'ET': 'CWEType'}, {'X': 'TrInfo', 'ET': 'CWEType'}, @@ -1299,9 +1299,66 @@ ]), ]) + def test_has_text_orderby_rank(self): + self._test('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', + [('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], + [self.ldap, self.system], None, {'X': 'table0.C0'}, []), + ('AggrStep', 'SELECT table1.C1 FROM table1 ORDER BY table1.C0', None, [ + ('FetchStep', [('Any FTIRANK(X),X WHERE X has_text "bla", X is CWUser', + [{'X': 'CWUser'}])], + [self.system], {'X': 'table0.C0'}, {'FTIRANK(X)': 'table1.C0', 'X': 'table1.C1'}, []), + ('FetchStep', [('Any FTIRANK(X),X WHERE X has_text "bla", X firstname "bla", X is Personne', + [{'X': 'Personne'}])], + [self.system], {}, {'FTIRANK(X)': 'table1.C0', 'X': 'table1.C1'}, []), + ]), + ]) + + def test_security_has_text_orderby_rank(self): + # use a guest user + self.session = self.user_groups_session('guests') + self._test('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', + [('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], + [self.ldap, self.system], None, {'X': 'table1.C0'}, []), + ('UnionFetchStep', + [('FetchStep', [('Any X WHERE X firstname "bla", X is Personne', [{'X': 'Personne'}])], + [self.system], {}, {'X': 'table0.C0'}, []), + ('FetchStep', [('Any X WHERE EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])], + [self.system], {'X': 'table1.C0'}, {'X': 'table0.C0'}, [])]), + ('OneFetchStep', [('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla"', + [{'X': 'CWUser'}, {'X': 'Personne'}])], + None, None, [self.system], {'X': 'table0.C0'}, []), + ]) + + def test_has_text_select_rank(self): + self._test('Any X, FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', + # XXX unecessary duplicate selection + [('FetchStep', [('Any X,X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], + [self.ldap, self.system], None, {'X': 'table0.C1'}, []), + ('UnionStep', None, None, [ + ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", X is CWUser', [{'X': 'CWUser'}])], + None, None, [self.system], {'X': 'table0.C1'}, []), + ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])], + None, None, [self.system], {}, []), + ]), + ]) + + def test_security_has_text_select_rank(self): + # use a guest user + self.session = self.user_groups_session('guests') + self._test('Any X, FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', + [('FetchStep', [('Any X,X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], + [self.ldap, self.system], None, {'X': 'table0.C1'}, []), + ('UnionStep', None, None, [ + ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])], + None, None, [self.system], {'X': 'table0.C1'}, []), + ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])], + None, None, [self.system], {}, []), + ]), + ]) + def test_sort_func(self): self._test('Note X ORDERBY DUMB_SORT(RF) WHERE X type RF', - [('AggrStep', 'Any X ORDERBY DUMB_SORT(RF)', None, None, 'table0', None, [ + [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY DUMB_SORT(table0.C1)', None, [ ('FetchStep', [('Any X,RF WHERE X type RF, X is Note', [{'X': 'Note', 'RF': 'String'}])], [self.cards, self.system], {}, {'X': 'table0.C0', 'X.type': 'table0.C1', 'RF': 'table0.C1'}, []), @@ -1310,8 +1367,7 @@ def 
test_ambigous_sort_func(self): self._test('Any X ORDERBY DUMB_SORT(RF) WHERE X title RF, X is IN (Bookmark, Card, EmailThread)', - [('AggrStep', 'Any X ORDERBY DUMB_SORT(RF)', - None, None, 'table0', None, + [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY DUMB_SORT(table0.C1)', None, [('FetchStep', [('Any X,RF WHERE X title RF, X is Card', [{'X': 'Card', 'RF': 'String'}])], [self.cards, self.system], {}, @@ -1718,8 +1774,9 @@ ]) def test_nonregr2(self): - self.session.user.fire_transition('deactivate') - treid = self.session.user.latest_trinfo().eid + iworkflowable = self.session.user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') + treid = iworkflowable.latest_trinfo().eid self._test('Any X ORDERBY D DESC WHERE E eid %(x)s, E wf_info_for X, X modification_date D', [('FetchStep', [('Any X,D WHERE X modification_date D, X is Note', [{'X': 'Note', 'D': 'Datetime'}])], @@ -1727,7 +1784,7 @@ ('FetchStep', [('Any X,D WHERE X modification_date D, X is CWUser', [{'X': 'CWUser', 'D': 'Datetime'}])], [self.ldap, self.system], None, {'X': 'table1.C0', 'X.modification_date': 'table1.C1', 'D': 'table1.C1'}, []), - ('AggrStep', 'Any X ORDERBY D DESC', None, None, 'table2', None, [ + ('AggrStep', 'SELECT table2.C0 FROM table2 ORDER BY table2.C1 DESC', None, [ ('FetchStep', [('Any X,D WHERE E eid %s, E wf_info_for X, X modification_date D, E is TrInfo, X is Affaire'%treid, [{'X': 'Affaire', 'E': 'TrInfo', 'D': 'Datetime'}])], [self.system], @@ -1870,8 +1927,7 @@ [{'X': 'Note', 'Z': 'Datetime'}])], [self.cards, self.system], None, {'X': 'table0.C0', 'X.modification_date': 'table0.C1', 'Z': 'table0.C1'}, []), - ('AggrStep', 'Any X ORDERBY Z DESC', - None, None, 'table1', None, + ('AggrStep', 'SELECT table1.C0 FROM table1 ORDER BY table1.C1 DESC', None, [('FetchStep', [('Any X,Z WHERE X modification_date Z, 999999 see_also X, X is Bookmark', [{'X': 'Bookmark', 'Z': 'Datetime'}])], [self.system], {}, {'X': 'table1.C0', 'X.modification_date': 'table1.C1', diff -r 00b1b6b906cf -r 97c55baefa0c server/test/unittest_multisources.py --- a/server/test/unittest_multisources.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/test/unittest_multisources.py Mon Jul 19 15:37:02 2010 +0200 @@ -111,11 +111,11 @@ self.assertEquals(len(rset), 4) # since they are orderd by eid, we know the 3 first one is coming from the system source # and the others from external source - self.assertEquals(rset.get_entity(0, 0).metainformation(), + self.assertEquals(rset.get_entity(0, 0).cw_metainformation(), {'source': {'adapter': 'native', 'uri': 'system'}, 'type': u'Card', 'extid': None}) externent = rset.get_entity(3, 0) - metainf = externent.metainformation() + metainf = externent.cw_metainformation() self.assertEquals(metainf['source'], {'adapter': 'pyrorql', 'base-url': 'http://extern.org/', 'uri': 'extern'}) self.assertEquals(metainf['type'], 'Card') self.assert_(metainf['extid']) @@ -134,6 +134,8 @@ self.repo.sources_by_uri['extern'].synchronize(MTIME) # in case fti_update has been run before self.failUnless(self.sexecute('Any X WHERE X has_text "affref"')) self.failUnless(self.sexecute('Affaire X WHERE X has_text "affref"')) + self.failUnless(self.sexecute('Any X ORDERBY FTIRANK(X) WHERE X has_text "affref"')) + self.failUnless(self.sexecute('Affaire X ORDERBY FTIRANK(X) WHERE X has_text "affref"')) def test_anon_has_text(self): self.repo.sources_by_uri['extern'].synchronize(MTIME) # in case fti_update has been run before @@ -145,6 +147,9 @@ cnx = self.login('anon') cu = cnx.cursor() rset = 
cu.execute('Any X WHERE X has_text "card"') + # 5: 4 card + 1 readable affaire + self.assertEquals(len(rset), 5, zip(rset.rows, rset.description)) + rset = cu.execute('Any X ORDERBY FTIRANK(X) WHERE X has_text "card"') self.assertEquals(len(rset), 5, zip(rset.rows, rset.description)) Connection_close(cnx) @@ -305,8 +310,9 @@ {'x': affaire.eid, 'u': ueid}) def test_nonregr2(self): - self.session.user.fire_transition('deactivate') - treid = self.session.user.latest_trinfo().eid + iworkflowable = self.session.user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') + treid = iworkflowable.latest_trinfo().eid rset = self.sexecute('Any X ORDERBY D DESC WHERE E eid %(x)s, E wf_info_for X, X modification_date D', {'x': treid}) self.assertEquals(len(rset), 1) diff -r 00b1b6b906cf -r 97c55baefa0c server/test/unittest_querier.py --- a/server/test/unittest_querier.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/test/unittest_querier.py Mon Jul 19 15:37:02 2010 +0200 @@ -130,7 +130,7 @@ 'X': 'Affaire', 'ET': 'CWEType', 'ETN': 'String'}]) rql, solutions = partrqls[1] - self.assertEquals(rql, 'Any ETN,X WHERE X is ET, ET name ETN, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWUser, Card, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Image, Note, Personne, RQLExpression, Societe, State, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)') + self.assertEquals(rql, 'Any ETN,X WHERE X is ET, ET name ETN, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWUser, Card, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Note, Personne, RQLExpression, Societe, State, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)') self.assertListEquals(sorted(solutions), sorted([{'X': 'BaseTransition', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'Bookmark', 'ETN': 'String', 'ET': 'CWEType'}, @@ -155,7 +155,6 @@ {'X': 'ExternalUri', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'File', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'Folder', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Image', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'Note', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'Personne', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'RQLExpression', 'ETN': 'String', 'ET': 'CWEType'}, @@ -491,17 +490,17 @@ 'WHERE RT name N, RDEF relation_type RT ' 'HAVING COUNT(RDEF) > 10') self.assertListEquals(rset.rows, - [[u'description_format', 13], - [u'description', 14], + [[u'description_format', 12], + [u'description', 13], [u'name', 14], - [u'created_by', 38], - [u'creation_date', 38], - [u'cwuri', 38], - [u'in_basket', 38], - [u'is', 38], - [u'is_instance_of', 38], - [u'modification_date', 38], - [u'owned_by', 38]]) + [u'created_by', 37], + [u'creation_date', 37], + [u'cwuri', 37], + [u'in_basket', 37], + [u'is', 37], + [u'is_instance_of', 37], + [u'modification_date', 37], + [u'owned_by', 37]]) def test_select_aggregat_having_dumb(self): # dumb but should not raise an error diff -r 00b1b6b906cf -r 97c55baefa0c server/test/unittest_repository.py --- a/server/test/unittest_repository.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/test/unittest_repository.py Mon Jul 19 15:37:02 2010 +0200 @@ -33,7 +33,7 @@ from cubicweb import 
(BadConnectionId, RepositoryError, ValidationError, UnknownEid, AuthenticationError) -from cubicweb.selectors import implements +from cubicweb.selectors import is_instance from cubicweb.schema import CubicWebSchema, RQLConstraint from cubicweb.dbapi import connect, multiple_connections_unfix from cubicweb.devtools.testlib import CubicWebTC @@ -202,7 +202,7 @@ session = repo._get_session(cnxid) session.set_pool() user = session.user - user.fire_transition('deactivate') + user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') rset = repo.execute(cnxid, 'TrInfo T WHERE T wf_info_for X, X eid %(x)s', {'x': user.eid}) self.assertEquals(len(rset), 1) repo.rollback(cnxid) @@ -390,7 +390,7 @@ # local hook class DummyBeforeHook(Hook): __regid__ = 'dummy-before-hook' - __select__ = Hook.__select__ & implements('EmailAddress') + __select__ = Hook.__select__ & is_instance('EmailAddress') events = ('before_update_entity',) def __call__(self): # safety belt: avoid potential infinite recursion if the test @@ -411,7 +411,7 @@ # local hook class DummyBeforeHook(Hook): __regid__ = 'dummy-before-hook' - __select__ = Hook.__select__ & implements('EmailAddress') + __select__ = Hook.__select__ & is_instance('EmailAddress') events = ('before_add_entity',) def __call__(self): # set_attributes is forbidden within before_add_entity() @@ -430,7 +430,7 @@ class DummyBeforeHook(Hook): _test = self # keep reference to test instance __regid__ = 'dummy-before-hook' - __select__ = Hook.__select__ & implements('Affaire') + __select__ = Hook.__select__ & is_instance('Affaire') events = ('before_update_entity',) def __call__(self): # invoiced attribute shouldn't be considered "edited" before the hook diff -r 00b1b6b906cf -r 97c55baefa0c server/test/unittest_rql2sql.py --- a/server/test/unittest_rql2sql.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/test/unittest_rql2sql.py Mon Jul 19 15:37:02 2010 +0200 @@ -22,11 +22,13 @@ from logilab.common.testlib import TestCase, unittest_main, mock_object from rql import BadRQLQuery +from rql.utils import register_function, FunctionDescr -#from cubicweb.server.sources.native import remove_unused_solutions -from cubicweb.server.sources.rql2sql import SQLGenerator, remove_unused_solutions +from cubicweb.devtools import TestServerConfiguration +from cubicweb.devtools.repotest import RQLGeneratorTC +from cubicweb.server.sources.rql2sql import remove_unused_solutions -from rql.utils import register_function, FunctionDescr + # add a dumb registered procedure class stockproc(FunctionDescr): supported_backends = ('postgres', 'sqlite', 'mysql') @@ -35,8 +37,6 @@ except AssertionError, ex: pass # already registered -from cubicweb.devtools import TestServerConfiguration -from cubicweb.devtools.repotest import RQLGeneratorTC config = TestServerConfiguration('data') config.bootstrap_cubes() @@ -424,13 +424,10 @@ GROUP BY T1.C1'''), ('Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 1, N, DF WHERE X data_name N, X data D, X data_format DF;', - '''SELECT (MAX(T1.C1) + MIN(LENGTH(T1.C0))), T1.C2 FROM (SELECT _X.cw_data AS C0, _X.cw_eid AS C1, _X.cw_data_name AS C2, _X.cw_data_format AS C3 + '''SELECT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))), _X.cw_data_name FROM cw_File AS _X -UNION ALL -SELECT _X.cw_data AS C0, _X.cw_eid AS C1, _X.cw_data_name AS C2, _X.cw_data_format AS C3 -FROM cw_Image AS _X) AS T1 -GROUP BY T1.C2,T1.C3 -ORDER BY 1,2,T1.C3'''), +GROUP BY _X.cw_data_name,_X.cw_data_format +ORDER BY 1,2,_X.cw_data_format'''), ('DISTINCT Any S ORDERBY R WHERE A is Affaire, A sujet S, A ref 
R', '''SELECT T1.C0 FROM (SELECT DISTINCT _A.cw_sujet AS C0, _A.cw_ref AS C1 @@ -438,12 +435,9 @@ ORDER BY 2) AS T1'''), ('DISTINCT Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 2, DF WHERE X data_name N, X data D, X data_format DF;', - '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(T1.C1) + MIN(LENGTH(T1.C0))) AS C0, T1.C2 AS C1, T1.C3 AS C2 FROM (SELECT DISTINCT _X.cw_data AS C0, _X.cw_eid AS C1, _X.cw_data_name AS C2, _X.cw_data_format AS C3 + '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))) AS C0, _X.cw_data_name AS C1, _X.cw_data_format AS C2 FROM cw_File AS _X -UNION -SELECT DISTINCT _X.cw_data AS C0, _X.cw_eid AS C1, _X.cw_data_name AS C2, _X.cw_data_format AS C3 -FROM cw_Image AS _X) AS T1 -GROUP BY T1.C2,T1.C3 +GROUP BY _X.cw_data_name,_X.cw_data_format ORDER BY 2,3) AS T1 '''), @@ -1082,11 +1076,9 @@ WHERE rel_is0.eid_to=2'''), ] -from logilab.database import get_db_helper - class CWRQLTC(RQLGeneratorTC): schema = schema - + backend = 'sqlite' def test_nonregr_sol(self): delete = self.rqlhelper.parse( 'DELETE X read_permission READ_PERMISSIONSUBJECT,X add_permission ADD_PERMISSIONSUBJECT,' @@ -1112,12 +1104,7 @@ class PostgresSQLGeneratorTC(RQLGeneratorTC): schema = schema - - #capture = True - def setUp(self): - RQLGeneratorTC.setUp(self) - dbhelper = get_db_helper('postgres') - self.o = SQLGenerator(schema, dbhelper) + backend = 'postgres' def _norm_sql(self, sql): return sql.strip() @@ -1377,13 +1364,53 @@ UNION ALL SELECT _X.cw_eid FROM appears AS appears0, cw_Folder AS _X -WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu -"""), +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu"""), ('Personne X where X has_text %(text)s, X travaille S, S has_text %(text)s', """SELECT _X.eid FROM appears AS appears0, appears AS appears2, entities AS _X, travaille_relation AS rel_travaille1 WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=_X.eid AND _X.type='Personne' AND _X.eid=rel_travaille1.eid_from AND appears2.uid=rel_travaille1.eid_to AND appears2.words @@ to_tsquery('default', 'hip&hop&momo')"""), + + ('Any X ORDERBY FTIRANK(X) DESC WHERE X has_text "toto tata"', + """SELECT appears0.uid +FROM appears AS appears0 +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') +ORDER BY ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight DESC"""), + + ('Personne X ORDERBY FTIRANK(X) WHERE X has_text "toto tata"', + """SELECT _X.eid +FROM appears AS appears0, entities AS _X +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.eid AND _X.type='Personne' +ORDER BY ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight"""), + + ('Personne X ORDERBY FTIRANK(X) WHERE X has_text %(text)s', + """SELECT _X.eid +FROM appears AS appears0, entities AS _X +WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=_X.eid AND _X.type='Personne' +ORDER BY ts_rank(appears0.words, to_tsquery('default', 'hip&hop&momo'))*appears0.weight"""), + + ('Any X ORDERBY FTIRANK(X) WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', + """SELECT T1.C0 FROM (SELECT _X.cw_eid AS C0, ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight AS C1 +FROM appears AS appears0, cw_Basket AS _X +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +UNION ALL +SELECT 
_X.cw_eid AS C0, ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight AS C1 +FROM appears AS appears0, cw_Folder AS _X +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +ORDER BY 2) AS T1"""), + + ('Personne X ORDERBY FTIRANK(X),FTIRANK(S) WHERE X has_text %(text)s, X travaille S, S has_text %(text)s', + """SELECT _X.eid +FROM appears AS appears0, appears AS appears2, entities AS _X, travaille_relation AS rel_travaille1 +WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=_X.eid AND _X.type='Personne' AND _X.eid=rel_travaille1.eid_from AND appears2.uid=rel_travaille1.eid_to AND appears2.words @@ to_tsquery('default', 'hip&hop&momo') +ORDER BY ts_rank(appears0.words, to_tsquery('default', 'hip&hop&momo'))*appears0.weight,ts_rank(appears2.words, to_tsquery('default', 'hip&hop&momo'))*appears2.weight"""), + + + ('Any X, FTIRANK(X) WHERE X has_text "toto tata"', + """SELECT appears0.uid, ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight +FROM appears AS appears0 +WHERE appears0.words @@ to_tsquery('default', 'toto&tata')"""), + )): yield t @@ -1445,11 +1472,7 @@ class SqliteSQLGeneratorTC(PostgresSQLGeneratorTC): - - def setUp(self): - RQLGeneratorTC.setUp(self) - dbhelper = get_db_helper('sqlite') - self.o = SQLGenerator(schema, dbhelper) + backend = 'sqlite' def _norm_sql(self, sql): return sql.strip().replace(' ILIKE ', ' LIKE ') @@ -1547,6 +1570,26 @@ FROM appears AS appears0, cw_Folder AS _X WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu """), + + ('Any X ORDERBY FTIRANK(X) WHERE X has_text "toto tata"', + """SELECT DISTINCT appears0.uid +FROM appears AS appears0 +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata'))"""), + + ('Any X ORDERBY FTIRANK(X) WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', + """SELECT DISTINCT _X.cw_eid +FROM appears AS appears0, cw_Basket AS _X +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +UNION +SELECT DISTINCT _X.cw_eid +FROM appears AS appears0, cw_Folder AS _X +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +"""), + + ('Any X, FTIRANK(X) WHERE X has_text "toto tata"', + """SELECT DISTINCT appears0.uid, 1.0 +FROM appears AS appears0 +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata'))"""), )): yield t @@ -1560,11 +1603,7 @@ class MySQLGenerator(PostgresSQLGeneratorTC): - - def setUp(self): - RQLGeneratorTC.setUp(self) - dbhelper = get_db_helper('mysql') - self.o = SQLGenerator(schema, dbhelper) + backend = 'mysql' def _norm_sql(self, sql): sql = sql.strip().replace(' ILIKE ', ' LIKE ').replace('TRUE', '1').replace('FALSE', '0') @@ -1672,5 +1711,6 @@ ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'}], {}, set()) ) + if __name__ == '__main__': unittest_main() diff -r 00b1b6b906cf -r 97c55baefa0c server/test/unittest_schemaserial.py --- a/server/test/unittest_schemaserial.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/test/unittest_schemaserial.py Mon Jul 19 15:37:02 2010 +0200 @@ -68,8 +68,6 @@ {'et': None, 'x': None}), ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', {'et': None, 'x': None}), - # ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', - # {'et': 'File', 'x': 'Image'}), ('SET X 
specializes ET WHERE X eid %(x)s, ET eid %(et)s', {'et': None, 'x': None})]) diff -r 00b1b6b906cf -r 97c55baefa0c server/test/unittest_security.py --- a/server/test/unittest_security.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/test/unittest_security.py Mon Jul 19 15:37:02 2010 +0200 @@ -213,8 +213,7 @@ self.assertEquals(len(rset), 1) ent = rset.get_entity(0, 0) session.set_pool() # necessary - self.assertRaises(Unauthorized, - ent.e_schema.check_perm, session, 'update', eid=ent.eid) + self.assertRaises(Unauthorized, ent.cw_check_perm, 'update') self.assertRaises(Unauthorized, cu.execute, "SET P travaille S WHERE P is Personne, S is Societe") # test nothing has actually been inserted: @@ -405,7 +404,7 @@ # Note.para attribute editable by managers or if the note is in "todo" state note = self.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) self.commit() - note.fire_transition('markasdone') + note.cw_adapt_to('IWorkflowable').fire_transition('markasdone') self.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid}) self.commit() cnx = self.login('iaminusersgrouponly') @@ -414,13 +413,13 @@ self.assertRaises(Unauthorized, cnx.commit) note2 = cu.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) cnx.commit() - note2.fire_transition('markasdone') + note2.cw_adapt_to('IWorkflowable').fire_transition('markasdone') cnx.commit() self.assertEquals(len(cu.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', {'x': note2.eid})), 0) cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) self.assertRaises(Unauthorized, cnx.commit) - note2.fire_transition('redoit') + note2.cw_adapt_to('IWorkflowable').fire_transition('redoit') cnx.commit() cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) cnx.commit() @@ -455,7 +454,7 @@ cnx.commit() self.restore_connection() affaire = self.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0) - affaire.fire_transition('abort') + affaire.cw_adapt_to('IWorkflowable').fire_transition('abort') self.commit() self.assertEquals(len(self.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01"')), 1) @@ -557,14 +556,15 @@ cu = cnx.cursor() self.schema['Affaire'].set_action_permissions('read', ('users',)) aff = cu.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0) - aff.fire_transition('abort') + aff.cw_adapt_to('IWorkflowable').fire_transition('abort') cnx.commit() # though changing a user state (even logged user) is reserved to managers user = cnx.user(self.session) # XXX wether it should raise Unauthorized or ValidationError is not clear # the best would probably ValidationError if the transition doesn't exist # from the current state but Unauthorized if it exists but user can't pass it - self.assertRaises(ValidationError, user.fire_transition, 'deactivate') + self.assertRaises(ValidationError, + user.cw_adapt_to('IWorkflowable').fire_transition, 'deactivate') finally: # restore orig perms for action, perms in affaire_perms.iteritems(): @@ -572,18 +572,19 @@ def test_trinfo_security(self): aff = self.execute('INSERT Affaire X: X ref "ARCT01"').get_entity(0, 0) + iworkflowable = aff.cw_adapt_to('IWorkflowable') self.commit() - aff.fire_transition('abort') + iworkflowable.fire_transition('abort') self.commit() # can change tr info comment self.execute('SET TI comment %(c)s WHERE TI wf_info_for X, X ref "ARCT01"', {'c': u'bouh!'}) self.commit() - aff.clear_related_cache('wf_info_for', 'object') - trinfo = aff.latest_trinfo() + aff.cw_clear_relation_cache('wf_info_for', 'object') + trinfo = 
iworkflowable.latest_trinfo() self.assertEquals(trinfo.comment, 'bouh!') # but not from_state/to_state - aff.clear_related_cache('wf_info_for', role='object') + aff.cw_clear_relation_cache('wf_info_for', role='object') self.assertRaises(Unauthorized, self.execute, 'SET TI from_state S WHERE TI eid %(ti)s, S name "ben non"', {'ti': trinfo.eid}) diff -r 00b1b6b906cf -r 97c55baefa0c server/test/unittest_storage.py --- a/server/test/unittest_storage.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/test/unittest_storage.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""unit tests for module cubicweb.server.sources.storages - -""" +"""unit tests for module cubicweb.server.sources.storages""" from __future__ import with_statement @@ -29,13 +27,13 @@ import tempfile from cubicweb import Binary, QueryError -from cubicweb.selectors import implements +from cubicweb.selectors import is_instance from cubicweb.server.sources import storages from cubicweb.server.hook import Hook, Operation class DummyBeforeHook(Hook): __regid__ = 'dummy-before-hook' - __select__ = Hook.__select__ & implements('File') + __select__ = Hook.__select__ & is_instance('File') events = ('before_add_entity',) def __call__(self): @@ -44,7 +42,7 @@ class DummyAfterHook(Hook): __regid__ = 'dummy-after-hook' - __select__ = Hook.__select__ & implements('File') + __select__ = Hook.__select__ & is_instance('File') events = ('after_add_entity',) def __call__(self): @@ -89,11 +87,11 @@ f1.set_attributes(data=Binary('the new data')) self.rollback() self.assertEquals(file(expected_filepath).read(), 'the-data') - f1.delete() + f1.cw_delete() self.failUnless(osp.isfile(expected_filepath)) self.rollback() self.failUnless(osp.isfile(expected_filepath)) - f1.delete() + f1.cw_delete() self.commit() self.failIf(osp.isfile(expected_filepath)) @@ -133,11 +131,17 @@ ex = self.assertRaises(QueryError, self.execute, '(Any D WHERE X data D, X is File)' ' UNION ' - '(Any D WHERE X data D, X is Image)') + '(Any D WHERE X title D, X is Bookmark)') self.assertEquals(str(ex), 'query fetch some source mapped attribute, some not') - ex = self.assertRaises(QueryError, - self.execute, 'Any D WHERE X data D') - self.assertEquals(str(ex), 'query fetch some source mapped attribute, some not') + + storages.set_attribute_storage(self.repo, 'State', 'name', + storages.BytesFileSystemStorage(self.tempdir)) + try: + ex = self.assertRaises(QueryError, + self.execute, 'Any D WHERE X name D, X is IN (State, Transition)') + self.assertEquals(str(ex), 'query fetch some source mapped attribute, some not') + finally: + storages.unset_attribute_storage(self.repo, 'State', 'name') def test_source_mapped_attribute_advanced(self): f1 = self.create_file() diff -r 00b1b6b906cf -r 97c55baefa0c server/test/unittest_undo.py --- a/server/test/unittest_undo.py Thu Jul 15 12:03:13 2010 +0200 +++ b/server/test/unittest_undo.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,6 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-""" - -""" from __future__ import with_statement from cubicweb import ValidationError @@ -104,7 +101,7 @@ address=u'toto@logilab.org', reverse_use_email=toto) txuuid1 = self.commit() - toto.delete() + toto.cw_delete() txuuid2 = self.commit() undoable_transactions = self.cnx.undoable_transactions txs = undoable_transactions(action='D') @@ -147,7 +144,7 @@ self.commit() txs = self.cnx.undoable_transactions() self.assertEquals(len(txs), 2) - toto.delete() + toto.cw_delete() txuuid = self.commit() actions = self.cnx.transaction_info(txuuid).actions_list() self.assertEquals(len(actions), 1) @@ -160,8 +157,8 @@ self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': toto.eid})) self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': e.eid})) self.failUnless(self.execute('Any X WHERE X has_text "toto@logilab"')) - self.assertEquals(toto.state, 'activated') - self.assertEquals(toto.get_email(), 'toto@logilab.org') + self.assertEquals(toto.cw_adapt_to('IWorkflowable').state, 'activated') + self.assertEquals(toto.cw_adapt_to('IEmailable').get_email(), 'toto@logilab.org') self.assertEquals([(p.pkey, p.value) for p in toto.reverse_for_user], [('ui.default-text-format', 'text/rest')]) self.assertEquals([g.name for g in toto.in_group], @@ -186,7 +183,7 @@ c = session.create_entity('Card', title=u'hop', content=u'hop') p = session.create_entity('Personne', nom=u'louis', fiche=c) self.commit() - c.delete() + c.cw_delete() txuuid = self.commit() c2 = session.create_entity('Card', title=u'hip', content=u'hip') p.set_relations(fiche=c2) @@ -207,9 +204,9 @@ session.execute('DELETE U in_group G WHERE U eid %(x)s', {'x': self.toto.eid}) self.toto.set_relations(in_group=g) self.commit() - self.toto.delete() + self.toto.cw_delete() txuuid = self.commit() - g.delete() + g.cw_delete() self.commit() errors = self.cnx.undo_transaction(txuuid) self.assertEquals(errors, diff -r 00b1b6b906cf -r 97c55baefa0c skeleton/data/external_resources.tmpl --- a/skeleton/data/external_resources.tmpl Thu Jul 15 12:03:13 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,11 +0,0 @@ -# -*- shell-script -*- -############################################################################### -# -# put here information about external resources used by your components, -# or to overides existing external resources configuration -# -############################################################################### - -# CSS stylesheets to include in HTML headers -# uncomment the line below to use template specific stylesheet -# STYLESHEETS = DATADIR/cubes.%(cubename)s.css diff -r 00b1b6b906cf -r 97c55baefa0c skeleton/test/test_CUBENAME.py --- a/skeleton/test/test_CUBENAME.py Thu Jul 15 12:03:13 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,35 +0,0 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. 
-# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""template automatic tests - -""" - -from logilab.common.testlib import TestCase, unittest_main - -class DefaultTC(TestCase): - def test_something(self): - self.skip('this cube has no test') - -## uncomment the import if you want to activate automatic test for your -## template - -# from cubicweb.devtools.testlib import AutomaticWebTest - - -if __name__ == '__main__': - unittest_main() diff -r 00b1b6b906cf -r 97c55baefa0c skeleton/test/test_CUBENAME.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/test/test_CUBENAME.py.tmpl Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,37 @@ +# copyright %(year)s %(author)s, all rights reserved. +# contact %(author-web-site)s -- mailto:%(author-email)s +# +%(long-license)s +"""%(distname)s automatic tests + + +uncomment code below if you want to activate automatic test for your cube: + +.. sourcecode:: python + + from cubicweb.devtools.testlib import AutomaticWebTest + + class AutomaticWebTest(AutomaticWebTest): + '''provides `to_test_etypes` and/or `list_startup_views` implementation + to limit test scope + ''' + + def to_test_etypes(self): + '''only test views for entities of the returned types''' + return set(('My', 'Cube', 'Entity', 'Types')) + + def list_startup_views(self): + '''only test startup views of the returned identifiers''' + return ('some', 'startup', 'views') +""" + +from cubicweb.devtools import testlib + +class DefaultTC(testlib.CubicWebTC): + def test_something(self): + self.skip('this cube has no test') + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 00b1b6b906cf -r 97c55baefa0c skeleton/uiprops.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/uiprops.py.tmpl Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,15 @@ +############################################################################### +# +# Put here information about external resources / styles used by your cube, +# or to overides existing UI properties. +# +# Existing properties are available through the `sheet` dictionary available +# in the global namespace. You also have access to a `data` function which +# will return proper url for resources in the 'data' directory. 
+# +# /!\ this file should not be imported /!\ +############################################################################### + +# CSS stylesheets to include in HTML headers +# uncomment the line below to use template specific stylesheet +# STYLESHEETS = sheet['STYLESHEETS'] + [data('cubes.%(cubename)s.css')] diff -r 00b1b6b906cf -r 97c55baefa0c sobjects/notification.py --- a/sobjects/notification.py Thu Jul 15 12:03:13 2010 +0200 +++ b/sobjects/notification.py Mon Jul 19 15:37:02 2010 +0200 @@ -46,7 +46,8 @@ mode = self._cw.vreg.config['default-recipients-mode'] if mode == 'users': execute = self._cw.execute - dests = [(u.get_email(), u.property_value('ui.language')) + dests = [(u.cw_adapt_to('IEmailable').get_email(), + u.property_value('ui.language')) for u in execute(self.user_rql, build_descr=True).entities()] elif mode == 'default-dest-addrs': lang = self._cw.vreg.property_value('ui.language') diff -r 00b1b6b906cf -r 97c55baefa0c sobjects/test/data/sobjects/__init__.py --- a/sobjects/test/data/sobjects/__init__.py Thu Jul 15 12:03:13 2010 +0200 +++ b/sobjects/test/data/sobjects/__init__.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,11 +15,9 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -""" -""" -from cubicweb.selectors import implements +from cubicweb.selectors import is_instance from cubicweb.sobjects.notification import StatusChangeMixIn, NotificationView class UserStatusChangeView(StatusChangeMixIn, NotificationView): - __select__ = NotificationView.__select__ & implements('CWUser') + __select__ = NotificationView.__select__ & is_instance('CWUser') diff -r 00b1b6b906cf -r 97c55baefa0c sobjects/test/unittest_notification.py --- a/sobjects/test/unittest_notification.py Thu Jul 15 12:03:13 2010 +0200 +++ b/sobjects/test/unittest_notification.py Mon Jul 19 15:37:02 2010 +0200 @@ -85,7 +85,7 @@ def test_status_change_view(self): req = self.request() u = self.create_user('toto', req=req) - u.fire_transition('deactivate', comment=u'yeah') + u.cw_adapt_to('IWorkflowable').fire_transition('deactivate', comment=u'yeah') self.failIf(MAILBOX) self.commit() self.assertEquals(len(MAILBOX), 1) diff -r 00b1b6b906cf -r 97c55baefa0c sobjects/test/unittest_supervising.py --- a/sobjects/test/unittest_supervising.py Thu Jul 15 12:03:13 2010 +0200 +++ b/sobjects/test/unittest_supervising.py Mon Jul 19 15:37:02 2010 +0200 @@ -84,7 +84,7 @@ self.assertEquals(op.to_send[0][1], ['test@logilab.fr']) self.commit() # some other changes ####### - user.fire_transition('deactivate') + user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') sentops = [op for op in session.pending_operations if isinstance(op, SupervisionMailOp)] self.assertEquals(len(sentops), 1) diff -r 00b1b6b906cf -r 97c55baefa0c sobjects/textparsers.py --- a/sobjects/textparsers.py Thu Jul 15 12:03:13 2010 +0200 +++ b/sobjects/textparsers.py Mon Jul 19 15:37:02 2010 +0200 @@ -74,10 +74,14 @@ if not hasattr(entity, 'in_state'): self.error('bad change state instruction for eid %s', eid) continue - tr = entity.current_workflow and entity.current_workflow.transition_by_name(trname) + iworkflowable = entity.cw_adapt_to('IWorkflowable') + if iworkflowable.current_workflow: + tr = iworkflowable.current_workflow.transition_by_name(trname) + else: + tr = None if tr and tr.may_be_fired(entity.eid): try: - trinfo = entity.fire_transition(tr) + trinfo = iworkflowable.fire_transition(tr) caller.fire_event('state-changed', {'trinfo': trinfo, 'entity': entity}) except: diff -r 
00b1b6b906cf -r 97c55baefa0c test/data/scripts/script1.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/scripts/script1.py Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,3 @@ +assert 'data/scripts/script1.py' == __file__ +assert '__main__' == __name__ +assert [] == __args__, __args__ diff -r 00b1b6b906cf -r 97c55baefa0c test/data/scripts/script2.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/scripts/script2.py Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,3 @@ +assert 'data/scripts/script2.py' == __file__ +assert '__main__' == __name__ +assert ['-v'] == __args__, __args__ diff -r 00b1b6b906cf -r 97c55baefa0c test/data/scripts/script3.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/scripts/script3.py Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,3 @@ +assert 'data/scripts/script3.py' == __file__ +assert '__main__' == __name__ +assert ['-vd', '-f', 'FILE.TXT'] == __args__, __args__ diff -r 00b1b6b906cf -r 97c55baefa0c test/unittest_cwctl.py --- a/test/unittest_cwctl.py Thu Jul 15 12:03:13 2010 +0200 +++ b/test/unittest_cwctl.py Mon Jul 19 15:37:02 2010 +0200 @@ -24,8 +24,12 @@ from logilab.common.testlib import TestCase, unittest_main from cubicweb.cwconfig import CubicWebConfiguration +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.server.migractions import ServerMigrationHelper + CubicWebConfiguration.load_cwctl_plugins() # XXX necessary? + class CubicWebCtlTC(TestCase): def setUp(self): self.stream = StringIO() @@ -37,5 +41,25 @@ from cubicweb.cwctl import ListCommand ListCommand().run([]) + +class CubicWebShellTC(CubicWebTC): + + def test_process_script_args_context(self): + repo = self.cnx._repo + mih = ServerMigrationHelper(None, repo=repo, cnx=self.cnx, + interactive=False, + # hack so it don't try to load fs schema + schema=1) + scripts = {'script1.py': list(), + 'script2.py': ['-v'], + 'script3.py': ['-vd', '-f', 'FILE.TXT'], + } + mih.cmd_process_script('data/scripts/script1.py', funcname=None) + for script, args in scripts.items(): + scriptname = os.path.join('data/scripts/', script) + self.assert_(os.path.exists(scriptname)) + mih.cmd_process_script(scriptname, None, scriptargs=args) + + if __name__ == '__main__': unittest_main() diff -r 00b1b6b906cf -r 97c55baefa0c test/unittest_entity.py --- a/test/unittest_entity.py Thu Jul 15 12:03:13 2010 +0200 +++ b/test/unittest_entity.py Mon Jul 19 15:37:02 2010 +0200 @@ -97,27 +97,27 @@ user = self.execute('INSERT CWUser X: X login "toto", X upassword %(pwd)s, X in_group G WHERE G name "users"', {'pwd': 'toto'}).get_entity(0, 0) self.commit() - user.fire_transition('deactivate') + user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') self.commit() eid2 = self.execute('INSERT CWUser X: X login "tutu", X upassword %(pwd)s', {'pwd': 'toto'})[0][0] e = self.execute('Any X WHERE X eid %(x)s', {'x': eid2}).get_entity(0, 0) e.copy_relations(user.eid) self.commit() - e.clear_related_cache('in_state', 'subject') - self.assertEquals(e.state, 'activated') + e.cw_clear_relation_cache('in_state', 'subject') + self.assertEquals(e.cw_adapt_to('IWorkflowable').state, 'activated') def test_related_cache_both(self): user = self.execute('Any X WHERE X eid %(x)s', {'x':self.user().eid}).get_entity(0, 0) adeleid = self.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0] self.commit() - self.assertEquals(user._related_cache, {}) + self.assertEquals(user._cw_related_cache, {}) email = user.primary_email[0] - 
self.assertEquals(sorted(user._related_cache), ['primary_email_subject']) - self.assertEquals(email._related_cache.keys(), ['primary_email_object']) + self.assertEquals(sorted(user._cw_related_cache), ['primary_email_subject']) + self.assertEquals(email._cw_related_cache.keys(), ['primary_email_object']) groups = user.in_group - self.assertEquals(sorted(user._related_cache), ['in_group_subject', 'primary_email_subject']) + self.assertEquals(sorted(user._cw_related_cache), ['in_group_subject', 'primary_email_subject']) for group in groups: - self.failIf('in_group_subject' in group._related_cache, group._related_cache.keys()) + self.failIf('in_group_subject' in group._cw_related_cache, group._cw_related_cache.keys()) def test_related_limit(self): req = self.request() @@ -197,20 +197,20 @@ Note.fetch_attrs, Note.fetch_order = fetch_config(('type',)) SubNote.fetch_attrs, SubNote.fetch_order = fetch_config(('type',)) p = self.request().create_entity('Personne', nom=u'pouet') - self.assertEquals(p.related_rql('evaluee'), + self.assertEquals(p.cw_related_rql('evaluee'), 'Any X,AA,AB ORDERBY AA ASC WHERE E eid %(x)s, E evaluee X, ' 'X type AA, X modification_date AB') Personne.fetch_attrs, Personne.fetch_order = fetch_config(('nom', )) # XXX - self.assertEquals(p.related_rql('evaluee'), + self.assertEquals(p.cw_related_rql('evaluee'), 'Any X,AA ORDERBY AA DESC ' 'WHERE E eid %(x)s, E evaluee X, X modification_date AA') tag = self.vreg['etypes'].etype_class('Tag')(self.request()) - self.assertEquals(tag.related_rql('tags', 'subject'), + self.assertEquals(tag.cw_related_rql('tags', 'subject'), 'Any X,AA ORDERBY AA DESC ' 'WHERE E eid %(x)s, E tags X, X modification_date AA') - self.assertEquals(tag.related_rql('tags', 'subject', ('Personne',)), + self.assertEquals(tag.cw_related_rql('tags', 'subject', ('Personne',)), 'Any X,AA,AB ORDERBY AA ASC ' 'WHERE E eid %(x)s, E tags X, X is IN (Personne), X nom AA, ' 'X modification_date AB') @@ -219,47 +219,47 @@ tag = self.vreg['etypes'].etype_class('Tag')(self.request()) for ttype in self.schema['tags'].objects(): self.vreg['etypes'].etype_class(ttype).fetch_attrs = ('modification_date',) - self.assertEquals(tag.related_rql('tags', 'subject'), + self.assertEquals(tag.cw_related_rql('tags', 'subject'), 'Any X,AA ORDERBY AA DESC ' 'WHERE E eid %(x)s, E tags X, X modification_date AA') def test_unrelated_rql_security_1(self): user = self.request().user - rql = user.unrelated_rql('use_email', 'EmailAddress', 'subject')[0] + rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] self.assertEquals(rql, 'Any O,AA,AB,AC ORDERBY AC DESC ' 'WHERE NOT S use_email O, S eid %(x)s, O is EmailAddress, O address AA, O alias AB, O modification_date AC') self.create_user('toto') self.login('toto') user = self.request().user - rql = user.unrelated_rql('use_email', 'EmailAddress', 'subject')[0] + rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] self.assertEquals(rql, 'Any O,AA,AB,AC ORDERBY AC DESC ' 'WHERE NOT S use_email O, S eid %(x)s, O is EmailAddress, O address AA, O alias AB, O modification_date AC') user = self.execute('Any X WHERE X login "admin"').get_entity(0, 0) - self.assertRaises(Unauthorized, user.unrelated_rql, 'use_email', 'EmailAddress', 'subject') + self.assertRaises(Unauthorized, user.cw_unrelated_rql, 'use_email', 'EmailAddress', 'subject') self.login('anon') user = self.request().user - self.assertRaises(Unauthorized, user.unrelated_rql, 'use_email', 'EmailAddress', 'subject') + self.assertRaises(Unauthorized, 
user.cw_unrelated_rql, 'use_email', 'EmailAddress', 'subject') def test_unrelated_rql_security_2(self): email = self.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0) - rql = email.unrelated_rql('use_email', 'CWUser', 'object')[0] + rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] self.assertEquals(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ASC ' 'WHERE NOT S use_email O, O eid %(x)s, S is CWUser, S login AA, S firstname AB, S surname AC, S modification_date AD') - #rql = email.unrelated_rql('use_email', 'Person', 'object')[0] + #rql = email.cw_unrelated_rql('use_email', 'Person', 'object')[0] #self.assertEquals(rql, '') self.login('anon') email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) - rql = email.unrelated_rql('use_email', 'CWUser', 'object')[0] + rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] self.assertEquals(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ' 'WHERE NOT EXISTS(S use_email O), O eid %(x)s, S is CWUser, S login AA, S firstname AB, S surname AC, S modification_date AD, ' 'A eid %(B)s, EXISTS(S identity A, NOT A in_group C, C name "guests", C is CWGroup)') - #rql = email.unrelated_rql('use_email', 'Person', 'object')[0] + #rql = email.cw_unrelated_rql('use_email', 'Person', 'object')[0] #self.assertEquals(rql, '') def test_unrelated_rql_security_nonexistant(self): self.login('anon') email = self.vreg['etypes'].etype_class('EmailAddress')(self.request()) - rql = email.unrelated_rql('use_email', 'CWUser', 'object')[0] + rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] self.assertEquals(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ' 'WHERE S is CWUser, S login AA, S firstname AB, S surname AC, S modification_date AD, ' 'A eid %(B)s, EXISTS(S identity A, NOT A in_group C, C name "guests", C is CWGroup)') @@ -442,8 +442,8 @@ e['data_format'] = 'text/html' e['data_encoding'] = 'ascii' e._cw.transaction_data = {} # XXX req should be a session - self.assertEquals(set(e.get_words()), - set(['an', 'html', 'file', 'du', 'html', 'some', 'data'])) + self.assertEquals(e.cw_adapt_to('IFTIndexable').get_words(), + {'C': [u'du', u'html', 'an', 'html', 'file', u'some', u'data']}) def test_nonregr_relation_cache(self): @@ -462,9 +462,9 @@ trinfo = self.execute('Any X WHERE X eid %(x)s', {'x': eid}).get_entity(0, 0) trinfo.complete() self.failUnless(isinstance(trinfo['creation_date'], datetime)) - self.failUnless(trinfo.relation_cached('from_state', 'subject')) - self.failUnless(trinfo.relation_cached('to_state', 'subject')) - self.failUnless(trinfo.relation_cached('wf_info_for', 'subject')) + self.failUnless(trinfo.cw_relation_cached('from_state', 'subject')) + self.failUnless(trinfo.cw_relation_cached('to_state', 'subject')) + self.failUnless(trinfo.cw_relation_cached('wf_info_for', 'subject')) self.assertEquals(trinfo.by_transition, ()) def test_request_cache(self): @@ -508,7 +508,7 @@ def test_metainformation_and_external_absolute_url(self): req = self.request() note = req.create_entity('Note', type=u'z') - metainf = note.metainformation() + metainf = note.cw_metainformation() self.assertEquals(metainf, {'source': {'adapter': 'native', 'uri': 'system'}, 'type': u'Note', 'extid': None}) self.assertEquals(note.absolute_url(), 'http://testing.fr/cubicweb/note/%s' % note.eid) metainf['source'] = metainf['source'].copy() diff -r 00b1b6b906cf -r 97c55baefa0c test/unittest_rset.py --- a/test/unittest_rset.py Thu Jul 15 12:03:13 2010 +0200 +++ b/test/unittest_rset.py Mon Jul 19 15:37:02 2010 +0200 @@ -233,10 +233,10 @@ 
self.assertEquals(e['surname'], 'di mascio') self.assertRaises(KeyError, e.__getitem__, 'firstname') self.assertRaises(KeyError, e.__getitem__, 'creation_date') - self.assertEquals(pprelcachedict(e._related_cache), []) + self.assertEquals(pprelcachedict(e._cw_related_cache), []) e.complete() self.assertEquals(e['firstname'], 'adrien') - self.assertEquals(pprelcachedict(e._related_cache), []) + self.assertEquals(pprelcachedict(e._cw_related_cache), []) def test_get_entity_advanced(self): self.request().create_entity('Bookmark', title=u'zou', path=u'/view') @@ -249,19 +249,19 @@ self.assertEquals(e['title'], 'zou') self.assertRaises(KeyError, e.__getitem__, 'path') self.assertEquals(e.view('text'), 'zou') - self.assertEquals(pprelcachedict(e._related_cache), []) + self.assertEquals(pprelcachedict(e._cw_related_cache), []) e = rset.get_entity(0, 1) self.assertEquals(e.cw_row, 0) self.assertEquals(e.cw_col, 1) self.assertEquals(e['login'], 'anon') self.assertRaises(KeyError, e.__getitem__, 'firstname') - self.assertEquals(pprelcachedict(e._related_cache), + self.assertEquals(pprelcachedict(e._cw_related_cache), []) e.complete() self.assertEquals(e['firstname'], None) self.assertEquals(e.view('text'), 'anon') - self.assertEquals(pprelcachedict(e._related_cache), + self.assertEquals(pprelcachedict(e._cw_related_cache), []) self.assertRaises(NotAnEntity, rset.get_entity, 0, 2) @@ -273,7 +273,7 @@ seid = self.execute('State X WHERE X name "activated"')[0][0] # for_user / in_group are prefetched in CWUser __init__, in_state should # be filed from our query rset - self.assertEquals(pprelcachedict(e._related_cache), + self.assertEquals(pprelcachedict(e._cw_related_cache), [('in_state_subject', [seid])]) def test_get_entity_advanced_prefilled_cache(self): @@ -283,7 +283,7 @@ 'X title XT, S name SN, U login UL, X eid %s' % e.eid) e = rset.get_entity(0, 0) self.assertEquals(e['title'], 'zou') - self.assertEquals(pprelcachedict(e._related_cache), + self.assertEquals(pprelcachedict(e._cw_related_cache), [('created_by_subject', [5])]) # first level of recursion u = e.created_by[0] @@ -302,9 +302,9 @@ e = rset.get_entity(0, 0) # if any of the assertion below fails with a KeyError, the relation is not cached # related entities should be an empty list - self.assertEquals(e.related_cache('primary_email', 'subject', True), ()) + self.assertEquals(e._cw_relation_cache('primary_email', 'subject', True), ()) # related rset should be an empty rset - cached = e.related_cache('primary_email', 'subject', False) + cached = e._cw_relation_cache('primary_email', 'subject', False) self.assertIsInstance(cached, ResultSet) self.assertEquals(cached.rowcount, 0) @@ -405,5 +405,19 @@ rset = self.execute('Any D, COUNT(U) GROUPBY D WHERE U is CWUser, U creation_date D') self.assertEquals(rset.related_entity(0,0), (None, None)) + def test_str(self): + rset = self.execute('(Any X,N WHERE X is CWGroup, X name N)') + self.assertIsInstance(str(rset), basestring) + self.assertEquals(len(str(rset).splitlines()), 1) + + def test_repr(self): + rset = self.execute('(Any X,N WHERE X is CWGroup, X name N)') + self.assertIsInstance(repr(rset), basestring) + self.assertTrue(len(repr(rset).splitlines()) > 1) + + rset = self.execute('(Any X WHERE X is CWGroup, X name "managers")') + self.assertIsInstance(str(rset), basestring) + self.assertEquals(len(str(rset).splitlines()), 1) + if __name__ == '__main__': unittest_main() diff -r 00b1b6b906cf -r 97c55baefa0c test/unittest_schema.py --- a/test/unittest_schema.py Thu Jul 15 12:03:13 2010 +0200 
+++ b/test/unittest_schema.py Mon Jul 19 15:37:02 2010 +0200 @@ -176,7 +176,7 @@ 'CWCache', 'CWConstraint', 'CWConstraintType', 'CWEType', 'CWAttribute', 'CWGroup', 'EmailAddress', 'CWRelation', 'CWPermission', 'CWProperty', 'CWRType', 'CWUser', - 'ExternalUri', 'File', 'Float', 'Image', 'Int', 'Interval', 'Note', + 'ExternalUri', 'File', 'Float', 'Int', 'Interval', 'Note', 'Password', 'Personne', 'RQLExpression', 'Societe', 'State', 'String', 'SubNote', 'SubWorkflowExitPoint', diff -r 00b1b6b906cf -r 97c55baefa0c test/unittest_selectors.py --- a/test/unittest_selectors.py Thu Jul 15 12:03:13 2010 +0200 +++ b/test/unittest_selectors.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,15 +15,14 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""unit tests for selectors mechanism - -""" +"""unit tests for selectors mechanism""" from logilab.common.testlib import TestCase, unittest_main +from cubicweb import Binary from cubicweb.devtools.testlib import CubicWebTC from cubicweb.appobject import Selector, AndSelector, OrSelector -from cubicweb.selectors import implements, match_user_groups +from cubicweb.selectors import is_instance, adaptable, match_user_groups from cubicweb.interfaces import IDownloadable from cubicweb.web import action @@ -93,12 +92,12 @@ self.assertEquals(selector(None), 2) def test_search_selectors(self): - sel = implements('something') - self.assertIs(sel.search_selector(implements), sel) + sel = is_instance('something') + self.assertIs(sel.search_selector(is_instance), sel) csel = AndSelector(sel, Selector()) - self.assertIs(csel.search_selector(implements), sel) + self.assertIs(csel.search_selector(is_instance), sel) csel = AndSelector(Selector(), sel) - self.assertIs(csel.search_selector(implements), sel) + self.assertIs(csel.search_selector(is_instance), sel) def test_inplace_and(self): selector = _1_() @@ -140,16 +139,17 @@ class ImplementsSelectorTC(CubicWebTC): def test_etype_priority(self): req = self.request() - cls = self.vreg['etypes'].etype_class('File') - anyscore = implements('Any').score_class(cls, req) - idownscore = implements(IDownloadable).score_class(cls, req) + f = req.create_entity('File', data_name=u'hop.txt', data=Binary('hop')) + rset = f.as_rset() + anyscore = is_instance('Any')(f.__class__, req, rset=rset) + idownscore = adaptable('IDownloadable')(f.__class__, req, rset=rset) self.failUnless(idownscore > anyscore, (idownscore, anyscore)) - filescore = implements('File').score_class(cls, req) + filescore = is_instance('File')(f.__class__, req, rset=rset) self.failUnless(filescore > idownscore, (filescore, idownscore)) def test_etype_inheritance_no_yams_inheritance(self): cls = self.vreg['etypes'].etype_class('Personne') - self.failIf(implements('Societe').score_class(cls, self.request())) + self.failIf(is_instance('Societe').score_class(cls, self.request())) class MatchUserGroupsTC(CubicWebTC): diff -r 00b1b6b906cf -r 97c55baefa0c test/unittest_uilib.py --- a/test/unittest_uilib.py Thu Jul 15 12:03:13 2010 +0200 +++ b/test/unittest_uilib.py Mon Jul 19 15:37:02 2010 +0200 @@ -142,6 +142,14 @@ self.assertEquals(uilib.soup2xhtml('hop hop', 'ascii'), 'hop hop') + def test_js(self): + self.assertEquals(str(uilib.js.pouet(1, "2")), + 'pouet(1,"2")') + self.assertEquals(str(uilib.js.cw.pouet(1, "2")), + 'cw.pouet(1,"2")') + self.assertEquals(str(uilib.js.cw.pouet(1, "2").pouet(None)), + 'cw.pouet(1,"2").pouet(null)') + if __name__ == '__main__': unittest_main() diff -r 00b1b6b906cf -r 
97c55baefa0c test/unittest_utils.py --- a/test/unittest_utils.py Thu Jul 15 12:03:13 2010 +0200 +++ b/test/unittest_utils.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,16 +15,16 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""unit tests for module cubicweb.utils - -""" +"""unit tests for module cubicweb.utils""" import re import decimal import datetime from logilab.common.testlib import TestCase, unittest_main + from cubicweb.utils import make_uid, UStringIO, SizeConstrainedList, RepeatList +from cubicweb.entity import Entity try: from cubicweb.utils import CubicWebJsonEncoder, json @@ -99,6 +99,7 @@ l.pop(2) self.assertEquals(l, [(1, 3)]*2) + class SizeConstrainedListTC(TestCase): def test_append(self): @@ -117,6 +118,7 @@ l.extend(extension) yield self.assertEquals, l, expected + class JSONEncoderTC(TestCase): def setUp(self): if json is None: @@ -136,6 +138,20 @@ def test_encoding_decimal(self): self.assertEquals(self.encode(decimal.Decimal('1.2')), '1.2') + def test_encoding_bare_entity(self): + e = Entity(None) + e['pouet'] = 'hop' + e.eid = 2 + self.assertEquals(json.loads(self.encode(e)), + {'pouet': 'hop', 'eid': 2}) + + def test_encoding_entity_in_list(self): + e = Entity(None) + e['pouet'] = 'hop' + e.eid = 2 + self.assertEquals(json.loads(self.encode([e])), + [{'pouet': 'hop', 'eid': 2}]) + def test_encoding_unknown_stuff(self): self.assertEquals(self.encode(TestCase), 'null') diff -r 00b1b6b906cf -r 97c55baefa0c test/unittest_vregistry.py --- a/test/unittest_vregistry.py Thu Jul 15 12:03:13 2010 +0200 +++ b/test/unittest_vregistry.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
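The encoder tests above depend on the reworked CubicWebJsonEncoder.default() (see the utils.py hunk further down), which serialises any object carrying an eid from its attribute cache instead of hooking into _iterencode. A minimal round-trip sketch, assuming cubicweb is importable and the stdlib json module is available (it mirrors the tests above, only the assert form differs):

.. sourcecode:: python

    import json

    from cubicweb.entity import Entity
    from cubicweb.utils import json_dumps

    # build a bare entity exactly as the tests above do
    e = Entity(None)
    e['pouet'] = 'hop'
    e.eid = 2

    # json_dumps wraps json.dumps(..., cls=CubicWebJsonEncoder)
    assert json.loads(json_dumps(e)) == {'pouet': 'hop', 'eid': 2}
    assert json.loads(json_dumps([e])) == [{'pouet': 'hop', 'eid': 2}]
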
-""" -""" from logilab.common.testlib import unittest_main, TestCase from os.path import join @@ -27,7 +25,7 @@ from cubicweb.cwvreg import CubicWebVRegistry, UnknownProperty from cubicweb.devtools import TestServerConfiguration from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.interfaces import IMileStone +from cubicweb.view import EntityAdapter from cubes.card.entities import Card @@ -56,21 +54,26 @@ def test_load_subinterface_based_appobjects(self): - self.vreg.reset() self.vreg.register_objects([join(BASE, 'web', 'views', 'iprogress.py')]) # check progressbar was kicked self.failIf(self.vreg['views'].get('progressbar')) - class MyCard(Card): - __implements__ = (IMileStone,) - self.vreg.reset() + # we've to emulate register_objects to add custom MyCard objects + path = [join(BASE, 'entities', '__init__.py'), + join(BASE, 'entities', 'adapters.py'), + join(BASE, 'web', 'views', 'iprogress.py')] + filemods = self.vreg.init_registration(path, None) + for filepath, modname in filemods: + self.vreg.load_file(filepath, modname) + class CardIProgressAdapter(EntityAdapter): + __regid__ = 'IProgress' self.vreg._loadedmods[__name__] = {} - self.vreg.register(MyCard) - self.vreg.register_objects([join(BASE, 'entities', '__init__.py'), - join(BASE, 'web', 'views', 'iprogress.py')]) + self.vreg.register(CardIProgressAdapter) + self.vreg.initialization_completed() # check progressbar isn't kicked self.assertEquals(len(self.vreg['views']['progressbar']), 1) def test_properties(self): + self.vreg.reset() self.failIf('system.version.cubicweb' in self.vreg['propertydefs']) self.failUnless(self.vreg.property_info('system.version.cubicweb')) self.assertRaises(UnknownProperty, self.vreg.property_info, 'a.non.existent.key') diff -r 00b1b6b906cf -r 97c55baefa0c uilib.py --- a/uilib.py Thu Jul 15 12:03:13 2010 +0200 +++ b/uilib.py Mon Jul 19 15:37:02 2010 +0200 @@ -31,6 +31,8 @@ from logilab.mtconverter import xml_escape, html_unescape from logilab.common.date import ustrftime +from cubicweb.utils import json_dumps + def rql_for_eid(eid): """return the rql query necessary to fetch entity with the given eid. This @@ -228,6 +230,54 @@ # HTML generation helper functions ############################################ +class _JSId(object): + def __init__(self, id, parent=None): + self.id = id + self.parent = parent + def __unicode__(self): + if self.parent: + return u'%s.%s' % (self.parent, self.id) + return unicode(self.id) + def __str__(self): + return unicode(self).encode('utf8') + def __getattr__(self, attr): + return _JSId(attr, self) + def __call__(self, *args): + return _JSCallArgs(args, self) + +class _JSCallArgs(_JSId): + def __init__(self, args, parent=None): + assert isinstance(args, tuple) + self.args = args + self.parent = parent + def __unicode__(self): + args = u','.join(json_dumps(arg) for arg in self.args) + if self.parent: + return u'%s(%s)' % (self.parent, args) + return args + +class _JS(object): + def __getattr__(self, attr): + return _JSId(attr) + +"""magic object to return strings suitable to call some javascript function with +the given arguments (which should be correctly typed). + +>>> str(js.pouet(1, "2")) +'pouet(1,"2")' +>>> str(js.cw.pouet(1, "2")) +'cw.pouet(1,"2")' +>>> str(js.cw.pouet(1, "2").pouet(None)) +'cw.pouet(1,"2").pouet(null)') +""" +js = _JS() + +def domid(string): + """return a valid DOM id from a string (should also be usable in jQuery + search expression...) 
+ """ + return string.replace('.', '_').replace('-', '_') + HTML4_EMPTY_TAGS = frozenset(('base', 'meta', 'link', 'hr', 'br', 'param', 'img', 'area', 'input', 'col')) diff -r 00b1b6b906cf -r 97c55baefa0c utils.py --- a/utils.py Thu Jul 15 12:03:13 2010 +0200 +++ b/utils.py Mon Jul 19 15:37:02 2010 +0200 @@ -121,6 +121,9 @@ def __init__(self, size, item): self._size = size self._item = item + def __repr__(self): + return '' % ( + id(self), self._item, self._size) def __len__(self): return self._size def __nonzero__(self): @@ -324,32 +327,23 @@ try: # may not be there if cubicweb-web not installed - if sys.version_info < (2,6): + if sys.version_info < (2, 6): import simplejson as json else: import json except ImportError: - pass + json_dumps = None + else: class CubicWebJsonEncoder(json.JSONEncoder): """define a json encoder to be able to encode yams std types""" - # _iterencode is the only entry point I've found to use a custom encode - # hook early enough: .default() is called if nothing else matched before, - # .iterencode() is called once on the main structure to encode and then - # never gets called again. - # For the record, our main use case is in FormValidateController with: - # json.dumps((status, args, entity), cls=CubicWebJsonEncoder) - # where we want all the entity attributes, including eid, to be part - # of the json object dumped. - # This would have once more been easier if Entity didn't extend dict. - def _iterencode(self, obj, markers=None): - if hasattr(obj, '__json_encode__'): - obj = obj.__json_encode__() - return json.JSONEncoder._iterencode(self, obj, markers) - def default(self, obj): + if hasattr(obj, 'eid'): + d = obj.cw_attr_cache.copy() + d['eid'] = obj.eid + return d if isinstance(obj, datetime.datetime): return obj.strftime('%Y/%m/%d %H:%M:%S') elif isinstance(obj, datetime.date): @@ -367,6 +361,9 @@ # just return None in those cases. 
return None + def json_dumps(value): + return json.dumps(value, cls=CubicWebJsonEncoder) + @deprecated('[3.7] merge_dicts is deprecated') def merge_dicts(dict1, dict2): diff -r 00b1b6b906cf -r 97c55baefa0c view.py --- a/view.py Thu Jul 15 12:03:13 2010 +0200 +++ b/view.py Mon Jul 19 15:37:02 2010 +0200 @@ -34,6 +34,7 @@ from cubicweb.appobject import AppObject from cubicweb.utils import UStringIO, HTMLStream from cubicweb.schema import display_name +from cubicweb.vregistry import classid # robots control NOINDEX = u'' @@ -366,6 +367,17 @@ __select__ = non_final_entity() category = 'entityview' + def call(self, **kwargs): + if self.cw_rset is None: + self.entity_call(self.cw_extra_kwargs.pop('entity')) + else: + super(EntityView, self).call(**kwargs) + + def cell_call(self, row, col, **kwargs): + self.entity_call(self.cw_rset.get_entity(row, col), **kwargs) + + def entity_call(self, entity, **kwargs): + raise NotImplementedError() class StartupView(View): """base class for views which doesn't need a particular result set to be @@ -519,3 +531,37 @@ # XXX a generic '%s%s' % (self.__regid__, self.__registry__.capitalize()) would probably be nicer def div_id(self): return '%sComponent' % self.__regid__ + + +class Adapter(AppObject): + """base class for adapters""" + __registry__ = 'adapters' + + +class EntityAdapter(Adapter): + """base class for entity adapters (eg adapt an entity to an interface)""" + def __init__(self, _cw, **kwargs): + try: + self.entity = kwargs.pop('entity') + except KeyError: + self.entity = kwargs['rset'].get_entity(kwargs.get('row') or 0, + kwargs.get('col') or 0) + Adapter.__init__(self, _cw, **kwargs) + + +def implements_adapter_compat(iface): + def _pre39_compat(func): + def decorated(self, *args, **kwargs): + entity = self.entity + if hasattr(entity, func.__name__): + warn('[3.9] %s method is deprecated, define it on a custom ' + '%s for %s instead' % (func.__name__, iface, + classid(entity.__class__)), + DeprecationWarning) + member = getattr(entity, func.__name__) + if callable(member): + return member(*args, **kwargs) + return member + return func(self, *args, **kwargs) + return decorated + return _pre39_compat diff -r 00b1b6b906cf -r 97c55baefa0c vregistry.py --- a/vregistry.py Thu Jul 15 12:03:13 2010 +0200 +++ b/vregistry.py Mon Jul 19 15:37:02 2010 +0200 @@ -44,7 +44,7 @@ from cubicweb import CW_SOFTWARE_ROOT from cubicweb import RegistryNotFound, ObjectNotFound, NoSelectableObject -from cubicweb.appobject import AppObject +from cubicweb.appobject import AppObject, class_regid def _toload_info(path, extrapath, _toload=None): """return a dictionary of : and an ordered list of @@ -83,16 +83,6 @@ """returns a unique identifier for an appobject class""" return '%s.%s' % (cls.__module__, cls.__name__) -def class_regid(cls): - """returns a unique identifier for an appobject class""" - if 'id' in cls.__dict__: - warn('[3.6] %s.%s: id is deprecated, use __regid__' - % (cls.__module__, cls.__name__), DeprecationWarning) - cls.__regid__ = cls.id - if hasattr(cls, 'id') and not isinstance(cls.id, property): - return cls.id - return cls.__regid__ - def class_registries(cls, registryname): if registryname: return (registryname,) @@ -235,8 +225,8 @@ % (args, kwargs.keys(), [repr(v) for v in appobjects])) if len(winners) > 1: - # log in production environement, error while debugging - if self.config.debugmode: + # log in production environement / test, error while debugging + if self.config.debugmode or self.config.mode == 'test': raise Exception('select ambiguity, args: 
%s\nkwargs: %s %s' % (args, kwargs.keys(), [repr(v) for v in winners])) @@ -405,6 +395,7 @@ # initialization methods ################################################### def init_registration(self, path, extrapath=None): + self.reset() # compute list of all modules that have to be loaded self._toloadmods, filemods = _toload_info(path, extrapath) # XXX is _loadedmods still necessary ? It seems like it's useful diff -r 00b1b6b906cf -r 97c55baefa0c web/__init__.py --- a/web/__init__.py Thu Jul 15 12:03:13 2010 +0200 +++ b/web/__init__.py Mon Jul 19 15:37:02 2010 +0200 @@ -17,26 +17,19 @@ # with CubicWeb. If not, see . """CubicWeb web client core. You'll need a apache-modpython or twisted publisher to get a full CubicWeb web application - +""" -""" __docformat__ = "restructuredtext en" _ = unicode -import sys -if sys.version_info < (2,6): - import simplejson as json -else: - import json - -dumps = json.dumps - from urllib import quote as urlquote from logilab.common.deprecation import deprecated from cubicweb.web._exceptions import * -from cubicweb.utils import CubicWebJsonEncoder +from cubicweb.utils import json_dumps + +dumps = deprecated('[3.9] use cubicweb.utils.json_dumps instead of dumps')(json_dumps) INTERNAL_FIELD_VALUE = '__cubicweb_internal_field__' @@ -65,9 +58,6 @@ FACETTES = set() -def json_dumps(value): - return dumps(value, cls=CubicWebJsonEncoder) - def jsonize(function): def newfunc(*args, **kwargs): value = function(*args, **kwargs) @@ -77,7 +67,7 @@ return json_dumps(repr(value)) return newfunc -@deprecated('[3.4] use req.build_ajax_replace_url() instead') +@deprecated('[3.4] use req.ajax_replace_url() instead') def ajax_replace_url(nodeid, rql, vid=None, swap=False, **extraparams): """builds a replacePageChunk-like url >>> ajax_replace_url('foo', 'Person P') diff -r 00b1b6b906cf -r 97c55baefa0c web/_exceptions.py --- a/web/_exceptions.py Thu Jul 15 12:03:13 2010 +0200 +++ b/web/_exceptions.py Mon Jul 19 15:37:02 2010 +0200 @@ -16,12 +16,12 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""exceptions used in the core of the CubicWeb web application +"""exceptions used in the core of the CubicWeb web application""" -""" __docformat__ = "restructuredtext en" from cubicweb._exceptions import * +from cubicweb.utils import json_dumps class PublishException(CubicWebException): """base class for publishing related exception""" @@ -66,8 +66,7 @@ self.reason = reason def dumps(self): - from cubicweb.web import json - return json.dumps({'reason': self.reason}) + return json_dumps({'reason': self.reason}) class LogOut(PublishException): """raised to ask for deauthentication of a logged in user""" diff -r 00b1b6b906cf -r 97c55baefa0c web/action.py --- a/web/action.py Thu Jul 15 12:03:13 2010 +0200 +++ b/web/action.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""abstract action classes for CubicWeb web client +"""abstract action classes for CubicWeb web client""" -""" __docformat__ = "restructuredtext en" _ = unicode diff -r 00b1b6b906cf -r 97c55baefa0c web/application.py --- a/web/application.py Thu Jul 15 12:03:13 2010 +0200 +++ b/web/application.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
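The view.py hunk above introduces the core of the 3.9 adapter mechanism: EntityAdapter subclasses registered in the new 'adapters' registry and retrieved through entity.cw_adapt_to(), plus the entity_call() entry point on EntityView. A minimal sketch of how a cube could use both together, assuming a schema with a Photo entity type (the Photo type, the IThumbnail regid and the method bodies are illustrative, not part of CubicWeb):

.. sourcecode:: python

    from cubicweb.selectors import is_instance, adaptable
    from cubicweb.view import EntityAdapter, EntityView

    class PhotoThumbnailAdapter(EntityAdapter):
        """adapt Photo entities to a hypothetical IThumbnail interface"""
        __regid__ = 'IThumbnail'
        __select__ = is_instance('Photo')

        def thumbnail_url(self):
            return self._cw.build_url('photo/%s/thumb' % self.entity.eid)

    class ThumbnailView(EntityView):
        """selectable for any entity adaptable to IThumbnail"""
        __regid__ = 'thumbnail'
        __select__ = EntityView.__select__ & adaptable('IThumbnail')

        def entity_call(self, entity):
            ithumb = entity.cw_adapt_to('IThumbnail')
            self.w(u'<img src="%s" alt="%s"/>' % (
                ithumb.thumbnail_url(), entity.dc_title()))
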
-"""CubicWeb web client application object +"""CubicWeb web client application object""" -""" from __future__ import with_statement __docformat__ = "restructuredtext en" @@ -234,7 +233,7 @@ def _update_last_login_time(self, req): # XXX should properly detect missing permission / non writeable source # and avoid "except (RepositoryError, Unauthorized)" below - if req.user.metainformation()['source']['adapter'] == 'ldapuser': + if req.user.cw_metainformation()['source']['adapter'] == 'ldapuser': return try: req.execute('SET X last_login_time NOW WHERE X eid %(x)s', @@ -282,12 +281,12 @@ to publish HTTP request. """ - def __init__(self, config, debug=None, + def __init__(self, config, session_handler_fact=CookieSessionHandler, vreg=None): self.info('starting web instance from %s', config.apphome) if vreg is None: - vreg = cwvreg.CubicWebVRegistry(config, debug=debug) + vreg = cwvreg.CubicWebVRegistry(config) self.vreg = vreg # connect to the repository and get instance's schema self.repo = config.repository(vreg) diff -r 00b1b6b906cf -r 97c55baefa0c web/box.py --- a/web/box.py Thu Jul 15 12:03:13 2010 +0200 +++ b/web/box.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""abstract box classes for CubicWeb web client +"""abstract box classes for CubicWeb web client""" -""" __docformat__ = "restructuredtext en" _ = unicode @@ -26,10 +25,11 @@ from cubicweb import Unauthorized, role as get_role, target as get_target from cubicweb.schema import display_name from cubicweb.selectors import (no_cnx, one_line_rset, primary_view, - match_context_prop, partial_has_related_entities) + match_context_prop, partial_relation_possible, + partial_has_related_entities) from cubicweb.view import View, ReloadableMixIn - -from cubicweb.web import INTERNAL_FIELD_VALUE +from cubicweb.uilib import domid, js +from cubicweb.web import INTERNAL_FIELD_VALUE, stdmsgs from cubicweb.web.htmlwidgets import (BoxLink, BoxWidget, SideBoxWidget, RawBoxItem, BoxSeparator) from cubicweb.web.action import UnregisteredAction @@ -143,7 +143,7 @@ def to_display_rql(self): assert self.rql is not None, self.__regid__ - return (self.rql, {'x': self._cw.user.eid}, 'x') + return (self.rql, {'x': self._cw.user.eid}) class EntityBoxTemplate(BoxTemplate): @@ -224,8 +224,8 @@ """returns the list of unrelated entities, using the entity's appropriate vocabulary function """ - skip = set(e.eid for e in entity.related(self.rtype, get_role(self), - entities=True)) + skip = set(unicode(e.eid) for e in entity.related(self.rtype, get_role(self), + entities=True)) skip.add(None) skip.add(INTERNAL_FIELD_VALUE) filteretype = getattr(self, 'etype', None) @@ -241,3 +241,92 @@ entities.append(entity) return entities + +class AjaxEditRelationBoxTemplate(EntityBoxTemplate): + __select__ = EntityBoxTemplate.__select__ & partial_relation_possible() + + # view used to display related entties + item_vid = 'incontext' + # values separator when multiple values are allowed + separator = ',' + # msgid of the message to display when some new relation has been added/removed + added_msg = None + removed_msg = None + + # class attributes below *must* be set in concret classes (additionaly to + # rtype / role [/ target_etype]. 
They should correspond to js_* methods on + # the json controller + + # function(eid) + # -> expected to return a list of values to display as input selector + # vocabulary + fname_vocabulary = None + + # function(eid, value) + # -> handle the selector's input (eg create necessary entities and/or + # relations). If the relation is multiple, you'll get a list of value, else + # a single string value. + fname_validate = None + + # function(eid, linked entity eid) + # -> remove the relation + fname_remove = None + + def cell_call(self, row, col, **kwargs): + req = self._cw + entity = self.cw_rset.get_entity(row, col) + related = entity.related(self.rtype, self.role) + rdef = entity.e_schema.rdef(self.rtype, self.role, self.target_etype) + if self.role == 'subject': + mayadd = rdef.has_perm(req, 'add', fromeid=entity.eid) + maydel = rdef.has_perm(req, 'delete', fromeid=entity.eid) + else: + mayadd = rdef.has_perm(req, 'add', toeid=entity.eid) + maydel = rdef.has_perm(req, 'delete', toeid=entity.eid) + if not (related or mayadd): + return + if mayadd or maydel: + req.add_js(('cubicweb.ajax.js', 'cubicweb.ajax.box.js')) + _ = req._ + w = self.w + divid = domid(self.__regid__) + unicode(entity.eid) + w(u'\n') diff -r 00b1b6b906cf -r 97c55baefa0c web/component.py --- a/web/component.py Thu Jul 15 12:03:13 2010 +0200 +++ b/web/component.py Mon Jul 19 15:37:02 2010 +0200 @@ -15,9 +15,10 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""abstract component class and base components definition for CubicWeb web client +"""abstract component class and base components definition for CubicWeb web +client +""" -""" __docformat__ = "restructuredtext en" _ = unicode @@ -25,7 +26,7 @@ from logilab.mtconverter import xml_escape from cubicweb import role -from cubicweb.web import json +from cubicweb.utils import json_dumps from cubicweb.view import Component from cubicweb.selectors import ( paginated_rset, one_line_rset, primary_view, match_context_prop, @@ -61,9 +62,15 @@ context = 'navcontentbottom' def call(self, view=None): - return self.cell_call(0, 0, view=view) + if self.cw_rset is None: + self.entity_call(self.cw_extra_kwargs.pop('entity')) + else: + self.cell_call(0, 0, view=view) def cell_call(self, row, col, view=None): + self.entity_call(self.cw_rset.get_entity(row, col), view=view) + + def entity_call(self, entity, view=None): raise NotImplementedError() @@ -126,10 +133,12 @@ if self.stop_param in params: del params[self.stop_param] - def page_url(self, path, params, start, stop): + def page_url(self, path, params, start=None, stop=None): params = dict(params) - params.update({self.start_param : start, - self.stop_param : stop,}) + if start is not None: + params[self.start_param] = start + if stop is not None: + params[self.stop_param] = stop view = self.cw_extra_kwargs.get('view') if view is not None and hasattr(view, 'page_navigation_url'): url = view.page_navigation_url(self, path, params) @@ -137,8 +146,9 @@ rql = params.pop('rql', self.cw_rset.printable_rql()) # latest 'true' used for 'swap' mode url = 'javascript: replacePageChunk(%s, %s, %s, %s, true)' % ( - json.dumps(params.get('divid', 'pageContent')), - json.dumps(rql), json.dumps(params.pop('vid', None)), json.dumps(params)) + json_dumps(params.get('divid', 'pageContent')), + json_dumps(rql), json_dumps(params.pop('vid', None)), + json_dumps(params)) else: url = self._cw.build_url(path, **params) return url diff -r 00b1b6b906cf -r 97c55baefa0c web/controller.py --- 
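AjaxEditRelationBoxTemplate above is meant to be subclassed in cubes: set rtype / role (and usually target_etype) plus the three fname_* attributes naming js_* callbacks on the json controller, and the base cell_call renders the box and wires cubicweb.ajax.box.js to those callbacks. A minimal sketch of a concrete subclass, assuming a schema where Ticket entities are tagged through a 'tags' relation and that matching js_* callbacks exist on the json controller (all names below are illustrative):

.. sourcecode:: python

    _ = unicode

    from cubicweb.selectors import is_instance
    from cubicweb.web.box import AjaxEditRelationBoxTemplate

    class TicketTagsBox(AjaxEditRelationBoxTemplate):
        """edit the tags of a ticket directly from its primary view"""
        __regid__ = 'ticket.tags-box'
        __select__ = (AjaxEditRelationBoxTemplate.__select__
                      & is_instance('Ticket'))

        rtype = 'tags'
        role = 'object'          # Tag tags Ticket
        target_etype = 'Tag'
        added_msg = _('tag added')
        removed_msg = _('tag removed')
        # names of the js_* methods expected on the json controller
        fname_vocabulary = 'unrelated_tags'
        fname_validate = 'link_tags'
        fname_remove = 'unlink_tag'
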
a/web/controller.py Thu Jul 15 12:03:13 2010 +0200 +++ b/web/controller.py Mon Jul 19 15:37:02 2010 +0200 @@ -23,6 +23,7 @@ from cubicweb.selectors import yes from cubicweb.appobject import AppObject +from cubicweb.mail import format_mail from cubicweb.web import LOGGER, Redirect, RequestError @@ -79,18 +80,20 @@ # generic methods useful for concrete implementations ###################### - def process_rql(self, rql): + def process_rql(self): """execute rql if specified""" - # XXX assigning to self really necessary? - self.cw_rset = None + req = self._cw + rql = req.form.get('rql') if rql: - self._cw.ensure_ro_rql(rql) + req.ensure_ro_rql(rql) if not isinstance(rql, unicode): - rql = unicode(rql, self._cw.encoding) - pp = self._cw.vreg['components'].select_or_none('magicsearch', self._cw) + rql = unicode(rql, req.encoding) + pp = req.vreg['components'].select_or_none('magicsearch', req) if pp is not None: - self.cw_rset = pp.process_query(rql) - return self.cw_rset + return pp.process_query(rql) + if 'eid' in req.form: + return req.eid_rset(req.form['eid']) + return None def notify_edited(self, entity): """called by edit_entity() to notify which entity is edited""" @@ -104,6 +107,16 @@ view.set_http_cache_headers() self._cw.validate_cache() + def sendmail(self, recipient, subject, body): + senderemail = self._cw.user.cw_adapt_to('IEmailable').get_email() + msg = format_mail({'email' : senderemail, + 'name' : self._cw.user.dc_title(),}, + [recipient], body, subject) + if not self._cw.vreg.config.sendmails([(msg, [recipient])]): + msg = self._cw._('could not connect to the SMTP server') + url = self._cw.build_url(__message=msg) + raise Redirect(url) + def reset(self): """reset form parameters and redirect to a view determinated by given parameters diff -r 00b1b6b906cf -r 97c55baefa0c web/data/actionBoxHeader.png Binary file web/data/actionBoxHeader.png has changed diff -r 00b1b6b906cf -r 97c55baefa0c web/data/boxHeader.png Binary file web/data/boxHeader.png has changed diff -r 00b1b6b906cf -r 97c55baefa0c web/data/button.png Binary file web/data/button.png has changed diff -r 00b1b6b906cf -r 97c55baefa0c web/data/cubicweb.acl.css --- a/web/data/cubicweb.acl.css Thu Jul 15 12:03:13 2010 +0200 +++ b/web/data/cubicweb.acl.css Mon Jul 19 15:37:02 2010 +0200 @@ -6,78 +6,35 @@ */ /******************************************************************************/ -/* security edition form (views/management.py) */ +/* security edition form (views/management.py) web/views/schema.py */ /******************************************************************************/ h2.schema{ - background : #ff7700; - color: #fff; - font-weight: bold; - padding : 0.1em 0.3em; + color: %(aColor)s; } - -h3.schema{ +table.schemaInfo td a.users{ + color : #00CC33; font-weight: bold; } -h4 a, -h4 a:link, -h4 a:visited{ - color:#000; - } - -table.schemaInfo { - margin: 1em 0em; - text-align: left; - border: 1px solid black; - border-collapse: collapse; - width:100%; -} - -table.schemaInfo th, -table.schemaInfo td { - padding: .3em .5em; - border: 1px solid grey; - width:33%; -} - - -table.schemaInfo tr th { - padding: 0.2em 0px 0.2em 5px; - background-image:none; - background-color:#dfdfdf; -} - -table.schemaInfo thead tr { - border: 1px solid #dfdfdf; -} - -table.schemaInfo td { - padding: 3px 10px 3px 5px; - -} - -a.users{ - color : #00CC33; - font-weight: bold } - -a.guests{ - color : #ff7700; +table.schemaInfo td a.guests{ + color: #ff7700; font-weight: bold; } -a.owners{ - color : #8b0000; +table.schemaInfo td a.owners{ + 
color: #8b0000; font-weight: bold; } -a.managers{ +table.schemaInfo td a.managers{ color: #000000; + font-weight: bold; } .discret, -a.grey{ +table.schemaInfo td a.grey{ color:#666; } @@ -86,39 +43,9 @@ } .red{ - color : #ff7700; + color: #ff7700; } div#schema_security{ width:100%; - } -/******************************************************************************/ -/* user groups edition form (views/euser.py) */ -/******************************************************************************/ - -table#groupedit { - margin: 1ex 1em; - text-align: left; - border: 1px solid black; - border-collapse: collapse; -} - -table#groupedit th, -table#groupedit td { - padding: 0.5em 1em; -} - -table#groupedit tr { - border-bottom: 1px solid black; -} - -table#groupedit tr.nogroup { - border: 1px solid red; - margin: 1px; -} - -table#groupedit td { - text-align: center; - padding: 0.5em; -} - + } \ No newline at end of file diff -r 00b1b6b906cf -r 97c55baefa0c web/data/cubicweb.ajax.box.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/web/data/cubicweb.ajax.box.js Mon Jul 19 15:37:02 2010 +0200 @@ -0,0 +1,81 @@ +/** + * Functions for ajax boxes. + * + * :organization: Logilab + * :copyright: 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. + * :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr + * + */ + +function ajaxBoxValidateSelectorInput(boxid, eid, separator, fname, msg) { + var holderid = cw.utils.domid(boxid) + eid + 'Holder'; + var value = $('#' + holderid + 'Input').val(); + if (separator) { + value = $.map(value.split(separator), jQuery.trim); + } + var d = loadRemote('json', ajaxFuncArgs(fname, null, eid, value)); + d.addCallback(function() { + $('#' + holderid).empty(); + var formparams = ajaxFuncArgs('render', null, 'boxes', boxid, eid); + $('#' + cw.utils.domid(boxid) + eid).loadxhtml('json', formparams); + if (msg) { + document.location.hash = '#header'; + updateMessage(msg); + } + }); +} + +function ajaxBoxRemoveLinkedEntity(boxid, eid, relatedeid, delfname, msg) { + var d = loadRemote('json', ajaxFuncArgs(delfname, null, eid, relatedeid)); + d.addCallback(function() { + var formparams = ajaxFuncArgs('render', null, 'boxes', boxid, eid); + $('#' + cw.utils.domid(boxid) + eid).loadxhtml('json', formparams); + if (msg) { + document.location.hash = '#header'; + updateMessage(msg); + } + }); +} + +function ajaxBoxShowSelector(boxid, eid, + unrelfname, + addfname, msg, + oklabel, cancellabel, + separator) { + var holderid = cw.utils.domid(boxid) + eid + 'Holder'; + var holder = $('#' + holderid); + if (holder.children().length) { + holder.empty(); + } + else { + var inputid = holderid + 'Input'; + var deferred = loadRemote('json', ajaxFuncArgs(unrelfname, null, eid)); + deferred.addCallback(function (unrelated) { + var input = INPUT({'type': 'text', 'id': inputid, 'size': 20}); + holder.append(input).show(); + $input = $(input); + $input.keypress(function (event) { + if (event.keyCode == KEYS.KEY_ENTER) { + // XXX not very user friendly: we should test that the suggestions + // aren't visible anymore + ajaxBoxValidateSelectorInput(boxid, eid, separator, addfname, msg); + } + }); + var buttons = DIV({'class' : "sgformbuttons"}, + A({'href' : "javascript: noop();", + 'onclick' : cw.utils.strFuncCall('ajaxBoxValidateSelectorInput', + boxid, eid, separator, addfname, msg)}, + oklabel), + ' / ', + A({'href' : "javascript: noop();", + 'onclick' : '$("#' + holderid + '").empty()'}, + cancellabel)); + holder.append(buttons); + $input.autocomplete(unrelated, { + multiple: 
separator, + max: 15 + }); + $input.focus(); + }); + } +} diff -r 00b1b6b906cf -r 97c55baefa0c web/data/cubicweb.ajax.js --- a/web/data/cubicweb.ajax.js Thu Jul 15 12:03:13 2010 +0200 +++ b/web/data/cubicweb.ajax.js Mon Jul 19 15:37:02 2010 +0200 @@ -1,33 +1,132 @@ -/* - * :organization: Logilab - * :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. - * :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +/* copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. + * contact http://www.logilab.fr/ -- mailto:contact@logilab.fr + * + * This file is part of CubicWeb. + * + * CubicWeb is free software: you can redistribute it and/or modify it under the + * terms of the GNU Lesser General Public License as published by the Free + * Software Foundation, either version 2.1 of the License, or (at your option) + * any later version. + * + * CubicWeb is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS + * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more + * details. + * + * You should have received a copy of the GNU Lesser General Public License along + * with CubicWeb. If not, see . */ -CubicWeb.require('python.js'); -CubicWeb.require('htmlhelpers.js'); +/** + * .. function:: Deferred + * + * dummy ultra minimalist implementation of deferred for jQuery + */ +function Deferred() { + this.__init__(this); +} + +jQuery.extend(Deferred.prototype, { + __init__: function() { + this._onSuccess = []; + this._onFailure = []; + this._req = null; + this._result = null; + this._error = null; + }, + + addCallback: function(callback) { + if (this._req.readyState == 4) { + if (this._result) { + var args = [this._result, this._req]; + jQuery.merge(args, cw.utils.sliceList(arguments, 1)); + callback.apply(null, args); + } + } + else { + this._onSuccess.push([callback, cw.utils.sliceList(arguments, 1)]); + } + return this; + }, + + addErrback: function(callback) { + if (this._req.readyState == 4) { + if (this._error) { + callback.apply(null, [this._error, this._req]); + } + } + else { + this._onFailure.push([callback, cw.utils.sliceList(arguments, 1)]); + } + return this; + }, + + success: function(result) { + this._result = result; + try { + for (var i = 0; i < this._onSuccess.length; i++) { + var callback = this._onSuccess[i][0]; + var args = [result, this._req]; + jQuery.merge(args, this._onSuccess[i][1]); + callback.apply(null, args); + } + } catch(error) { + this.error(this.xhr, null, error); + } + }, + + error: function(xhr, status, error) { + this._error = error; + for (var i = 0; i < this._onFailure.length; i++) { + var callback = this._onFailure[i][0]; + var args = [error, this._req]; + jQuery.merge(args, this._onFailure[i][1]); + callback.apply(null, args); + } + } + +}); + var JSON_BASE_URL = baseuri() + 'json?'; -function _loadAjaxHtmlHead(node, head, tag, srcattr) { - var loaded = []; +//============= utility function handling remote calls responses. 
==============// +function _loadAjaxHtmlHead($node, $head, tag, srcattr) { var jqtagfilter = tag + '[' + srcattr + ']'; - jQuery('head ' + jqtagfilter).each(function(i) { - loaded.push(this.getAttribute(srcattr)); - }); - node.find(tag).each(function(i) { - if (this.getAttribute(srcattr)) { - if (!loaded.contains(this.getAttribute(srcattr))) { - jQuery(this).appendTo(head); + if (cw['loaded_'+srcattr] === undefined) { + cw['loaded_'+srcattr] = []; + var loaded = cw['loaded_'+srcattr]; + jQuery('head ' + jqtagfilter).each(function(i) { + loaded.push(this.getAttribute(srcattr)); + }); + } else { + var loaded = cw['loaded_'+srcattr]; + } + $node.find(tag).each(function(i) { + var url = this.getAttribute(srcattr); + if (url) { + if (jQuery.inArray(url, loaded) == -1) { + // take care to * - *================================================== + * */ if (typeof SimileAjax == "undefined") { @@ -213,9 +213,9 @@ SimileAjax.loaded = true; })(); } -/*================================================== +/* * Platform Utility Functions and Constants - *================================================== + * */ /* This must be called after our jQuery has been loaded @@ -319,9 +319,10 @@ SimileAjax.Platform.getDefaultLocale = function() { return SimileAjax.Platform.clientLocale; -};/*================================================== +}; +/* * Debug Utility Functions - *================================================== + * */ SimileAjax.Debug = { @@ -678,9 +679,9 @@ } }; })(); -/*================================================== +/* * DOM Utility Functions - *================================================== + * */ SimileAjax.DOM = new Object(); @@ -1040,9 +1041,9 @@ SimileAjax.includeCssFile(document, SimileAjax.urlPrefix + "styles/graphics-ie6.css"); } -/*================================================== +/* * Opacity, translucency - *================================================== + * */ SimileAjax.Graphics._createTranslucentImage1 = function(url, verticalAlign) { var elmt = document.createElement("img"); @@ -1119,9 +1120,9 @@ } }; -/*================================================== +/* * Bubble - *================================================== + * */ SimileAjax.Graphics.bubbleConfig = { @@ -1479,9 +1480,9 @@ }; }; -/*================================================== +/* * Animation - *================================================== + * */ /** @@ -1549,11 +1550,11 @@ } }; -/*================================================== +/* * CopyPasteButton * * Adapted from http://spaces.live.com/editorial/rayozzie/demo/liveclip/liveclipsample/techPreview.html. 
- *================================================== + * */ /** @@ -1606,9 +1607,9 @@ return div; }; -/*================================================== +/* * getWidthHeight - *================================================== + * */ SimileAjax.Graphics.getWidthHeight = function(el) { // RETURNS hash {width: w, height: h} in pixels @@ -1633,9 +1634,9 @@ }; -/*================================================== +/* * FontRenderingContext - *================================================== + * */ SimileAjax.Graphics.getFontRenderingContext = function(elmt, width) { return new SimileAjax.Graphics._FontRenderingContext(elmt, width); @@ -2127,9 +2128,9 @@ var d = new Date().getTimezoneOffset(); return d / -60; }; -/*================================================== +/* * String Utility Functions and Constants - *================================================== + * */ String.prototype.trim = function() { @@ -2170,9 +2171,9 @@ } return result; }; -/*================================================== +/* * HTML Utility Functions - *================================================== + * */ SimileAjax.HTML = new Object(); @@ -2655,9 +2656,9 @@ return (this._a.length > 0) ? this._a[this._a.length - 1] : null; }; -/*================================================== +/* * Event Index - *================================================== + * */ SimileAjax.EventIndex = function(unit) { @@ -2889,9 +2890,9 @@ return this._index < this._events.length() ? this._events.elementAt(this._index++) : null; } -};/*================================================== +};/* * Default Unit - *================================================== + * */ SimileAjax.NativeDateUnit = new Object(); @@ -2953,9 +2954,9 @@ return new Date(v.getTime() + n); }; -/*================================================== +/* * General, miscellaneous SimileAjax stuff - *================================================== + * */ SimileAjax.ListenerQueue = function(wildcardHandlerName) { @@ -2998,7 +2999,7 @@ } }; -/*====================================================================== +/* * History * * This is a singleton that keeps track of undoable user actions and @@ -3020,7 +3021,7 @@ * * An iframe is inserted into the document's body element to track * onload events. - *====================================================================== + * */ SimileAjax.History = { @@ -3632,7 +3633,7 @@ } return elmt; }; -/*================================================== +/* * Timeline API * * This file will load all the Javascript files @@ -3696,7 +3697,7 @@ * Note that the Ajax version is usually NOT the same as the Timeline version. 
* See variable simile_ajax_ver below for the current version * - *================================================== + * */ (function() { @@ -3928,7 +3929,7 @@ loadMe(); } })(); -/*================================================= +/* * * Coding standards: * @@ -3950,14 +3951,14 @@ * We also want to use jslint: http://www.jslint.com/ * * - *================================================== + * */ -/*================================================== +/* * Timeline VERSION - *================================================== + * */ // Note: version is also stored in the build.xml file Timeline.version = 'pre 2.4.0'; // use format 'pre 1.2.3' for trunk versions @@ -3965,9 +3966,9 @@ Timeline.display_version = Timeline.version + ' (with Ajax lib ' + Timeline.ajax_lib_version + ')'; // cf method Timeline.writeVersion -/*================================================== +/* * Timeline - *================================================== + * */ Timeline.strings = {}; // localization string tables Timeline.HORIZONTAL = 0; @@ -4183,9 +4184,9 @@ -/*================================================== +/* * Timeline Implementation object - *================================================== + * */ Timeline._Impl = function(elmt, bandInfos, orientation, unit, timelineID) { SimileAjax.WindowManager.initialize(); @@ -4585,7 +4586,7 @@ this.paint(); }; -/*================================================= +/* * * Coding standards: * @@ -4607,14 +4608,14 @@ * We also want to use jslint: http://www.jslint.com/ * * - *================================================== + * */ -/*================================================== +/* * Band - *================================================== + * */ Timeline._Band = function(timeline, bandInfo, index) { // hack for easier subclassing @@ -5344,9 +5345,9 @@ Timeline._Band.prototype.closeBubble = function() { SimileAjax.WindowManager.cancelPopups(); }; -/*================================================== +/* * Classic Theme - *================================================== + * */ @@ -5523,14 +5524,14 @@ }; this.mouseWheel = 'scroll'; // 'default', 'zoom', 'scroll' -};/*================================================== +};/* * An "ether" is a object that maps date/time to pixel coordinates. 
- *================================================== + * */ -/*================================================== +/* * Linear Ether - *================================================== + * */ Timeline.LinearEther = function(params) { @@ -5601,9 +5602,9 @@ }; -/*================================================== +/* * Hot Zone Ether - *================================================== + * */ Timeline.HotZoneEther = function(params) { @@ -5828,9 +5829,9 @@ Timeline.HotZoneEther.prototype._getScale = function() { return this._interval / this._pixelsPerInterval; }; -/*================================================== +/* * Gregorian Ether Painter - *================================================== + * */ Timeline.GregorianEtherPainter = function(params) { @@ -5919,9 +5920,9 @@ }; -/*================================================== +/* * Hot Zone Gregorian Ether Painter - *================================================== + * */ Timeline.HotZoneGregorianEtherPainter = function(params) { @@ -6080,9 +6081,9 @@ } }; -/*================================================== +/* * Year Count Ether Painter - *================================================== + * */ Timeline.YearCountEtherPainter = function(params) { @@ -6169,9 +6170,9 @@ Timeline.YearCountEtherPainter.prototype.softPaint = function() { }; -/*================================================== +/* * Quarterly Ether Painter - *================================================== + * */ Timeline.QuarterlyEtherPainter = function(params) { @@ -6257,9 +6258,9 @@ Timeline.QuarterlyEtherPainter.prototype.softPaint = function() { }; -/*================================================== +/* * Ether Interval Marker Layout - *================================================== + * */ Timeline.EtherIntervalMarkerLayout = function(timeline, band, theme, align, showLine) { @@ -6363,9 +6364,9 @@ }; }; -/*================================================== +/* * Ether Highlight Layout - *================================================== + * */ Timeline.EtherHighlight = function(timeline, band, theme, backgroundLayer) { @@ -6404,9 +6405,9 @@ } } }; -/*================================================== +/* * Event Utils - *================================================== + * */ Timeline.EventUtils = {}; @@ -6421,7 +6422,7 @@ }; Timeline.EventUtils.decodeEventElID = function(elementID) { - /*================================================== + /* * * Use this function to decode an event element's id on a band (label div, * tape div or icon img). 
@@ -6447,7 +6448,7 @@ * by using Timeline.getTimeline, Timeline.getBand, or * Timeline.getEvent and passing in the element's id * - *================================================== + * */ var parts = elementID.split('-'); @@ -6467,9 +6468,9 @@ // elType should be one of {label | icon | tapeN | highlightN} return elType + "-tl-" + timeline.timelineID + "-" + band.getIndex() + "-" + evt.getID(); -};/*================================================== +};/* * Gregorian Date Labeller - *================================================== + * */ Timeline.GregorianDateLabeller = function(locale, timeZone) { @@ -6558,9 +6559,9 @@ return { text: text, emphasized: emphasized }; } -/*================================================== +/* * Default Event Source - *================================================== + * */ @@ -7125,12 +7126,12 @@ }; -/*================================================== +/* * Original Event Painter - *================================================== + * */ -/*================================================== +/* * * To enable a single event listener to monitor everything * on a Timeline, we need a way to map from an event's icon, @@ -7152,7 +7153,7 @@ * You can then retrieve the band/timeline objects and event object * by using Timeline.EventUtils.decodeEventElID * - *================================================== + * */ /* @@ -7818,9 +7819,9 @@ this._eventPaintListeners[i](this._band, op, evt, els); } }; -/*================================================== +/* * Detailed Event Painter - *================================================== + * */ // Note: a number of features from original-painter @@ -8509,9 +8510,9 @@ this._onSelectListeners[i](eventID); } }; -/*================================================== +/* * Overview Event Painter - *================================================== + * */ Timeline.OverviewEventPainter = function(params) { @@ -8767,9 +8768,9 @@ Timeline.OverviewEventPainter.prototype.showBubble = function(evt) { // not implemented }; -/*================================================== +/* * Compact Event Painter - *================================================== + * */ Timeline.CompactEventPainter = function(params) { @@ -9831,9 +9832,9 @@ this._onSelectListeners[i](eventIDs); } }; -/*================================================== +/* * Span Highlight Decorator - *================================================== + * */ Timeline.SpanHighlightDecorator = function(params) { @@ -9948,9 +9949,9 @@ Timeline.SpanHighlightDecorator.prototype.softPaint = function() { }; -/*================================================== +/* * Point Highlight Decorator - *================================================== + * */ Timeline.PointHighlightDecorator = function(params) { @@ -10015,9 +10016,9 @@ Timeline.PointHighlightDecorator.prototype.softPaint = function() { }; -/*================================================== +/* * Default Unit - *================================================== + * */ Timeline.NativeDateUnit = new Object(); @@ -10083,35 +10084,35 @@ return new Date(v.getTime() + n); }; -/*================================================== +/* * Common localization strings - *================================================== + * */ Timeline.strings["fr"] = { wikiLinkLabel: "Discute" }; -/*================================================== +/* * Localization of labellers.js - *================================================== + * */ Timeline.GregorianDateLabeller.monthNames["fr"] = [ "jan", "fev", "mar", "avr", "mai", 
"jui", "jui", "aou", "sep", "oct", "nov", "dec" ]; -/*================================================== +/* * Common localization strings - *================================================== + * */ Timeline.strings["en"] = { wikiLinkLabel: "Discuss" }; -/*================================================== +/* * Localization of labellers.js - *================================================== + * */ Timeline.GregorianDateLabeller.monthNames["en"] = [ diff -r 00b1b6b906cf -r 97c55baefa0c web/data/cubicweb.timeline-ext.js --- a/web/data/cubicweb.timeline-ext.js Thu Jul 15 12:03:13 2010 +0200 +++ b/web/data/cubicweb.timeline-ext.js Mon Jul 19 15:37:02 2010 +0200 @@ -1,49 +1,49 @@ -/* +/** * :organization: Logilab - * :copyright: 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. + * :copyright: 2008-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. * :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr * */ - -/* provide our own custom date parser since the default +/** + * provide our own custom date parser since the default * one only understands iso8601 and gregorian dates */ SimileAjax.NativeDateUnit.getParser = Timeline.NativeDateUnit.getParser = function(format) { if (typeof format == "string") { - if (format.indexOf('%') != -1) { - return function(datestring) { - if (datestring) { - return strptime(datestring, format); - } - return null; - }; - } + if (format.indexOf('%') != - 1) { + return function(datestring) { + if (datestring) { + return strptime(datestring, format); + } + return null; + }; + } format = format.toLowerCase(); } if (format == "iso8601" || format == "iso 8601") { - return Timeline.DateTime.parseIso8601DateTime; + return Timeline.DateTime.parseIso8601DateTime; } return Timeline.DateTime.parseGregorianDateTime; }; /*** CUBICWEB EVENT PAINTER *****************************************************/ Timeline.CubicWebEventPainter = function(params) { -// Timeline.OriginalEventPainter.apply(this, arguments); - this._params = params; - this._onSelectListeners = []; + // Timeline.OriginalEventPainter.apply(this, arguments); + this._params = params; + this._onSelectListeners = []; - this._filterMatcher = null; - this._highlightMatcher = null; - this._frc = null; + this._filterMatcher = null; + this._highlightMatcher = null; + this._frc = null; - this._eventIdToElmt = {}; + this._eventIdToElmt = {}; }; Timeline.CubicWebEventPainter.prototype = new Timeline.OriginalEventPainter(); Timeline.CubicWebEventPainter.prototype._paintEventLabel = function( - evt, text, left, top, width, height, theme) { +evt, text, left, top, width, height, theme) { var doc = this._timeline.getDocument(); var labelDiv = doc.createElement("div"); @@ -54,15 +54,21 @@ labelDiv.style.top = top + "px"; if (evt._obj.onclick) { - labelDiv.appendChild(A({'href': evt._obj.onclick}, text)); + labelDiv.appendChild(A({ + 'href': evt._obj.onclick + }, + text)); } else if (evt._obj.image) { - labelDiv.appendChild(IMG({src: evt._obj.image, width: '30px', height: '30px'})); + labelDiv.appendChild(IMG({ + src: evt._obj.image, + width: '30px', + height: '30px' + })); } else { - labelDiv.innerHTML = text; + labelDiv.innerHTML = text; } - if(evt._title != null) - labelDiv.title = evt._title; + if (evt._title != null) labelDiv.title = evt._title; var color = evt.getTextColor(); if (color == null) { @@ -72,29 +78,31 @@ labelDiv.style.color = color; } var classname = evt.getClassName(); - if(classname) labelDiv.className +=' ' + classname; + if (classname) labelDiv.className += ' ' + classname; 
this._eventLayer.appendChild(labelDiv); return { - left: left, - top: top, - width: width, + left: left, + top: top, + width: width, height: height, - elmt: labelDiv + elmt: labelDiv }; }; +Timeline.CubicWebEventPainter.prototype._showBubble = function(x, y, evt) { + var div = DIV({ + id: 'xxx' + }); + var width = this._params.theme.event.bubble.width; + if (!evt._obj.bubbleUrl) { + evt.fillInfoBubble(div, this._params.theme, this._band.getLabeller()); + } + SimileAjax.WindowManager.cancelPopups(); + SimileAjax.Graphics.createBubbleForContentAndPoint(div, x, y, width); + if (evt._obj.bubbleUrl) { + jQuery('#xxx').loadxhtml(evt._obj.bubbleUrl, null, 'post', 'replace'); + } +}; -Timeline.CubicWebEventPainter.prototype._showBubble = function(x, y, evt) { - var div = DIV({id: 'xxx'}); - var width = this._params.theme.event.bubble.width; - if (!evt._obj.bubbleUrl) { - evt.fillInfoBubble(div, this._params.theme, this._band.getLabeller()); - } - SimileAjax.WindowManager.cancelPopups(); - SimileAjax.Graphics.createBubbleForContentAndPoint(div, x, y, width); - if (evt._obj.bubbleUrl) { - jQuery('#xxx').loadxhtml(evt._obj.bubbleUrl, null, 'post', 'replace'); - } -}; diff -r 00b1b6b906cf -r 97c55baefa0c web/data/cubicweb.widgets.js --- a/web/data/cubicweb.widgets.js Thu Jul 15 12:03:13 2010 +0200 +++ b/web/data/cubicweb.widgets.js Mon Jul 19 15:37:02 2010 +0200 @@ -1,4 +1,6 @@ -/* +/** + * Functions dedicated to widgets. + * * :organization: Logilab * :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. * :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr @@ -9,144 +11,175 @@ // widget namespace Widgets = {}; - -/* this function takes a DOM node defining a widget and +/** + * .. function:: buildWidget(wdgnode) + * + * this function takes a DOM node defining a widget and * instantiates / builds the appropriate widget class */ function buildWidget(wdgnode) { var wdgclass = Widgets[wdgnode.getAttribute('cubicweb:wdgtype')]; if (wdgclass) { - var wdg = new wdgclass(wdgnode); + var wdg = new wdgclass(wdgnode); } } -/* This function is called on load and is in charge to build +/** + * .. function:: buildWidgets(root) + * + * This function is called on load and is in charge to build * JS widgets according to DOM nodes found in the page */ function buildWidgets(root) { root = root || document; jQuery(root).find('.widget').each(function() { - if (this.getAttribute('cubicweb:loadtype') == 'auto') { - buildWidget(this); - } + if (this.getAttribute('cubicweb:loadtype') == 'auto') { + buildWidget(this); + } }); } - // we need to differenciate cases where initFacetBoxEvents is called // with one argument or without any argument. If we use `initFacetBoxEvents` // as the direct callback on the jQuery.ready event, jQuery will pass some argument // of his, so we use this small anonymous function instead. -jQuery(document).ready(function() {buildWidgets();}); +jQuery(document).ready(function() { + buildWidgets(); +}); +function postJSON(url, data, callback) { + return jQuery.post(url, data, callback, 'json'); +} + +function getJSON(url, data, callback) { + return jQuery.get(url, data, callback, 'json'); +} Widgets.SuggestField = defclass('SuggestField', null, { __init__: function(node, options) { - var multi = node.getAttribute('cubicweb:multi') || "no"; - options = options || {}; - options.multiple = (multi == "yes") ? 
true : false; - var dataurl = node.getAttribute('cubicweb:dataurl'); + var multi = node.getAttribute('cubicweb:multi') || "no"; + options = options || {}; + options.multiple = (multi == "yes") ? true: false; + var dataurl = node.getAttribute('cubicweb:dataurl'); var method = postJSON; - if (options.method == 'get'){ - method = function(url, data, callback) { - // We can't rely on jQuery.getJSON because the server - // might set the Content-Type's response header to 'text/plain' - jQuery.get(url, data, function(response) { - callback(evalJSON(response)); - }); - }; - } - var self = this; // closure - method(dataurl, null, function(data) { - // in case we received a list of couple, we assume that the first - // element is the real value to be sent, and the second one is the - // value to be displayed - if (data.length && data[0].length == 2) { - options.formatItem = function(row) { return row[1]; }; - self.hideRealValue(node); - self.setCurrentValue(node, data); - } - jQuery(node).autocomplete(data, options); - }); + if (options.method == 'get') { + method = function(url, data, callback) { + // We can't rely on jQuery.getJSON because the server + // might set the Content-Type's response header to 'text/plain' + jQuery.get(url, data, function(response) { + callback(cw.evalJSON(response)); + }); + }; + } + var self = this; // closure + method(dataurl, null, function(data) { + // in case we received a list of couple, we assume that the first + // element is the real value to be sent, and the second one is the + // value to be displayed + if (data.length && data[0].length == 2) { + options.formatItem = function(row) { + return row[1]; + }; + self.hideRealValue(node); + self.setCurrentValue(node, data); + } + jQuery(node).autocomplete(data, options); + }); }, hideRealValue: function(node) { - var hidden = INPUT({'type': "hidden", 'name': node.name, 'value': node.value}); - node.parentNode.appendChild(hidden); - // remove 'name' attribute from visible input so that it is not submitted - // and set correct value in the corresponding hidden field - jQuery(node).removeAttr('name').bind('result', function(_, row, _) { - hidden.value = row[0]; - }); + var hidden = INPUT({ + 'type': "hidden", + 'name': node.name, + 'value': node.value + }); + node.parentNode.appendChild(hidden); + // remove 'name' attribute from visible input so that it is not submitted + // and set correct value in the corresponding hidden field + jQuery(node).removeAttr('name').bind('result', function(_, row, _) { + hidden.value = row[0]; + }); }, setCurrentValue: function(node, data) { - // called when the data is loaded to reset the correct displayed - // value in the visible input field (typically replacing an eid - // by a displayable value) - var curvalue = node.value; - if (!node.value) { - return; - } - for (var i=0,length=data.length; i