# HG changeset patch # User Sylvain Thénault # Date 1277996797 -7200 # Node ID 9db65b381028a2da447e41852fa7229a1c734b8c # Parent 60880c81e32e32e9fba2b80a2fe2343fd9e1a990# Parent b5640328ffad0e233b5991b81c583d6c179d85d8 backport stable diff -r b5640328ffad -r 9db65b381028 MANIFEST.in --- a/MANIFEST.in Thu Jul 01 09:23:39 2010 +0200 +++ b/MANIFEST.in Thu Jul 01 17:06:37 2010 +0200 @@ -5,13 +5,14 @@ include bin/cubicweb-* include man/cubicweb-ctl.1 -recursive-include doc README makefile *.conf *.py *.rst *.txt *.html *.png *.svg *.zargo *.dia +recursive-include doc README makefile *.conf *.css *.py *.rst *.txt *.html *.png *.svg *.zargo *.dia recursive-include misc *.py *.png *.display include web/views/*.pt recursive-include web/data external_resources *.js *.css *.py *.png *.gif *.ico *.ttf recursive-include web/wdoc *.rst *.png *.xml ChangeLog* +recursive-include devtools/data *.js *.css recursive-include i18n *.pot *.po recursive-include schemas *.py *.sql @@ -21,10 +22,15 @@ recursive-include sobjects/test/data bootstrap_cubes *.py recursive-include hooks/test/data bootstrap_cubes *.py recursive-include server/test/data bootstrap_cubes *.py source* -recursive-include web/test/data bootstrap_cubes *.py -recursive-include devtools/test/data bootstrap_cubes *.py *.txt +recursive-include devtools/test/data bootstrap_cubes *.py *.txt *.js +recursive-include web/test/data bootstrap_cubes pouet.css *.py + +recursive-include web/test/jstests *.js *.html *.css *.json +recursive-include web/test/windmill *.py recursive-include skeleton *.py *.css *.js *.po compat *.in *.tmpl +prune doc/book/en/.static/ +prune doc/book/fr/.static/ prune misc/cwfs prune goa diff -r b5640328ffad -r 9db65b381028 __pkginfo__.py --- a/__pkginfo__.py Thu Jul 01 09:23:39 2010 +0200 +++ b/__pkginfo__.py Thu Jul 01 17:06:37 2010 +0200 @@ -41,9 +41,9 @@ __depends__ = { 'logilab-common': '>= 0.50.2', - 'logilab-mtconverter': '>= 0.6.0', + 'logilab-mtconverter': '>= 0.8.0', 'rql': '>= 0.26.2', - 'yams': '>= 0.28.1', + 'yams': '>= 0.29.1', 'docutils': '>= 0.6', #gettext # for xgettext, msgcat, etc... 
# web dependancies @@ -52,7 +52,7 @@ 'Twisted': '', # XXX graphviz # server dependencies - 'logilab-database': '>= 1.0.5', + 'logilab-database': '>= 1.1.0', 'pysqlite': '>= 2.5.5', # XXX install pysqlite2 } @@ -77,6 +77,7 @@ join('server', 'test', 'data'), join('hooks', 'test', 'data'), join('web', 'test', 'data'), + join('devtools', 'data'), join('devtools', 'test', 'data'), 'schemas', 'skeleton'] diff -r b5640328ffad -r 9db65b381028 appobject.py --- a/appobject.py Thu Jul 01 09:23:39 2010 +0200 +++ b/appobject.py Thu Jul 01 17:06:37 2010 +0200 @@ -39,6 +39,92 @@ from logilab.common.decorators import classproperty from logilab.common.logging_ext import set_log_methods +from cubicweb.cwconfig import CubicWebConfiguration + +def class_regid(cls): + """returns a unique identifier for an appobject class""" + if 'id' in cls.__dict__: + warn('[3.6] %s.%s: id is deprecated, use __regid__' + % (cls.__module__, cls.__name__), DeprecationWarning) + cls.__regid__ = cls.id + if hasattr(cls, 'id') and not isinstance(cls.id, property): + return cls.id + return cls.__regid__ + +# helpers for debugging selectors +TRACED_OIDS = None + +def _trace_selector(cls, selector, args, ret): + # /!\ lltrace decorates pure function or __call__ method, this + # means argument order may be different + if isinstance(cls, Selector): + selname = str(cls) + vobj = args[0] + else: + selname = selector.__name__ + vobj = cls + if TRACED_OIDS == 'all' or class_regid(vobj) in TRACED_OIDS: + #SELECTOR_LOGGER.warning('selector %s returned %s for %s', selname, ret, cls) + print '%s -> %s for %s(%s)' % (selname, ret, vobj, vobj.__regid__) + +def lltrace(selector): + """use this decorator on your selectors so the becomes traceable with + :class:`traced_selection` + """ + # don't wrap selectors if not in development mode + if CubicWebConfiguration.mode == 'system': # XXX config.debug + return selector + def traced(cls, *args, **kwargs): + ret = selector(cls, *args, **kwargs) + if TRACED_OIDS is not None: + _trace_selector(cls, selector, args, ret) + return ret + traced.__name__ = selector.__name__ + traced.__doc__ = selector.__doc__ + return traced + +class traced_selection(object): + """ + Typical usage is : + + .. sourcecode:: python + + >>> from cubicweb.selectors import traced_selection + >>> with traced_selection(): + ... # some code in which you want to debug selectors + ... # for all objects + + Don't forget the 'from __future__ import with_statement' at the module top-level + if you're using python prior to 2.6. + + This will yield lines like this in the logs:: + + selector one_line_rset returned 0 for + + You can also give to :class:`traced_selection` the identifiers of objects on + which you want to debug selection ('oid1' and 'oid2' in the example above). + + .. sourcecode:: python + + >>> with traced_selection( ('regid1', 'regid2') ): + ... # some code in which you want to debug selectors + ... # for objects with __regid__ 'regid1' and 'regid2' + + A potentially usefull point to set up such a tracing function is + the `cubicweb.vregistry.Registry.select` method body. 
+ """ + + def __init__(self, traced='all'): + self.traced = traced + + def __enter__(self): + global TRACED_OIDS + TRACED_OIDS = self.traced + + def __exit__(self, exctype, exc, traceback): + global TRACED_OIDS + TRACED_OIDS = None + return traceback is None # selector base classes and operations ######################################## @@ -175,6 +261,7 @@ class AndSelector(MultiSelector): """and-chained selectors (formerly known as chainall)""" + @lltrace def __call__(self, cls, *args, **kwargs): score = 0 for selector in self.selectors: @@ -187,6 +274,7 @@ class OrSelector(MultiSelector): """or-chained selectors (formerly known as chainfirst)""" + @lltrace def __call__(self, cls, *args, **kwargs): for selector in self.selectors: partscore = selector(cls, *args, **kwargs) @@ -199,6 +287,7 @@ def __init__(self, selector): self.selector = selector + @lltrace def __call__(self, cls, *args, **kwargs): score = self.selector(cls, *args, **kwargs) return int(not score) diff -r b5640328ffad -r 9db65b381028 cwconfig.py --- a/cwconfig.py Thu Jul 01 09:23:39 2010 +0200 +++ b/cwconfig.py Thu Jul 01 17:06:37 2010 +0200 @@ -295,8 +295,6 @@ # log_format = '%(asctime)s - [%(threadName)s] (%(name)s) %(levelname)s: %(message)s' # nor remove appobjects based on unused interface [???] cleanup_interface_sobjects = True - # debug mode - debugmode = False if (CWDEV and _forced_mode != 'system'): @@ -662,12 +660,14 @@ vregpath.append(path + '.py') return vregpath - def __init__(self): + def __init__(self, debugmode=False): register_stored_procedures() ConfigurationMixIn.__init__(self) + self.debugmode = debugmode self.adjust_sys_path() self.load_defaults() - self.translations = {} + # will be properly initialized later by _gettext_init + self.translations = {'en': (unicode, lambda ctx, msgid: unicode(msgid) )} self._site_loaded = set() # don't register ReStructured Text directives by simple import, avoid pb # with eg sphinx. @@ -683,25 +683,23 @@ # overriden in CubicWebConfiguration self.cls_adjust_sys_path() - def init_log(self, logthreshold=None, debug=False, - logfile=None, syslog=False): + def init_log(self, logthreshold=None, logfile=None, syslog=False): """init the log service""" if logthreshold is None: - if debug: + if self.debugmode: logthreshold = 'DEBUG' else: logthreshold = self['log-threshold'] - self.debugmode = debug if sys.platform == 'win32': # no logrotate on win32, so use logging rotation facilities # for now, hard code weekly rotation every sunday, and 52 weeks kept # idea: make this configurable? 
- init_log(debug, syslog, logthreshold, logfile, self.log_format, + init_log(self.debugmode, syslog, logthreshold, logfile, self.log_format, rotation_parameters={'when': 'W6', # every sunday 'interval': 1, 'backupCount': 52}) else: - init_log(debug, syslog, logthreshold, logfile, self.log_format) + init_log(self.debugmode, syslog, logthreshold, logfile, self.log_format) # configure simpleTal logger logging.getLogger('simpleTAL').setLevel(logging.ERROR) @@ -843,12 +841,12 @@ return mdir @classmethod - def config_for(cls, appid, config=None): + def config_for(cls, appid, config=None, debugmode=False): """return a configuration instance for the given instance identifier """ config = config or guess_configuration(cls.instance_home(appid)) configcls = configuration_cls(config) - return configcls(appid) + return configcls(appid, debugmode) @classmethod def possible_configurations(cls, appid): @@ -916,9 +914,9 @@ # instance methods used to get instance specific resources ############# - def __init__(self, appid): + def __init__(self, appid, debugmode=False): self.appid = appid - CubicWebNoAppConfiguration.__init__(self) + CubicWebNoAppConfiguration.__init__(self, debugmode) self._cubes = None self.load_file_configuration(self.main_config_file()) @@ -999,7 +997,7 @@ super(CubicWebConfiguration, self).load_configuration() if self.apphome and self.set_language: # init gettext - self._set_language() + self._gettext_init() def _load_site_cubicweb(self, sitefile): # overriden to register cube specific options @@ -1008,12 +1006,12 @@ self.register_options(mod.options) self.load_defaults() - def init_log(self, logthreshold=None, debug=False, force=False): + def init_log(self, logthreshold=None, force=False): """init the log service""" if not force and hasattr(self, '_logging_initialized'): return self._logging_initialized = True - CubicWebNoAppConfiguration.init_log(self, logthreshold, debug, + CubicWebNoAppConfiguration.init_log(self, logthreshold, logfile=self.get('log-file')) # read a config file if it exists logconfig = join(self.apphome, 'logging.conf') @@ -1034,7 +1032,7 @@ if lang != 'en': yield lang - def _set_language(self): + def _gettext_init(self): """set language for gettext""" from gettext import translation path = join(self.apphome, 'i18n') @@ -1114,6 +1112,7 @@ def register_stored_procedures(): from logilab.database import FunctionDescr from rql.utils import register_function, iter_funcnode_variables + from rql.nodes import SortTerm, Constant, VariableRef global _EXT_REGISTERED if _EXT_REGISTERED: @@ -1159,6 +1158,34 @@ register_function(TEXT_LIMIT_SIZE) + class FTIRANK(FunctionDescr): + """return ranking of a variable that must be used as some has_text + relation subject in the query's restriction. Usually used to sort result + of full-text search by ranking. + """ + supported_backends = ('postgres',) + rtype = 'Float' + + def st_check_backend(self, backend, funcnode): + """overriden so that on backend not supporting fti ranking, the + function is removed when in an orderby clause, or replaced by a 1.0 + constant. 
+ """ + if not self.supports(backend): + parent = funcnode.parent + while parent is not None and not isinstance(parent, SortTerm): + parent = parent.parent + if isinstance(parent, SortTerm): + parent.parent.remove(parent) + else: + funcnode.parent.replace(funcnode, Constant(1.0, 'Float')) + parent = funcnode + for vref in parent.iget_nodes(VariableRef): + vref.unregister_reference() + + register_function(FTIRANK) + + class FSPATH(FunctionDescr): """return path of some bytes attribute stored using the Bytes File-System Storage (bfss) diff -r b5640328ffad -r 9db65b381028 cwctl.py --- a/cwctl.py Thu Jul 01 09:23:39 2010 +0200 +++ b/cwctl.py Thu Jul 01 17:06:37 2010 +0200 @@ -17,9 +17,8 @@ # with CubicWeb. If not, see . """the cubicweb-ctl tool, based on logilab.common.clcommands to provide a pluggable commands system. - +""" -""" __docformat__ = "restructuredtext en" # *ctl module should limit the number of import to be imported as quickly as @@ -477,23 +476,23 @@ def start_instance(self, appid): """start the instance's server""" - debug = self['debug'] - force = self['force'] - loglevel = self['loglevel'] - config = cwcfg.config_for(appid) - if loglevel is not None: - loglevel = 'LOG_%s' % loglevel.upper() - config.global_set_option('log-threshold', loglevel) - config.init_log(loglevel, debug=debug, force=True) + config = cwcfg.config_for(appid, debugmode=self['debug']) + init_cmdline_log_threshold(config, self['loglevel']) if self['profile']: config.global_set_option('profile', self.config.profile) helper = self.config_helper(config, cmdname='start') pidf = config['pid-file'] - if exists(pidf) and not force: + if exists(pidf) and not self['force']: msg = "%s seems to be running. Remove %s by hand if necessary or use \ the --force option." raise ExecutionError(msg % (appid, pidf)) - helper.start_server(config, debug) + helper.start_server(config) + + +def init_cmdline_log_threshold(config, loglevel): + if loglevel is not None: + config.global_set_option('log-threshold', loglevel.upper()) + config.init_log(config['log-threshold'], force=True) class StopInstanceCommand(InstanceCommand): @@ -788,11 +787,15 @@ repository internals (session, etc...) so most migration commands won't be available. + Arguments after bare "--" string will not be processed by the shell command + You can use it to pass extra arguments to your script and expect for + them in '__args__' afterwards. + the identifier of the instance to connect. """ name = 'shell' - arguments = ' [batch command file]' + arguments = ' [batch command file(s)] [-- + ''' + % data] + if server_data is not None: + host, port = server_data + html.append('') + html.append('') + html.append('') + + for dep in depends: + html.append(' ' % file_path(dep)) + + html.append(' ') + html.append(' '% (file_path(test_file),)) + html.append(''' + +
+</head>
+<body>
+<h1 id="qunit-header">QUnit example</h1>
+<h2 id="qunit-banner"></h2>
+<h2 id="qunit-userAgent"></h2>
+<ol id="qunit-tests">
+</ol>
    + +''') + return u'\n'.join(html) + + + + + + + +if __name__ == '__main__': + unittest_main() diff -r b5640328ffad -r 9db65b381028 devtools/repotest.py --- a/devtools/repotest.py Thu Jul 01 09:23:39 2010 +0200 +++ b/devtools/repotest.py Thu Jul 01 17:06:37 2010 +0200 @@ -18,8 +18,8 @@ """some utilities to ease repository testing This module contains functions to initialize a new repository. +""" -""" __docformat__ = "restructuredtext en" from pprint import pprint @@ -134,24 +134,32 @@ schema._eid_index[rdef.eid] = rdef -from logilab.common.testlib import TestCase +from logilab.common.testlib import TestCase, mock_object +from logilab.database import get_db_helper + from rql import RQLHelper + from cubicweb.devtools.fake import FakeRepo, FakeSession from cubicweb.server import set_debug from cubicweb.server.querier import QuerierHelper from cubicweb.server.session import Session -from cubicweb.server.sources.rql2sql import remove_unused_solutions +from cubicweb.server.sources.rql2sql import SQLGenerator, remove_unused_solutions class RQLGeneratorTC(TestCase): - schema = None # set this in concret test + schema = backend = None # set this in concret test def setUp(self): self.repo = FakeRepo(self.schema) + self.repo.system_source = mock_object(dbdriver=self.backend) self.rqlhelper = RQLHelper(self.schema, special_relations={'eid': 'uid', - 'has_text': 'fti'}) + 'has_text': 'fti'}, + backend=self.backend) self.qhelper = QuerierHelper(self.repo, self.schema) ExecutionPlan._check_permissions = _dummy_check_permissions rqlannotation._select_principal = _select_principal + if self.backend is not None: + dbhelper = get_db_helper(self.backend) + self.o = SQLGenerator(self.schema, dbhelper) def tearDown(self): ExecutionPlan._check_permissions = _orig_check_permissions @@ -270,6 +278,7 @@ self.system = self.sources[-1] do_monkey_patch() self._dumb_sessions = [] # by hi-jacked parent setup + self.repo.vreg.rqlhelper.backend = 'postgres' # so FTIRANK is considered def add_source(self, sourcecls, uri): self.sources.append(sourcecls(self.repo, self.o.schema, diff -r b5640328ffad -r 9db65b381028 devtools/test/data/dbfill.conf --- a/devtools/test/data/dbfill.conf Thu Jul 01 09:23:39 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,22 +0,0 @@ -[BASE] -APPLICATION_SCHEMA = /home/adim/cvs_work/soft_prive/ginco/applications/crm/schema -APPLICATION_HOME = /home/adim/etc/erudi.d/crmadim # ??? 
-FAKEDB_NAME = crmtest -ENCODING = UTF-8 -HOST = crater -USER = adim -PASSWORD = adim - - -[ENTITIES] -default = 20 #means default is 20 entities -Person = 10 # means 10 Persons -Company = 5# means 5 companies - - -[RELATIONS] -Person works_for Company = 4 -Division subsidiary_of Company = 3 - -[DEFAULT_VALUES] -Person.firstname = data/firstnames.txt diff -r b5640328ffad -r 9db65b381028 devtools/test/data/js_examples/dep_1.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/dep_1.js Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,1 @@ +a = 4; diff -r b5640328ffad -r 9db65b381028 devtools/test/data/js_examples/deps_2.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/deps_2.js Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,1 @@ +b = a +2; diff -r b5640328ffad -r 9db65b381028 devtools/test/data/js_examples/test_simple_failure.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/test_simple_failure.js Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,18 @@ +$(document).ready(function() { + + module("air"); + + test("test 1", function() { + equals(2, 4); + }); + + test("test 2", function() { + equals('', '45'); + equals('1024', '32'); + }); + + module("able"); + test("test 3", function() { + same(1, 1); + }); +}); diff -r b5640328ffad -r 9db65b381028 devtools/test/data/js_examples/test_simple_success.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/test_simple_success.js Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,17 @@ +$(document).ready(function() { + + module("air"); + + test("test 1", function() { + equals(2, 2); + }); + + test("test 2", function() { + equals('45', '45'); + }); + + module("able"); + test("test 3", function() { + same(1, 1); + }); +}); diff -r b5640328ffad -r 9db65b381028 devtools/test/data/js_examples/test_with_dep.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/test_with_dep.js Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,9 @@ +$(document).ready(function() { + + module("air"); + + test("test 1", function() { + equals(a, 4); + }); + +}); diff -r b5640328ffad -r 9db65b381028 devtools/test/data/js_examples/test_with_ordered_deps.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/test_with_ordered_deps.js Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,9 @@ +$(document).ready(function() { + + module("air"); + + test("test 1", function() { + equals(b, 6); + }); + +}); diff -r b5640328ffad -r 9db65b381028 devtools/test/data/js_examples/utils.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/data/js_examples/utils.js Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,29 @@ +function datetuple(d) { + return [d.getFullYear(), d.getMonth()+1, d.getDate(), + d.getHours(), d.getMinutes()]; +} + +function pprint(obj) { + print('{'); + for(k in obj) { + print(' ' + k + ' = ' + obj[k]); + } + print('}'); +} + +function arrayrepr(array) { + return '[' + array.join(', ') + ']'; +} + +function assertArrayEquals(array1, array2) { + if (array1.length != array2.length) { + throw new crosscheck.AssertionFailure(array1.join(', ') + ' != ' + array2.join(', ')); + } + for (var i=0; i ,] []): + +The should contains the qunit test. defines the list +of javascript file that must be imported before the test script. Dependencies +are included their definition order. are additional files copied in the +test directory. both and are optionnal. +``jquery.js`` is preincluded in for all test. + +.. 
sourcecode:: python + + from cubicweb.qunit import QUnitTestCase + + class MyQUnitTest(QUnitTestCase): + + all_js_tests = ( + ("relative/path/to/my_simple_testcase.js",) + ("relative/path/to/my_qunit_testcase.js",( + "rel/path/to/dependency_1.js", + "rel/path/to/dependency_2.js",)), + ("relative/path/to/my_complexe_qunit_testcase.js",( + "rel/path/to/dependency_1.js", + "rel/path/to/dependency_2.js", + ),( + "rel/path/file_dependency.html", + "path/file_dependency.json") + ), + ) diff -r b5640328ffad -r 9db65b381028 doc/book/en/makefile --- a/doc/book/en/makefile Thu Jul 01 09:23:39 2010 +0200 +++ b/doc/book/en/makefile Thu Jul 01 17:06:37 2010 +0200 @@ -11,6 +11,10 @@ PAPER = #BUILDDIR = build BUILDDIR = ~/tmp/cwdoc +CWDIR = ../../.. +JSDIR = ${CWDIR}/web/data +JSTORST = ${CWDIR}/doc/tools/pyjsrest.py +BUILDJS = devweb/js_api # Internal variables for sphinx PAPEROPT_a4 = -D latex_paper_size=a4 @@ -18,6 +22,7 @@ ALLSPHINXOPTS = -d ${BUILDDIR}/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . + .PHONY: help clean html web pickle htmlhelp latex changes linkcheck help: @@ -36,6 +41,7 @@ rm -rf apidoc/ rm -f *.html -rm -rf ${BUILDDIR}/* + -rm -rf ${BUILDJS} all: ${TARGET} apidoc html @@ -48,12 +54,16 @@ epydoc --html -o apidoc -n "cubicweb" --exclude=setup --exclude=__pkginfo__ ../../../ # run sphinx ### -html: +html: js mkdir -p ${BUILDDIR}/html ${BUILDDIR}/doctrees $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) ${BUILDDIR}/html @echo @echo "Build finished. The HTML pages are in ${BUILDDIR}/html." +js: + mkdir -p ${BUILDJS} + $(JSTORST) -p ${JSDIR} -o ${BUILDJS} + pickle: mkdir -p ${BUILDDIR}/pickle ${BUILDDIR}/doctrees $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) ${BUILDDIR}/pickle diff -r b5640328ffad -r 9db65b381028 doc/book/en/tutorials/index.rst --- a/doc/book/en/tutorials/index.rst Thu Jul 01 09:23:39 2010 +0200 +++ b/doc/book/en/tutorials/index.rst Thu Jul 01 17:06:37 2010 +0200 @@ -17,3 +17,4 @@ base/index advanced/index + tools/windmill.rst diff -r b5640328ffad -r 9db65b381028 doc/book/en/tutorials/tools/windmill.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/en/tutorials/tools/windmill.rst Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,133 @@ +========================== +Use Windmill with CubicWeb +========================== + +Windmill_ implements cross browser testing, in-browser recording and playback, +and functionality for fast accurate debugging and test environment integration. + +.. _Windmill: http://www.getwindmill.com/ + +`Online features list `_ is available. + + +Installation +============ + +Windmill +-------- + +You have to install Windmill manually for now. If you're using Debian, there is +no binary package (`yet `_). + +The simplest solution is to use a *setuptools/pip* command (for a clean +environment, take a look to the `virtualenv +`_ project as well):: + + pip install windmill + curl -O http://github.com/windmill/windmill/tarball/master + +Some install instructions are `available `_. + +Be sure to have the windmill module in your PYTHONPATH afterwards:: + + python -c "import windmill" + +X dummy +------- + +In order to reduce unecessary system load from your test machines, It's +recommended to use X dummy server for testing the Unix web clients, you need a +dummy video X driver (as xserver-xorg-video-dummy package in Debian) coupled +with a light X server as `Xvfb `_. + + The dummy driver is a special driver available with the XFree86 DDX. 
To use + the dummy driver, simply substitue it for your normal card driver in the + Device section of your xorg.conf configuration file. For example, if you + normally uses an ati driver, then you will have a Device section with + Driver "ati" to let the X server know that you want it to load and use the + ati driver; however, for these conformance tests, you would change that + line to Driver "dummy" and remove any other ati specific options from the + Device section. + + *From: http://www.x.org/wiki/XorgTesting* + +Then, you can run the X server with the following command : + + /usr/bin/X11/Xvfb :1 -ac -screen 0 1280x1024x8 -fbdir /tmp + + +Windmill usage +============== + +Record your use case +-------------------- + +- start your instance manually +- start Windmill_ with url site as last argument (read Usage_ or use *'-h'* + option to find required command line arguments) +- use the record button +- click on save to obtain python code of your use case +- copy the content to a new file in a *windmill* directory + +.. _Usage: http://wiki.github.com/windmill/windmill/running-tests + +If you are using firefox as client, consider the "firebug" option. + +You can refine the test by the *loadtest* windmill option: + + windmill -m firebug loadtest= + + +Integrate Windmill tests into CubicWeb +====================================== + +Run your tests +-------------- + +You can easily run your windmill test suite through `pytest` or :mod:`unittest`. +You have to copy a *test_windmill.py* file from :mod:`web.test`. + +By default, CubicWeb will use **firefox** as the default browser and will try +to run test instance server on localhost. In the general case, You've no need +to change anything. + +Check the :class:`cubicweb.devtools.cwwindmill.CubicWebServerTC` class for server +parameters and :class:`cubicweb.devtools.cwwindmill.CubicWebWindmillUseCase` for +Windmill configuration. + +Best practises +-------------- + +Don't run another instance on the same port. You risk to silence some +regressions (test runner will automatically fail in further versions). + +Start your use case by using an assert on the expected primary url page. +Otherwise all your tests could fail without clear explanation of the used +navigation. + +In the same location of the *test_windmill.py*, create a *windmill/* with your +windmill recorded use cases. + +Then, you can launch the test series with:: + + % pytest test/test_windmill.py + +For instance, you can start CubicWeb framework use tests by:: + + % pytest web/test/test_windmill.py + + +Preferences +=========== + +A *.windmill/prefs.py* could be used to redefine default configuration values. + +.. define CubicWeb preferences in the parent test case instead with a dedicated firefox profile + +For managing browser extensions, read `advanced topic chapter +`_. + +More configuration examples could be seen in *windmill/conf/global_settings.py* +as template. + + diff -r b5640328ffad -r 9db65b381028 doc/refactoring-the-css-with-uiprops.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/refactoring-the-css-with-uiprops.rst Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,73 @@ +========================================= +Refactoring the CSSs with UI properties +========================================= + +Overview +========= + +Managing styles progressively became difficult in CubicWeb. The +introduction of uiprops is an attempt to fix this problem. + +The goal is to make it possible to use variables in our CSSs. 
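For illustration only (the property names below are hypothetical and not part of this changeset), a minimal ``uiprops.py`` sketch: since the file contains ordinary Python assignments, derived values can be computed from other properties::

    # illustrative sketch -- hypothetical property names
    defaultBgColor = '#eee'
    # reuse a property instead of repeating the same literal color in several rules
    tableBgColor = defaultBgColor
    # compute a derived color (e.g. one entry of a palette) with plain Python
    listingBorderColor = '#%02x%02x%02x' % ((0xee - 0x30,) * 3)

Such properties are then referenced from the CSS with the ``%(name)s`` interpolation syntax, as the quick example below shows.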
+ +These variables are defined or computed in the uiprops.py python file +and inserted in the CSS using the Python string interpolation syntax. + +A quick example, put in ``uiprops.py``:: + + defaultBgColor = '#eee' + +and in your css:: + + body { background-color: %(defaultBgColor)s; } + + +The good practices are: + +- define a variable in uiprops to avoid repetitions in the CSS + (colors, borders, fonts, etc.) + +- define a variable in uiprops when you need to compute values + (compute a color palette, etc.) + +The algorithm implemented in CubicWeb is the following: + +- read uiprops file while walk up the chain of cube dependencies: if + cube myblog depends on cube comment, the variables defined in myblog + will have precedence over the ones in comment + +- replace the %(varname)s in all the CSSs of all the cubes + +Keep in mind that the browser will then interpret the CSSs and apply +the standard cascading mechanism. + +FAQ +==== + +- How do I keep the old style? + + Put ``STYLESHEET = [data('cubicweb.old.css')]`` in your uiprops.py + file and think about something else. + +- What are the changes in cubicweb.css? + + Version 3.9.0 of cubicweb changed the following in the default html + markup and css: + + =============== ================================== + old new + =============== ================================== + .navcol #navColumnLeft, #navColumnRight + #contentcol #contentColumn + .footer #footer + .logo #logo + .simpleMessage .loginMessage + .appMsg (styles are removed from css) + .searchMessage (styles are removed from css) + =============== ================================== + + Introduction of the new cubicweb.reset.css based on Eric Meyer's + reset css. + + Lots of margin, padding, etc. + diff -r b5640328ffad -r 9db65b381028 doc/tools/pyjsrest.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/tools/pyjsrest.py Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,110 @@ +#!/usr/bin/env python +""" +Parser for Javascript comments. +""" +from __future__ import with_statement + +import sys, os, getopt, re + +def clean_comment(match): + comment = match.group() + comment = strip_stars(comment) + return comment + +# Rest utilities +def rest_title(title, level, level_markups=['=', '=', '-', '~', '+', '`']): + size = len(title) + if level == 0: + return '\n'.join((level_markups[level] * size, title, level_markups[0] * size)) + '\n' + return '\n'.join(('\n' + title, level_markups[level] * size)) + '\n' + +def get_doc_comments(text): + """ + Return a list of all documentation comments in the file text. Each + comment is a pair, with the first element being the comment text and + the second element being the line after it, which may be needed to + guess function & arguments. + + >>> get_doc_comments(read_file('examples/module.js'))[0][0][:40] + '/**\n * This is the module documentation.' + >>> get_doc_comments(read_file('examples/module.js'))[1][0][7:50] + 'This is documentation for the first method.' + >>> get_doc_comments(read_file('examples/module.js'))[1][1] + 'function the_first_function(arg1, arg2) ' + >>> get_doc_comments(read_file('examples/module.js'))[2][0] + '/** This is the documentation for the second function. */' + + """ + return [clean_comment(match) for match in re.finditer('/\*\*.*?\*/', + text, re.DOTALL|re.MULTILINE)] + +RE_STARS = re.compile('^\s*?\* ?', re.MULTILINE) + + +def strip_stars(doc_comment): + """ + Strip leading stars from a doc comment. + + >>> strip_stars('/** This is a comment. */') + 'This is a comment.' 
+ >>> strip_stars('/**\n * This is a\n * multiline comment. */') + 'This is a\n multiline comment.' + >>> strip_stars('/** \n\t * This is a\n\t * multiline comment. \n*/') + 'This is a\n multiline comment.' + + """ + return RE_STARS.sub('', doc_comment[3:-2]).strip() + +def parse_js_files(args=sys.argv): + """ + Main command-line invocation. + """ + try: + opts, args = getopt.gnu_getopt(args[1:], 'p:o:h', [ + 'jspath=', 'output=', 'help']) + opts = dict(opts) + except getopt.GetoptError: + usage() + sys.exit(2) + + rst_dir = opts.get('--output') or opts.get('-o') + if rst_dir is None and len(args) != 1: + rst_dir = 'apidocs' + js_dir = opts.get('--jspath') or opts.get('-p') + if not os.path.exists(os.path.join(rst_dir)): + os.makedirs(os.path.join(rst_dir)) + + f_index = open(os.path.join(rst_dir, 'index.rst'), 'wb') + f_index.write(''' +.. toctree:: + :maxdepth: 1 + +''' +) + for js_path, js_dirs, js_files in os.walk(js_dir): + rst_path = re.sub('%s%s*' % (js_dir, os.path.sep), '', js_path) + for js_file in js_files: + if not js_file.endswith('.js'): + continue + if not os.path.exists(os.path.join(rst_dir, rst_path)): + os.makedirs(os.path.join(rst_dir, rst_path)) + rst_content = extract_rest(js_path, js_file) + filename = os.path.join(rst_path, js_file[:-3]) + # add to index + f_index.write(' %s\n' % filename) + # save rst file + with open(os.path.join(rst_dir, filename) + '.rst', 'wb') as f_rst: + f_rst.write(rst_content) + f_index.close() + +def extract_rest(js_dir, js_file): + js_filepath = os.path.join(js_dir, js_file) + filecontent = open(js_filepath, 'U').read() + comments = get_doc_comments(filecontent) + rst = rest_title(js_file, 0) + rst += '.. module:: %s\n\n' % js_file + rst += '\n\n'.join(comments) + return rst + +if __name__ == '__main__': + parse_js_files() diff -r b5640328ffad -r 9db65b381028 entities/__init__.py --- a/entities/__init__.py Thu Jul 01 09:23:39 2010 +0200 +++ b/entities/__init__.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""base application's entities class implementation: `AnyEntity` +"""base application's entities class implementation: `AnyEntity`""" -""" __docformat__ = "restructuredtext en" from warnings import warn @@ -28,33 +27,13 @@ from cubicweb import Unauthorized, typed_eid from cubicweb.entity import Entity -from cubicweb.interfaces import IBreadCrumbs, IFeed - class AnyEntity(Entity): """an entity instance has e_schema automagically set on the class and instances have access to their issuing cursor """ __regid__ = 'Any' - __implements__ = (IBreadCrumbs, IFeed) - - fetch_attrs = ('modification_date',) - @classmethod - def fetch_order(cls, attr, var): - """class method used to control sort order when multiple entities of - this type are fetched - """ - return cls.fetch_unrelated_order(attr, var) - - @classmethod - def fetch_unrelated_order(cls, attr, var): - """class method used to control sort order when multiple entities of - this type are fetched to use in edition (eg propose them to create a - new relation on an edited entity). 
- """ - if attr == 'modification_date': - return '%s DESC' % var - return None + __implements__ = () # meta data api ########################################################### @@ -63,7 +42,7 @@ for rschema, attrschema in self.e_schema.attribute_definitions(): if rschema.meta: continue - value = self.get_value(rschema.type) + value = self.cw_attr_value(rschema.type) if value: # make the value printable (dates, floats, bytes, etc.) return self.printable_value(rschema.type, value, attrschema.type, @@ -120,32 +99,6 @@ except (Unauthorized, IndexError): return None - def breadcrumbs(self, view=None, recurs=False): - path = [self] - if hasattr(self, 'parent'): - parent = self.parent() - if parent is not None: - try: - path = parent.breadcrumbs(view, True) + [self] - except TypeError: - warn("breadcrumbs method's now takes two arguments " - "(view=None, recurs=False), please update", - DeprecationWarning) - path = parent.breadcrumbs(view) + [self] - if not recurs: - if view is None: - if 'vtitle' in self._cw.form: - # embeding for instance - path.append( self._cw.form['vtitle'] ) - elif view.__regid__ != 'primary' and hasattr(view, 'title'): - path.append( self._cw._(view.title) ) - return path - - ## IFeed interface ######################################################## - - def rss_feed_url(self): - return self.absolute_url(vid='rss') - # abstractions making the whole things (well, some at least) working ###### def sortvalue(self, rtype=None): @@ -154,7 +107,7 @@ """ if rtype is None: return self.dc_title().lower() - value = self.get_value(rtype) + value = self.cw_attr_value(rtype) # do not restrict to `unicode` because Bytes will return a `str` value if isinstance(value, basestring): return self.printable_value(rtype, format='text/plain').lower() @@ -189,35 +142,8 @@ self.__linkto[(rtype, role)] = linkedto return linkedto - # edit controller callbacks ############################################### - - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if hasattr(self, 'parent') and self.parent(): - return self.parent().rest_path(), {} - return str(self.e_schema).lower(), {} - - def pre_web_edit(self): - """callback called by the web editcontroller when an entity will be - created/modified, to let a chance to do some entity specific stuff. - - Do nothing by default. - """ - pass - # server side helpers ##################################################### - def notification_references(self, view): - """used to control References field of email send on notification - for this entity. `view` is the notification view. - - Should return a list of eids which can be used to generate message ids - of previously sent email - """ - return () - # XXX: store a reference to the AnyEntity class since it is hijacked in goa # configuration and we need the actual reference to avoid infinite loops # in mro diff -r b5640328ffad -r 9db65b381028 entities/adapters.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/entities/adapters.py Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,443 @@ +# copyright 2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""some basic entity adapter implementations, for interfaces used in the +framework itself. +""" + +__docformat__ = "restructuredtext en" + +from itertools import chain +from warnings import warn + +from logilab.mtconverter import TransformError +from logilab.common.decorators import cached + +from cubicweb.view import EntityAdapter, implements_adapter_compat +from cubicweb.selectors import implements, relation_possible +from cubicweb.interfaces import IDownloadable, ITree, IProgress, IMileStone + + +class IEmailableAdapter(EntityAdapter): + __regid__ = 'IEmailable' + __select__ = relation_possible('primary_email') | relation_possible('use_email') + + def get_email(self): + if getattr(self.entity, 'primary_email', None): + return self.entity.primary_email[0].address + if getattr(self.entity, 'use_email', None): + return self.entity.use_email[0].address + return None + + def allowed_massmail_keys(self): + """returns a set of allowed email substitution keys + + The default is to return the entity's attribute list but you might + override this method to allow extra keys. For instance, a Person + class might want to return a `companyname` key. + """ + return set(rschema.type + for rschema, attrtype in self.entity.e_schema.attribute_definitions() + if attrtype.type not in ('Password', 'Bytes')) + + def as_email_context(self): + """returns the dictionary as used by the sendmail controller to + build email bodies. + + NOTE: the dictionary keys should match the list returned by the + `allowed_massmail_keys` method. + """ + return dict( (attr, getattr(self.entity, attr)) + for attr in self.allowed_massmail_keys() ) + + +class INotifiableAdapter(EntityAdapter): + __regid__ = 'INotifiable' + __select__ = implements('Any') + + @implements_adapter_compat('INotifiableAdapter') + def notification_references(self, view): + """used to control References field of email send on notification + for this entity. `view` is the notification view. 
+ + Should return a list of eids which can be used to generate message + identifiers of previously sent email(s) + """ + itree = self.entity.cw_adapt_to('ITree') + if itree is not None: + return itree.path()[:-1] + return () + + +class IFTIndexableAdapter(EntityAdapter): + __regid__ = 'IFTIndexable' + __select__ = implements('Any') + + def fti_containers(self, _done=None): + if _done is None: + _done = set() + entity = self.entity + _done.add(entity.eid) + containers = tuple(entity.e_schema.fulltext_containers()) + if containers: + for rschema, target in containers: + if target == 'object': + targets = getattr(entity, rschema.type) + else: + targets = getattr(entity, 'reverse_%s' % rschema) + for entity in targets: + if entity.eid in _done: + continue + for container in entity.cw_adapt_to('IFTIndexable').fti_containers(_done): + yield container + yielded = True + else: + yield entity + + # weight in ABCD + entity_weight = 1.0 + attr_weight = {} + + def get_words(self): + """used by the full text indexer to get words to index + + this method should only be used on the repository side since it depends + on the logilab.database package + + :rtype: list + :return: the list of indexable word of this entity + """ + from logilab.database.fti import tokenize + # take care to cases where we're modyfying the schema + entity = self.entity + pending = self._cw.transaction_data.setdefault('pendingrdefs', set()) + words = {} + for rschema in entity.e_schema.indexable_attributes(): + if (entity.e_schema, rschema) in pending: + continue + weight = self.attr_weight.get(rschema, 'C') + try: + value = entity.printable_value(rschema, format='text/plain') + except TransformError: + continue + except: + self.exception("can't add value of %s to text index for entity %s", + rschema, entity.eid) + continue + if value: + words.setdefault(weight, []).extend(tokenize(value)) + for rschema, role in entity.e_schema.fulltext_relations(): + if role == 'subject': + for entity_ in getattr(entity, rschema.type): + merge_weight_dict(words, entity_.cw_adapt_to('IFTIndexable').get_words()) + else: # if role == 'object': + for entity_ in getattr(entity, 'reverse_%s' % rschema.type): + merge_weight_dict(words, entity_.cw_adapt_to('IFTIndexable').get_words()) + return words + +def merge_weight_dict(maindict, newdict): + for weight, words in newdict.iteritems(): + maindict.setdefault(weight, []).extend(words) + +class IDownloadableAdapter(EntityAdapter): + """interface for downloadable entities""" + __regid__ = 'IDownloadable' + __select__ = implements(IDownloadable) # XXX for bw compat, else should be abstract + + @implements_adapter_compat('IDownloadable') + def download_url(self): # XXX not really part of this interface + """return an url to download entity's content""" + raise NotImplementedError + @implements_adapter_compat('IDownloadable') + def download_content_type(self): + """return MIME type of the downloadable content""" + raise NotImplementedError + @implements_adapter_compat('IDownloadable') + def download_encoding(self): + """return encoding of the downloadable content""" + raise NotImplementedError + @implements_adapter_compat('IDownloadable') + def download_file_name(self): + """return file name of the downloadable content""" + raise NotImplementedError + @implements_adapter_compat('IDownloadable') + def download_data(self): + """return actual data of the downloadable content""" + raise NotImplementedError + + +class ITreeAdapter(EntityAdapter): + """This adapter has to be overriden to be configured using the + 
tree_relation, child_role and parent_role class attributes to + benefit from this default implementation + """ + __regid__ = 'ITree' + __select__ = implements(ITree) # XXX for bw compat, else should be abstract + + child_role = 'subject' + parent_role = 'object' + + @property + def tree_relation(self): + warn('[3.9] tree_attribute is deprecated, define tree_relation on a custom ' + 'ITree for %s instead' % (self.entity.__class__), + DeprecationWarning) + return self.entity.tree_attribute + + @implements_adapter_compat('ITree') + def children_rql(self): + """returns RQL to get children + + XXX should be removed from the public interface + """ + return self.entity.cw_related_rql(self.tree_relation, self.parent_role) + + @implements_adapter_compat('ITree') + def different_type_children(self, entities=True): + """return children entities of different type as this entity. + + according to the `entities` parameter, return entity objects or the + equivalent result set + """ + res = self.entity.related(self.tree_relation, self.parent_role, + entities=entities) + eschema = self.entity.e_schema + if entities: + return [e for e in res if e.e_schema != eschema] + return res.filtered_rset(lambda x: x.e_schema != eschema, self.entity.cw_col) + + @implements_adapter_compat('ITree') + def same_type_children(self, entities=True): + """return children entities of the same type as this entity. + + according to the `entities` parameter, return entity objects or the + equivalent result set + """ + res = self.entity.related(self.tree_relation, self.parent_role, + entities=entities) + eschema = self.entity.e_schema + if entities: + return [e for e in res if e.e_schema == eschema] + return res.filtered_rset(lambda x: x.e_schema is eschema, self.entity.cw_col) + + @implements_adapter_compat('ITree') + def is_leaf(self): + """returns true if this node as no child""" + return len(self.children()) == 0 + + @implements_adapter_compat('ITree') + def is_root(self): + """returns true if this node has no parent""" + return self.parent() is None + + @implements_adapter_compat('ITree') + def root(self): + """return the root object""" + return self._cw.entity_from_eid(self.path()[0]) + + @implements_adapter_compat('ITree') + def parent(self): + """return the parent entity if any, else None (e.g. 
if we are on the + root) + """ + try: + return self.entity.related(self.tree_relation, self.child_role, + entities=True)[0] + except (KeyError, IndexError): + return None + + @implements_adapter_compat('ITree') + def children(self, entities=True, sametype=False): + """return children entities + + according to the `entities` parameter, return entity objects or the + equivalent result set + """ + if sametype: + return self.same_type_children(entities) + else: + return self.entity.related(self.tree_relation, self.parent_role, + entities=entities) + + @implements_adapter_compat('ITree') + def iterparents(self, strict=True): + def _uptoroot(self): + curr = self + while True: + curr = curr.parent() + if curr is None: + break + yield curr + curr = curr.cw_adapt_to('ITree') + if not strict: + return chain([self.entity], _uptoroot(self)) + return _uptoroot(self) + + @implements_adapter_compat('ITree') + def iterchildren(self, _done=None): + """iterates over the item's children""" + if _done is None: + _done = set() + for child in self.children(): + if child.eid in _done: + self.error('loop in %s tree', child.__regid__.lower()) + continue + yield child + _done.add(child.eid) + + @implements_adapter_compat('ITree') + def prefixiter(self, _done=None): + if _done is None: + _done = set() + if self.entity.eid in _done: + return + _done.add(self.entity.eid) + yield self.entity + for child in self.same_type_children(): + for entity in child.cw_adapt_to('ITree').prefixiter(_done): + yield entity + + @cached + @implements_adapter_compat('ITree') + def path(self): + """returns the list of eids from the root object to this object""" + path = [] + adapter = self + entity = adapter.entity + while entity is not None: + if entity.eid in path: + self.error('loop in %s tree', entity.__regid__.lower()) + break + path.append(entity.eid) + try: + # check we are not jumping to another tree + if (adapter.tree_relation != self.tree_relation or + adapter.child_role != self.child_role): + break + entity = adapter.parent() + adapter = entity.cw_adapt_to('ITree') + except AttributeError: + break + path.reverse() + return path + + +class IProgressAdapter(EntityAdapter): + """something that has a cost, a state and a progression. + + You should at least override progress_info an in_progress methods on concret + implementations. + """ + __regid__ = 'IProgress' + __select__ = implements(IProgress) # XXX for bw compat, should be abstract + + @property + @implements_adapter_compat('IProgress') + def cost(self): + """the total cost""" + return self.progress_info()['estimated'] + + @property + @implements_adapter_compat('IProgress') + def revised_cost(self): + return self.progress_info().get('estimatedcorrected', self.cost) + + @property + @implements_adapter_compat('IProgress') + def done(self): + """what is already done""" + return self.progress_info()['done'] + + @property + @implements_adapter_compat('IProgress') + def todo(self): + """what remains to be done""" + return self.progress_info()['todo'] + + @implements_adapter_compat('IProgress') + def progress_info(self): + """returns a dictionary describing progress/estimated cost of the + version. 
+ + - mandatory keys are (''estimated', 'done', 'todo') + + - optional keys are ('notestimated', 'notestimatedcorrected', + 'estimatedcorrected') + + 'noestimated' and 'notestimatedcorrected' should default to 0 + 'estimatedcorrected' should default to 'estimated' + """ + raise NotImplementedError + + @implements_adapter_compat('IProgress') + def finished(self): + """returns True if status is finished""" + return not self.in_progress() + + @implements_adapter_compat('IProgress') + def in_progress(self): + """returns True if status is not finished""" + raise NotImplementedError + + @implements_adapter_compat('IProgress') + def progress(self): + """returns the % progress of the task item""" + try: + return 100. * self.done / self.revised_cost + except ZeroDivisionError: + # total cost is 0 : if everything was estimated, task is completed + if self.progress_info().get('notestimated'): + return 0. + return 100 + + @implements_adapter_compat('IProgress') + def progress_class(self): + return '' + + +class IMileStoneAdapter(IProgressAdapter): + __regid__ = 'IMileStone' + __select__ = implements(IMileStone) # XXX for bw compat, should be abstract + + parent_type = None # specify main task's type + + @implements_adapter_compat('IMileStone') + def get_main_task(self): + """returns the main ITask entity""" + raise NotImplementedError + + @implements_adapter_compat('IMileStone') + def initial_prevision_date(self): + """returns the initial expected end of the milestone""" + raise NotImplementedError + + @implements_adapter_compat('IMileStone') + def eta_date(self): + """returns expected date of completion based on what remains + to be done + """ + raise NotImplementedError + + @implements_adapter_compat('IMileStone') + def completion_date(self): + """returns date on which the subtask has been completed""" + raise NotImplementedError + + @implements_adapter_compat('IMileStone') + def contractors(self): + """returns the list of persons supposed to work on this task""" + raise NotImplementedError diff -r b5640328ffad -r 9db65b381028 entities/authobjs.py --- a/entities/authobjs.py Thu Jul 01 09:23:39 2010 +0200 +++ b/entities/authobjs.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""entity classes user and group entities +"""entity classes user and group entities""" -""" __docformat__ = "restructuredtext en" from logilab.common.decorators import cached diff -r b5640328ffad -r 9db65b381028 entities/lib.py --- a/entities/lib.py Thu Jul 01 09:23:39 2010 +0200 +++ b/entities/lib.py Thu Jul 01 17:06:37 2010 +0200 @@ -48,13 +48,13 @@ @property def email_of(self): - return self.reverse_use_email and self.reverse_use_email[0] + return self.reverse_use_email and self.reverse_use_email[0] or None @property def prefered(self): return self.prefered_form and self.prefered_form[0] or self - @deprecated('use .prefered') + @deprecated('[3.6] use .prefered') def canonical_form(self): return self.prefered_form and self.prefered_form[0] or self @@ -89,14 +89,6 @@ return self.display_address() return super(EmailAddress, self).printable_value(attr, value, attrtype, format) - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.email_of: - return self.email_of.rest_path(), {} - return super(EmailAddress, self).after_deletion_path() - class Bookmark(AnyEntity): """customized class for Bookmark entities""" @@ -133,12 +125,6 @@ except UnknownProperty: return u'' - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - return 'view', {} - class CWCache(AnyEntity): """Cache""" diff -r b5640328ffad -r 9db65b381028 entities/schemaobjs.py --- a/entities/schemaobjs.py Thu Jul 01 09:23:39 2010 +0200 +++ b/entities/schemaobjs.py Thu Jul 01 17:06:37 2010 +0200 @@ -115,14 +115,6 @@ scard, self.relation_type[0].name, ocard, self.to_entity[0].name) - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.relation_type: - return self.relation_type[0].rest_path(), {} - return super(CWRelation, self).after_deletion_path() - @property def rtype(self): return self.relation_type[0] @@ -139,6 +131,7 @@ rschema = self._cw.vreg.schema.rschema(self.rtype.name) return rschema.rdefs[(self.stype.name, self.otype.name)] + class CWAttribute(CWRelation): __regid__ = 'CWAttribute' @@ -160,14 +153,6 @@ def dc_title(self): return '%s(%s)' % (self.cstrtype[0].name, self.value or u'') - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.reverse_constrained_by: - return self.reverse_constrained_by[0].rest_path(), {} - return super(CWConstraint, self).after_deletion_path() - @property def type(self): return self.cstrtype[0].name @@ -201,14 +186,6 @@ def check_expression(self, *args, **kwargs): return self._rqlexpr().check(*args, **kwargs) - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.expression_of: - return self.expression_of.rest_path(), {} - return super(RQLExpression, self).after_deletion_path() - class CWPermission(AnyEntity): __regid__ = 'CWPermission' @@ -218,12 +195,3 @@ if self.label: return '%s (%s)' % (self._cw._(self.name), self.label) return self._cw._(self.name) - - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - permissionof = getattr(self, 'reverse_require_permission', ()) - if 
len(permissionof) == 1: - return permissionof[0].rest_path(), {} - return super(CWPermission, self).after_deletion_path() diff -r b5640328ffad -r 9db65b381028 entities/test/unittest_base.py --- a/entities/test/unittest_base.py Thu Jul 01 09:23:39 2010 +0200 +++ b/entities/test/unittest_base.py Thu Jul 01 17:06:37 2010 +0200 @@ -27,7 +27,7 @@ from cubicweb.devtools.testlib import CubicWebTC from cubicweb import ValidationError -from cubicweb.interfaces import IMileStone, IWorkflowable +from cubicweb.interfaces import IMileStone, ICalendarable from cubicweb.entities import AnyEntity @@ -106,7 +106,7 @@ def test_allowed_massmail_keys(self): e = self.execute('CWUser U WHERE U login "member"').get_entity(0, 0) # Bytes/Password attributes should be omited - self.assertEquals(e.allowed_massmail_keys(), + self.assertEquals(e.cw_adapt_to('IEmailable').allowed_massmail_keys(), set(('surname', 'firstname', 'login', 'last_login_time', 'creation_date', 'modification_date', 'cwuri', 'eid')) ) @@ -115,8 +115,9 @@ class InterfaceTC(CubicWebTC): def test_nonregr_subclasses_and_mixins_interfaces(self): + from cubicweb.entities.wfobjs import WorkflowableMixIn + WorkflowableMixIn.__implements__ = (ICalendarable,) CWUser = self.vreg['etypes'].etype_class('CWUser') - self.failUnless(implements(CWUser, IWorkflowable)) class MyUser(CWUser): __implements__ = (IMileStone,) self.vreg._loadedmods[__name__] = {} @@ -126,10 +127,10 @@ # a copy is done systematically self.failUnless(issubclass(MyUser_, MyUser)) self.failUnless(implements(MyUser_, IMileStone)) - self.failUnless(implements(MyUser_, IWorkflowable)) + self.failUnless(implements(MyUser_, ICalendarable)) # original class should not have beed modified, only the copy self.failUnless(implements(MyUser, IMileStone)) - self.failIf(implements(MyUser, IWorkflowable)) + self.failIf(implements(MyUser, ICalendarable)) class SpecializedEntityClassesTC(CubicWebTC): diff -r b5640328ffad -r 9db65b381028 entities/test/unittest_wfobjs.py --- a/entities/test/unittest_wfobjs.py Thu Jul 01 09:23:39 2010 +0200 +++ b/entities/test/unittest_wfobjs.py Thu Jul 01 17:06:37 2010 +0200 @@ -100,35 +100,38 @@ def test_workflow_base(self): e = self.create_user('toto') - self.assertEquals(e.state, 'activated') - e.change_state('deactivated', u'deactivate 1') + iworkflowable = e.cw_adapt_to('IWorkflowable') + self.assertEquals(iworkflowable.state, 'activated') + iworkflowable.change_state('deactivated', u'deactivate 1') self.commit() - e.change_state('activated', u'activate 1') + iworkflowable.change_state('activated', u'activate 1') self.commit() - e.change_state('deactivated', u'deactivate 2') + iworkflowable.change_state('deactivated', u'deactivate 2') self.commit() - e.clear_related_cache('wf_info_for', 'object') + e.cw_clear_relation_cache('wf_info_for', 'object') self.assertEquals([tr.comment for tr in e.reverse_wf_info_for], ['deactivate 1', 'activate 1', 'deactivate 2']) - self.assertEquals(e.latest_trinfo().comment, 'deactivate 2') + self.assertEquals(iworkflowable.latest_trinfo().comment, 'deactivate 2') def test_possible_transitions(self): user = self.execute('CWUser X').get_entity(0, 0) - trs = list(user.possible_transitions()) + iworkflowable = user.cw_adapt_to('IWorkflowable') + trs = list(iworkflowable.possible_transitions()) self.assertEquals(len(trs), 1) self.assertEquals(trs[0].name, u'deactivate') self.assertEquals(trs[0].destination(None).name, u'deactivated') # test a std user get no possible transition cnx = self.login('member') # fetch the entity using the new session 
- trs = list(cnx.user().possible_transitions()) + trs = list(cnx.user().cw_adapt_to('IWorkflowable').possible_transitions()) self.assertEquals(len(trs), 0) def _test_manager_deactivate(self, user): - user.clear_related_cache('in_state', 'subject') + iworkflowable = user.cw_adapt_to('IWorkflowable') + user.cw_clear_relation_cache('in_state', 'subject') self.assertEquals(len(user.in_state), 1) - self.assertEquals(user.state, 'deactivated') - trinfo = user.latest_trinfo() + self.assertEquals(iworkflowable.state, 'deactivated') + trinfo = iworkflowable.latest_trinfo() self.assertEquals(trinfo.previous_state.name, 'activated') self.assertEquals(trinfo.new_state.name, 'deactivated') self.assertEquals(trinfo.comment, 'deactivate user') @@ -137,7 +140,8 @@ def test_change_state(self): user = self.user() - user.change_state('deactivated', comment=u'deactivate user') + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.change_state('deactivated', comment=u'deactivate user') trinfo = self._test_manager_deactivate(user) self.assertEquals(trinfo.transition, None) @@ -154,33 +158,36 @@ def test_fire_transition(self): user = self.user() - user.fire_transition('deactivate', comment=u'deactivate user') + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate', comment=u'deactivate user') user.clear_all_caches() - self.assertEquals(user.state, 'deactivated') + self.assertEquals(iworkflowable.state, 'deactivated') self._test_manager_deactivate(user) trinfo = self._test_manager_deactivate(user) self.assertEquals(trinfo.transition.name, 'deactivate') def test_goback_transition(self): - wf = self.session.user.current_workflow + wf = self.session.user.cw_adapt_to('IWorkflowable').current_workflow asleep = wf.add_state('asleep') - wf.add_transition('rest', (wf.state_by_name('activated'), wf.state_by_name('deactivated')), - asleep) + wf.add_transition('rest', (wf.state_by_name('activated'), + wf.state_by_name('deactivated')), + asleep) wf.add_transition('wake up', asleep) user = self.create_user('stduser') - user.fire_transition('rest') + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('rest') self.commit() - user.fire_transition('wake up') + iworkflowable.fire_transition('wake up') self.commit() - self.assertEquals(user.state, 'activated') - user.fire_transition('deactivate') + self.assertEquals(iworkflowable.state, 'activated') + iworkflowable.fire_transition('deactivate') self.commit() - user.fire_transition('rest') + iworkflowable.fire_transition('rest') self.commit() - user.fire_transition('wake up') + iworkflowable.fire_transition('wake up') self.commit() user.clear_all_caches() - self.assertEquals(user.state, 'deactivated') + self.assertEquals(iworkflowable.state, 'deactivated') # XXX test managers can change state without matching transition @@ -189,18 +196,18 @@ self.create_user('tutu') cnx = self.login('tutu') req = self.request() - member = req.entity_from_eid(self.member.eid) + iworkflowable = req.entity_from_eid(self.member.eid).cw_adapt_to('IWorkflowable') ex = self.assertRaises(ValidationError, - member.fire_transition, 'deactivate') + iworkflowable.fire_transition, 'deactivate') self.assertEquals(ex.errors, {'by_transition-subject': "transition may not be fired"}) cnx.close() cnx = self.login('member') req = self.request() - member = req.entity_from_eid(self.member.eid) - member.fire_transition('deactivate') + iworkflowable = req.entity_from_eid(self.member.eid).cw_adapt_to('IWorkflowable') + 
iworkflowable.fire_transition('deactivate') cnx.commit() ex = self.assertRaises(ValidationError, - member.fire_transition, 'activate') + iworkflowable.fire_transition, 'activate') self.assertEquals(ex.errors, {'by_transition-subject': "transition may not be fired"}) def test_fire_transition_owned_by(self): @@ -250,43 +257,44 @@ [(swfstate2, state2), (swfstate3, state3)]) self.assertEquals(swftr1.destination(None).eid, swfstate1.eid) # workflows built, begin test - self.group = self.request().create_entity('CWGroup', name=u'grp1') + group = self.request().create_entity('CWGroup', name=u'grp1') self.commit() - self.assertEquals(self.group.current_state.eid, state1.eid) - self.assertEquals(self.group.current_workflow.eid, mwf.eid) - self.assertEquals(self.group.main_workflow.eid, mwf.eid) - self.assertEquals(self.group.subworkflow_input_transition(), None) - self.group.fire_transition('swftr1', u'go') + iworkflowable = group.cw_adapt_to('IWorkflowable') + self.assertEquals(iworkflowable.current_state.eid, state1.eid) + self.assertEquals(iworkflowable.current_workflow.eid, mwf.eid) + self.assertEquals(iworkflowable.main_workflow.eid, mwf.eid) + self.assertEquals(iworkflowable.subworkflow_input_transition(), None) + iworkflowable.fire_transition('swftr1', u'go') self.commit() - self.group.clear_all_caches() - self.assertEquals(self.group.current_state.eid, swfstate1.eid) - self.assertEquals(self.group.current_workflow.eid, swf.eid) - self.assertEquals(self.group.main_workflow.eid, mwf.eid) - self.assertEquals(self.group.subworkflow_input_transition().eid, swftr1.eid) - self.group.fire_transition('tr1', u'go') + group.clear_all_caches() + self.assertEquals(iworkflowable.current_state.eid, swfstate1.eid) + self.assertEquals(iworkflowable.current_workflow.eid, swf.eid) + self.assertEquals(iworkflowable.main_workflow.eid, mwf.eid) + self.assertEquals(iworkflowable.subworkflow_input_transition().eid, swftr1.eid) + iworkflowable.fire_transition('tr1', u'go') self.commit() - self.group.clear_all_caches() - self.assertEquals(self.group.current_state.eid, state2.eid) - self.assertEquals(self.group.current_workflow.eid, mwf.eid) - self.assertEquals(self.group.main_workflow.eid, mwf.eid) - self.assertEquals(self.group.subworkflow_input_transition(), None) + group.clear_all_caches() + self.assertEquals(iworkflowable.current_state.eid, state2.eid) + self.assertEquals(iworkflowable.current_workflow.eid, mwf.eid) + self.assertEquals(iworkflowable.main_workflow.eid, mwf.eid) + self.assertEquals(iworkflowable.subworkflow_input_transition(), None) # force back to swfstate1 is impossible since we can't any more find # subworkflow input transition ex = self.assertRaises(ValidationError, - self.group.change_state, swfstate1, u'gadget') + iworkflowable.change_state, swfstate1, u'gadget') self.assertEquals(ex.errors, {'to_state-subject': "state doesn't belong to entity's workflow"}) self.rollback() # force back to state1 - self.group.change_state('state1', u'gadget') - self.group.fire_transition('swftr1', u'au') - self.group.clear_all_caches() - self.group.fire_transition('tr2', u'chapeau') + iworkflowable.change_state('state1', u'gadget') + iworkflowable.fire_transition('swftr1', u'au') + group.clear_all_caches() + iworkflowable.fire_transition('tr2', u'chapeau') self.commit() - self.group.clear_all_caches() - self.assertEquals(self.group.current_state.eid, state3.eid) - self.assertEquals(self.group.current_workflow.eid, mwf.eid) - self.assertEquals(self.group.main_workflow.eid, mwf.eid) - 
self.assertListEquals(parse_hist(self.group.workflow_history), + group.clear_all_caches() + self.assertEquals(iworkflowable.current_state.eid, state3.eid) + self.assertEquals(iworkflowable.current_workflow.eid, mwf.eid) + self.assertEquals(iworkflowable.main_workflow.eid, mwf.eid) + self.assertListEquals(parse_hist(iworkflowable.workflow_history), [('state1', 'swfstate1', 'swftr1', 'go'), ('swfstate1', 'swfstate2', 'tr1', 'go'), ('swfstate2', 'state2', 'swftr1', 'exiting from subworkflow subworkflow'), @@ -337,8 +345,9 @@ self.commit() group = self.request().create_entity('CWGroup', name=u'grp1') self.commit() + iworkflowable = group.cw_adapt_to('IWorkflowable') for trans in ('identify', 'release', 'close'): - group.fire_transition(trans) + iworkflowable.fire_transition(trans) self.commit() @@ -362,6 +371,7 @@ self.commit() group = self.request().create_entity('CWGroup', name=u'grp1') self.commit() + iworkflowable = group.cw_adapt_to('IWorkflowable') for trans, nextstate in (('identify', 'xsigning'), ('xabort', 'created'), ('identify', 'xsigning'), @@ -369,10 +379,10 @@ ('release', 'xsigning'), ('xabort', 'identified') ): - group.fire_transition(trans) + iworkflowable.fire_transition(trans) self.commit() group.clear_all_caches() - self.assertEquals(group.state, nextstate) + self.assertEquals(iworkflowable.state, nextstate) class CustomWorkflowTC(CubicWebTC): @@ -389,35 +399,38 @@ self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': wf.eid, 'x': self.member.eid}) self.member.clear_all_caches() - self.assertEquals(self.member.state, 'activated')# no change before commit + iworkflowable = self.member.cw_adapt_to('IWorkflowable') + self.assertEquals(iworkflowable.state, 'activated')# no change before commit self.commit() self.member.clear_all_caches() - self.assertEquals(self.member.current_workflow.eid, wf.eid) - self.assertEquals(self.member.state, 'asleep') - self.assertEquals(self.member.workflow_history, ()) + self.assertEquals(iworkflowable.current_workflow.eid, wf.eid) + self.assertEquals(iworkflowable.state, 'asleep') + self.assertEquals(iworkflowable.workflow_history, ()) def test_custom_wf_replace_state_keep_history(self): """member in inital state with some history, state is redirected and state change is recorded to history """ - self.member.fire_transition('deactivate') - self.member.fire_transition('activate') + iworkflowable = self.member.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') + iworkflowable.fire_transition('activate') wf = add_wf(self, 'CWUser') wf.add_state('asleep', initial=True) self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': wf.eid, 'x': self.member.eid}) self.commit() self.member.clear_all_caches() - self.assertEquals(self.member.current_workflow.eid, wf.eid) - self.assertEquals(self.member.state, 'asleep') - self.assertEquals(parse_hist(self.member.workflow_history), + self.assertEquals(iworkflowable.current_workflow.eid, wf.eid) + self.assertEquals(iworkflowable.state, 'asleep') + self.assertEquals(parse_hist(iworkflowable.workflow_history), [('activated', 'deactivated', 'deactivate', None), ('deactivated', 'activated', 'activate', None), ('activated', 'asleep', None, 'workflow changed to "CWUser"')]) def test_custom_wf_no_initial_state(self): """try to set a custom workflow which has no initial state""" - self.member.fire_transition('deactivate') + iworkflowable = self.member.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') wf = add_wf(self, 
'CWUser') wf.add_state('asleep') self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', @@ -438,7 +451,8 @@ """member in some state shared by the new workflow, nothing has to be done """ - self.member.fire_transition('deactivate') + iworkflowable = self.member.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') wf = add_wf(self, 'CWUser') wf.add_state('asleep', initial=True) self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', @@ -447,12 +461,12 @@ self.execute('DELETE X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': wf.eid, 'x': self.member.eid}) self.member.clear_all_caches() - self.assertEquals(self.member.state, 'asleep')# no change before commit + self.assertEquals(iworkflowable.state, 'asleep')# no change before commit self.commit() self.member.clear_all_caches() - self.assertEquals(self.member.current_workflow.name, "default user workflow") - self.assertEquals(self.member.state, 'activated') - self.assertEquals(parse_hist(self.member.workflow_history), + self.assertEquals(iworkflowable.current_workflow.name, "default user workflow") + self.assertEquals(iworkflowable.state, 'activated') + self.assertEquals(parse_hist(iworkflowable.workflow_history), [('activated', 'deactivated', 'deactivate', None), ('deactivated', 'asleep', None, 'workflow changed to "CWUser"'), ('asleep', 'activated', None, 'workflow changed to "default user workflow"'),]) @@ -473,28 +487,29 @@ def test_auto_transition_fired(self): wf = self.setup_custom_wf() user = self.create_user('member') + iworkflowable = user.cw_adapt_to('IWorkflowable') self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': wf.eid, 'x': user.eid}) self.commit() user.clear_all_caches() - self.assertEquals(user.state, 'asleep') - self.assertEquals([t.name for t in user.possible_transitions()], + self.assertEquals(iworkflowable.state, 'asleep') + self.assertEquals([t.name for t in iworkflowable.possible_transitions()], ['rest']) - user.fire_transition('rest') + iworkflowable.fire_transition('rest') self.commit() user.clear_all_caches() - self.assertEquals(user.state, 'asleep') - self.assertEquals([t.name for t in user.possible_transitions()], + self.assertEquals(iworkflowable.state, 'asleep') + self.assertEquals([t.name for t in iworkflowable.possible_transitions()], ['rest']) - self.assertEquals(parse_hist(user.workflow_history), + self.assertEquals(parse_hist(iworkflowable.workflow_history), [('asleep', 'asleep', 'rest', None)]) user.set_attributes(surname=u'toto') # fulfill condition self.commit() - user.fire_transition('rest') + iworkflowable.fire_transition('rest') self.commit() user.clear_all_caches() - self.assertEquals(user.state, 'dead') - self.assertEquals(parse_hist(user.workflow_history), + self.assertEquals(iworkflowable.state, 'dead') + self.assertEquals(parse_hist(iworkflowable.workflow_history), [('asleep', 'asleep', 'rest', None), ('asleep', 'asleep', 'rest', None), ('asleep', 'dead', 'sick', None),]) @@ -505,7 +520,8 @@ self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s', {'wf': wf.eid, 'x': user.eid}) self.commit() - self.assertEquals(user.state, 'dead') + iworkflowable = user.cw_adapt_to('IWorkflowable') + self.assertEquals(iworkflowable.state, 'dead') def test_auto_transition_initial_state_fired(self): wf = self.execute('Any WF WHERE ET default_workflow WF, ' @@ -517,14 +533,15 @@ self.commit() user = self.create_user('member', surname=u'toto') self.commit() - self.assertEquals(user.state, 'dead') + 
iworkflowable = user.cw_adapt_to('IWorkflowable') + self.assertEquals(iworkflowable.state, 'dead') class WorkflowHooksTC(CubicWebTC): def setUp(self): CubicWebTC.setUp(self) - self.wf = self.session.user.current_workflow + self.wf = self.session.user.cw_adapt_to('IWorkflowable').current_workflow self.session.set_pool() self.s_activated = self.wf.state_by_name('activated').eid self.s_deactivated = self.wf.state_by_name('deactivated').eid @@ -572,8 +589,9 @@ def test_transition_checking1(self): cnx = self.login('stduser') user = cnx.user(self.session) + iworkflowable = user.cw_adapt_to('IWorkflowable') ex = self.assertRaises(ValidationError, - user.fire_transition, 'activate') + iworkflowable.fire_transition, 'activate') self.assertEquals(self._cleanup_msg(ex.errors['by_transition-subject']), u"transition isn't allowed from") cnx.close() @@ -581,8 +599,9 @@ def test_transition_checking2(self): cnx = self.login('stduser') user = cnx.user(self.session) + iworkflowable = user.cw_adapt_to('IWorkflowable') ex = self.assertRaises(ValidationError, - user.fire_transition, 'dummy') + iworkflowable.fire_transition, 'dummy') self.assertEquals(self._cleanup_msg(ex.errors['by_transition-subject']), u"transition isn't allowed from") cnx.close() @@ -591,15 +610,16 @@ cnx = self.login('stduser') session = self.session user = cnx.user(session) - user.fire_transition('deactivate') + iworkflowable = user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') cnx.commit() session.set_pool() ex = self.assertRaises(ValidationError, - user.fire_transition, 'deactivate') + iworkflowable.fire_transition, 'deactivate') self.assertEquals(self._cleanup_msg(ex.errors['by_transition-subject']), u"transition isn't allowed from") # get back now - user.fire_transition('activate') + iworkflowable.fire_transition('activate') cnx.commit() cnx.close() diff -r b5640328ffad -r 9db65b381028 entities/wfobjs.py --- a/entities/wfobjs.py Thu Jul 01 09:23:39 2010 +0200 +++ b/entities/wfobjs.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,13 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""workflow definition and history related entities +"""workflow handling: +* entity types defining workflow (Workflow, State, Transition...) +* workflow history (TrInfo) +* adapter for workflowable entities (IWorkflowableAdapter) """ + __docformat__ = "restructuredtext en" from warnings import warn @@ -27,7 +31,8 @@ from logilab.common.compat import any from cubicweb.entities import AnyEntity, fetch_config -from cubicweb.interfaces import IWorkflowable +from cubicweb.view import EntityAdapter +from cubicweb.selectors import relation_possible from cubicweb.mixins import MI_REL_TRIGGERS class WorkflowException(Exception): pass @@ -47,15 +52,6 @@ return any(et for et in self.reverse_default_workflow if et.name == etype) - # XXX define parent() instead? what if workflow of multiple types? 
- def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.workflow_of: - return self.workflow_of[0].rest_path(), {'vid': 'workflow'} - return super(Workflow, self).after_deletion_path() - def iter_workflows(self, _done=None): """return an iterator on actual workflows, eg this workflow and its subworkflows @@ -177,7 +173,7 @@ {'os': todelstate.eid, 'ns': replacement.eid}) execute('SET X to_state NS WHERE X to_state OS, OS eid %(os)s, NS eid %(ns)s', {'os': todelstate.eid, 'ns': replacement.eid}) - todelstate.delete() + todelstate.cw_delete() class BaseTransition(AnyEntity): @@ -226,14 +222,6 @@ return False return True - def after_deletion_path(self): - """return (path, parameters) which should be used as redirect - information when this entity is being deleted - """ - if self.transition_of: - return self.transition_of[0].rest_path(), {} - return super(BaseTransition, self).after_deletion_path() - def set_permissions(self, requiredgroups=(), conditions=(), reset=True): """set or add (if `reset` is False) groups and conditions for this transition @@ -277,7 +265,7 @@ try: return self.destination_state[0] except IndexError: - return entity.latest_trinfo().previous_state + return entity.cw_adapt_to('IWorkflowable').latest_trinfo().previous_state def potential_destinations(self): try: @@ -288,9 +276,6 @@ for previousstate in tr.reverse_allowed_transition: yield previousstate - def parent(self): - return self.workflow - class WorkflowTransition(BaseTransition): """customized class for WorkflowTransition entities""" @@ -331,7 +316,7 @@ return None if tostateeid is None: # go back to state from which we've entered the subworkflow - return entity.subworkflow_input_trinfo().previous_state + return entity.cw_adapt_to('IWorkflowable').subworkflow_input_trinfo().previous_state return self._cw.entity_from_eid(tostateeid) @cached @@ -358,9 +343,6 @@ def destination(self): return self.destination_state and self.destination_state[0] or None - def parent(self): - return self.reverse_subworkflow_exit[0] - class State(AnyEntity): """customized class for State entities""" @@ -371,10 +353,7 @@ @property def workflow(self): # take care, may be missing in multi-sources configuration - return self.state_of and self.state_of[0] - - def parent(self): - return self.workflow + return self.state_of and self.state_of[0] or None class TrInfo(AnyEntity): @@ -399,22 +378,99 @@ def transition(self): return self.by_transition and self.by_transition[0] or None - def parent(self): - return self.for_entity - class WorkflowableMixIn(object): """base mixin providing workflow helper methods for workflowable entities. 
This mixin will be automatically set on class supporting the 'in_state' relation (which implies supporting 'wf_info_for' as well) """ - __implements__ = (IWorkflowable,) + + @property + @deprecated('[3.5] use printable_state') + def displayable_state(self): + return self._cw._(self.state) + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').main_workflow") + def main_workflow(self): + return self.cw_adapt_to('IWorkflowable').main_workflow + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').current_workflow") + def current_workflow(self): + return self.cw_adapt_to('IWorkflowable').current_workflow + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').current_state") + def current_state(self): + return self.cw_adapt_to('IWorkflowable').current_state + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').state") + def state(self): + return self.cw_adapt_to('IWorkflowable').state + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').printable_state") + def printable_state(self): + return self.cw_adapt_to('IWorkflowable').printable_state + @property + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').workflow_history") + def workflow_history(self): + return self.cw_adapt_to('IWorkflowable').workflow_history + + @deprecated('[3.5] get transition from current workflow and use its may_be_fired method') + def can_pass_transition(self, trname): + """return the Transition instance if the current user can fire the + transition with the given name, else None + """ + tr = self.current_workflow and self.current_workflow.transition_by_name(trname) + if tr and tr.may_be_fired(self.eid): + return tr + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').cwetype_workflow()") + def cwetype_workflow(self): + return self.cw_adapt_to('IWorkflowable').main_workflow() + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').latest_trinfo()") + def latest_trinfo(self): + return self.cw_adapt_to('IWorkflowable').latest_trinfo() + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').possible_transitions()") + def possible_transitions(self, type='normal'): + return self.cw_adapt_to('IWorkflowable').possible_transitions(type) + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').fire_transition()") + def fire_transition(self, tr, comment=None, commentformat=None): + return self.cw_adapt_to('IWorkflowable').fire_transition(tr, comment, commentformat) + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').change_state()") + def change_state(self, statename, comment=None, commentformat=None, tr=None): + return self.cw_adapt_to('IWorkflowable').change_state(statename, comment, commentformat, tr) + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').subworkflow_input_trinfo()") + def subworkflow_input_trinfo(self): + return self.cw_adapt_to('IWorkflowable').subworkflow_input_trinfo() + @deprecated("[3.9] use entity.cw_adapt_to('IWorkflowable').subworkflow_input_transition()") + def subworkflow_input_transition(self): + return self.cw_adapt_to('IWorkflowable').subworkflow_input_transition() + + +MI_REL_TRIGGERS[('in_state', 'subject')] = WorkflowableMixIn + + + +class IWorkflowableAdapter(WorkflowableMixIn, EntityAdapter): + """base adapter providing workflow helper methods for workflowable entities. 
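The hunks above keep the old mixin methods only as deprecation shims and introduce IWorkflowableAdapter, an EntityAdapter selected through relation_possible('in_state'). A sketch of the general shape of such an adapter plus its backward-compatibility shim, with hypothetical names ('IMyFeature', 'my_relation'):

.. sourcecode:: python

    from logilab.common.deprecation import deprecated
    from cubicweb.view import EntityAdapter
    from cubicweb.selectors import relation_possible

    class IMyFeatureAdapter(EntityAdapter):
        """hypothetical adapter: selectable for entity types supporting 'my_relation'"""
        __regid__ = 'IMyFeature'
        __select__ = relation_possible('my_relation')

        @property
        def summary(self):
            # the adapted entity is available as self.entity
            return u'%s (%s)' % (self.entity.dc_title(), self.entity.__regid__)

    class MyFeatureMixIn(object):
        """hypothetical mixin kept for backward compatibility only"""

        @property
        @deprecated("[3.9] use entity.cw_adapt_to('IMyFeature').summary")
        def summary(self):
            return self.cw_adapt_to('IMyFeature').summary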
+ """ + __regid__ = 'IWorkflowable' + __select__ = relation_possible('in_state') + + @cached + def cwetype_workflow(self): + """return the default workflow for entities of this type""" + # XXX CWEType method + wfrset = self._cw.execute('Any WF WHERE ET default_workflow WF, ' + 'ET name %(et)s', {'et': self.entity.__regid__}) + if wfrset: + return wfrset.get_entity(0, 0) + self.warning("can't find any workflow for %s", self.entity.__regid__) + return None @property def main_workflow(self): """return current workflow applied to this entity""" - if self.custom_workflow: - return self.custom_workflow[0] + if self.entity.custom_workflow: + return self.entity.custom_workflow[0] return self.cwetype_workflow() @property @@ -425,14 +481,14 @@ @property def current_state(self): """return current state entity""" - return self.in_state and self.in_state[0] or None + return self.entity.in_state and self.entity.in_state[0] or None @property def state(self): """return current state name""" try: - return self.in_state[0].name - except IndexError: + return self.current_state.name + except AttributeError: self.warning('entity %s has no state', self) return None @@ -449,26 +505,15 @@ """return the workflow history for this entity (eg ordered list of TrInfo entities) """ - return self.reverse_wf_info_for + return self.entity.reverse_wf_info_for def latest_trinfo(self): """return the latest transition information for this entity""" try: - return self.reverse_wf_info_for[-1] + return self.workflow_history[-1] except IndexError: return None - @cached - def cwetype_workflow(self): - """return the default workflow for entities of this type""" - # XXX CWEType method - wfrset = self._cw.execute('Any WF WHERE ET default_workflow WF, ' - 'ET name %(et)s', {'et': self.__regid__}) - if wfrset: - return wfrset.get_entity(0, 0) - self.warning("can't find any workflow for %s", self.__regid__) - return None - def possible_transitions(self, type='normal'): """generates transition that MAY be fired for the given entity, expected to be in this state @@ -483,16 +528,44 @@ {'x': self.current_state.eid, 'type': type, 'wfeid': self.current_workflow.eid}) for tr in rset.entities(): - if tr.may_be_fired(self.eid): + if tr.may_be_fired(self.entity.eid): yield tr + def subworkflow_input_trinfo(self): + """return the TrInfo which has be recorded when this entity went into + the current sub-workflow + """ + if self.main_workflow.eid == self.current_workflow.eid: + return # doesn't make sense + subwfentries = [] + for trinfo in self.workflow_history: + if (trinfo.transition and + trinfo.previous_state.workflow.eid != trinfo.new_state.workflow.eid): + # entering or leaving a subworkflow + if (subwfentries and + subwfentries[-1].new_state.workflow.eid == trinfo.previous_state.workflow.eid and + subwfentries[-1].previous_state.workflow.eid == trinfo.new_state.workflow.eid): + # leave + del subwfentries[-1] + else: + # enter + subwfentries.append(trinfo) + if not subwfentries: + return None + return subwfentries[-1] + + def subworkflow_input_transition(self): + """return the transition which has went through the current sub-workflow + """ + return getattr(self.subworkflow_input_trinfo(), 'transition', None) + def _add_trinfo(self, comment, commentformat, treid=None, tseid=None): kwargs = {} if comment is not None: kwargs['comment'] = comment if commentformat is not None: kwargs['comment_format'] = commentformat - kwargs['wf_info_for'] = self + kwargs['wf_info_for'] = self.entity if treid is not None: kwargs['by_transition'] = 
self._cw.entity_from_eid(treid) if tseid is not None: @@ -532,51 +605,3 @@ stateeid = state.eid # XXX try to find matching transition? return self._add_trinfo(comment, commentformat, tr and tr.eid, stateeid) - - def subworkflow_input_trinfo(self): - """return the TrInfo which has be recorded when this entity went into - the current sub-workflow - """ - if self.main_workflow.eid == self.current_workflow.eid: - return # doesn't make sense - subwfentries = [] - for trinfo in self.workflow_history: - if (trinfo.transition and - trinfo.previous_state.workflow.eid != trinfo.new_state.workflow.eid): - # entering or leaving a subworkflow - if (subwfentries and - subwfentries[-1].new_state.workflow.eid == trinfo.previous_state.workflow.eid and - subwfentries[-1].previous_state.workflow.eid == trinfo.new_state.workflow.eid): - # leave - del subwfentries[-1] - else: - # enter - subwfentries.append(trinfo) - if not subwfentries: - return None - return subwfentries[-1] - - def subworkflow_input_transition(self): - """return the transition which has went through the current sub-workflow - """ - return getattr(self.subworkflow_input_trinfo(), 'transition', None) - - def clear_all_caches(self): - super(WorkflowableMixIn, self).clear_all_caches() - clear_cache(self, 'cwetype_workflow') - - @deprecated('[3.5] get transition from current workflow and use its may_be_fired method') - def can_pass_transition(self, trname): - """return the Transition instance if the current user can fire the - transition with the given name, else None - """ - tr = self.current_workflow and self.current_workflow.transition_by_name(trname) - if tr and tr.may_be_fired(self.eid): - return tr - - @property - @deprecated('[3.5] use printable_state') - def displayable_state(self): - return self._cw._(self.state) - -MI_REL_TRIGGERS[('in_state', 'subject')] = WorkflowableMixIn diff -r b5640328ffad -r 9db65b381028 entity.py --- a/entity.py Thu Jul 01 09:23:39 2010 +0200 +++ b/entity.py Thu Jul 01 17:06:37 2010 +0200 @@ -19,11 +19,12 @@ __docformat__ = "restructuredtext en" +from copy import copy from warnings import warn from logilab.common import interface -from logilab.common.compat import all from logilab.common.decorators import cached +from logilab.common.deprecation import deprecated from logilab.mtconverter import TransformData, TransformError, xml_escape from rql.utils import rqlvar_maker @@ -51,7 +52,7 @@ return '1' -class Entity(AppObject, dict): +class Entity(AppObject): """an entity instance has e_schema automagically set on the class and instances has access to their issuing cursor. 
@@ -106,10 +107,10 @@ if not interface.implements(cls, iface): interface.extend(cls, iface) if role == 'subject': - setattr(cls, rschema.type, SubjectRelation(rschema)) + attr = rschema.type else: attr = 'reverse_%s' % rschema.type - setattr(cls, attr, ObjectRelation(rschema)) + setattr(cls, attr, Relation(rschema, role)) if mixins: # see etype class instantation in cwvreg.ETypeRegistry.etype_class method: # due to class dumping, cls is the generated top level class with actual @@ -124,6 +125,24 @@ cls.__bases__ = tuple(mixins) cls.info('plugged %s mixins on %s', mixins, cls) + fetch_attrs = ('modification_date',) + @classmethod + def fetch_order(cls, attr, var): + """class method used to control sort order when multiple entities of + this type are fetched + """ + return cls.fetch_unrelated_order(attr, var) + + @classmethod + def fetch_unrelated_order(cls, attr, var): + """class method used to control sort order when multiple entities of + this type are fetched to use in edition (eg propose them to create a + new relation on an edited entity). + """ + if attr == 'modification_date': + return '%s DESC' % var + return None + @classmethod def fetch_rql(cls, user, restriction=None, fetchattrs=None, mainvar='X', settype=True, ordermethod='fetch_order'): @@ -269,17 +288,17 @@ def __init__(self, req, rset=None, row=None, col=0): AppObject.__init__(self, req, rset=rset, row=row, col=col) - dict.__init__(self) - self._related_cache = {} + self._cw_related_cache = {} if rset is not None: self.eid = rset[row][col] else: self.eid = None - self._is_saved = True + self._cw_is_saved = True + self.cw_attr_cache = {} def __repr__(self): return '' % ( - self.e_schema, self.eid, self.keys(), id(self)) + self.e_schema, self.eid, self.cw_attr_cache.keys(), id(self)) def __json_encode__(self): """custom json dumps hook to dump the entity's eid @@ -298,12 +317,18 @@ def __cmp__(self, other): raise NotImplementedError('comparison not implemented for %s' % self.__class__) + def __contains__(self, key): + return key in self.cw_attr_cache + + def __iter__(self): + return iter(self.cw_attr_cache) + def __getitem__(self, key): if key == 'eid': warn('[3.7] entity["eid"] is deprecated, use entity.eid instead', DeprecationWarning, stacklevel=2) return self.eid - return super(Entity, self).__getitem__(key) + return self.cw_attr_cache[key] def __setitem__(self, attr, value): """override __setitem__ to update self.edited_attributes. 
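The entity.py hunk above adds fetch_attrs plus the fetch_order / fetch_unrelated_order class methods, which return an ORDERBY term (or None) for an attribute when several entities of the same type are fetched together. A sketch of overriding them on a concrete entity class (entity type and attribute names hypothetical):

.. sourcecode:: python

    from cubicweb.entities import AnyEntity

    class Ticket(AnyEntity):
        """hypothetical entity class"""
        __regid__ = 'Ticket'
        fetch_attrs = ('title', 'priority', 'modification_date')

        @classmethod
        def fetch_order(cls, attr, var):
            # most urgent tickets first, otherwise fall back to the default
            # behaviour shown above (modification_date DESC)
            if attr == 'priority':
                return '%s ASC' % var
            return super(Ticket, cls).fetch_order(attr, var)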
@@ -321,13 +346,13 @@ DeprecationWarning, stacklevel=2) self.eid = value else: - super(Entity, self).__setitem__(attr, value) + self.cw_attr_cache[attr] = value # don't add attribute into skip_security if already in edited # attributes, else we may accidentaly skip a desired security check if hasattr(self, 'edited_attributes') and \ attr not in self.edited_attributes: self.edited_attributes.add(attr) - self.skip_security_attributes.add(attr) + self._cw_skip_security_attributes.add(attr) def __delitem__(self, attr): """override __delitem__ to update self.edited_attributes on cleanup of @@ -345,28 +370,34 @@ del self.entity['load_left'] """ - super(Entity, self).__delitem__(attr) + del self.cw_attr_cache[attr] if hasattr(self, 'edited_attributes'): self.edited_attributes.remove(attr) + def clear(self): + self.cw_attr_cache.clear() + + def get(self, key, default=None): + return self.cw_attr_cache.get(key, default) + def setdefault(self, attr, default): """override setdefault to update self.edited_attributes""" - super(Entity, self).setdefault(attr, default) + self.cw_attr_cache.setdefault(attr, default) # don't add attribute into skip_security if already in edited # attributes, else we may accidentaly skip a desired security check if hasattr(self, 'edited_attributes') and \ attr not in self.edited_attributes: self.edited_attributes.add(attr) - self.skip_security_attributes.add(attr) + self._cw_skip_security_attributes.add(attr) def pop(self, attr, default=_marker): """override pop to update self.edited_attributes on cleanup of undesired changes introduced in the entity's dict. See `__delitem__` """ if default is _marker: - value = super(Entity, self).pop(attr) + value = self.cw_attr_cache.pop(attr) else: - value = super(Entity, self).pop(attr, default) + value = self.cw_attr_cache.pop(attr, default) if hasattr(self, 'edited_attributes') and attr in self.edited_attributes: self.edited_attributes.remove(attr) return value @@ -377,27 +408,24 @@ for attr, value in values.items(): self[attr] = value # use self.__setitem__ implementation - def rql_set_value(self, attr, value): - """call by rql execution plan when some attribute is modified - - don't use dict api in such case since we don't want attribute to be - added to skip_security_attributes. - """ - super(Entity, self).__setitem__(attr, value) + def cw_adapt_to(self, interface): + """return an adapter the entity to the given interface name. - def pre_add_hook(self): - """hook called by the repository before doing anything to add the entity - (before_add entity hooks have not been called yet). This give the - occasion to do weird stuff such as autocast (File -> Image for instance). - - This method must return the actual entity to be added. + return None if it can not be adapted. 
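With the hunks above, Entity no longer derives from dict: attribute values live in the explicit cw_attr_cache dictionary and the mapping protocol (__contains__, __getitem__, get, setdefault, pop...) simply forwards to it, so existing entity['attr'] code keeps working while edited_attributes tracking stays in one place. A standalone sketch of that delegation pattern (not the actual Entity class):

.. sourcecode:: python

    class AttrCacheSketch(object):
        """minimal sketch of the delegation pattern used above"""

        def __init__(self):
            self.cw_attr_cache = {}

        def __contains__(self, key):
            return key in self.cw_attr_cache

        def __iter__(self):
            return iter(self.cw_attr_cache)

        def __getitem__(self, key):
            return self.cw_attr_cache[key]

        def __setitem__(self, key, value):
            # the real implementation also records `key` in edited_attributes
            self.cw_attr_cache[key] = value

        def get(self, key, default=None):
            return self.cw_attr_cache.get(key, default)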
""" - return self + try: + cache = self._cw_adapters_cache + except AttributeError: + self._cw_adapters_cache = cache = {} + try: + return cache[interface] + except KeyError: + adapter = self._cw.vreg['adapters'].select_or_none( + interface, self._cw, entity=self) + cache[interface] = adapter + return adapter - def set_eid(self, eid): - self.eid = eid - - def has_eid(self): + def has_eid(self): # XXX cw_has_eid """return True if the entity has an attributed eid (False meaning that the entity has to be created """ @@ -407,38 +435,34 @@ except (ValueError, TypeError): return False - def is_saved(self): + def cw_is_saved(self): """during entity creation, there is some time during which the entity - has an eid attributed though it's not saved (eg during before_add_entity - hooks). You can use this method to ensure the entity has an eid *and* is - saved in its source. + has an eid attributed though it's not saved (eg during + 'before_add_entity' hooks). You can use this method to ensure the entity + has an eid *and* is saved in its source. """ - return self.has_eid() and self._is_saved + return self.has_eid() and self._cw_is_saved @cached - def metainformation(self): + def cw_metainformation(self): res = dict(zip(('type', 'source', 'extid'), self._cw.describe(self.eid))) res['source'] = self._cw.source_defs()[res['source']] return res - def clear_local_perm_cache(self, action): - for rqlexpr in self.e_schema.get_rqlexprs(action): - self._cw.local_perm_cache.pop((rqlexpr.eid, (('x', self.eid),)), None) - - def check_perm(self, action): + def cw_check_perm(self, action): self.e_schema.check_perm(self._cw, action, eid=self.eid) - def has_perm(self, action): + def cw_has_perm(self, action): return self.e_schema.has_perm(self._cw, action, eid=self.eid) - def view(self, __vid, __registry='views', w=None, **kwargs): + def view(self, __vid, __registry='views', w=None, **kwargs): # XXX cw_view """shortcut to apply a view on this entity""" view = self._cw.vreg[__registry].select(__vid, self._cw, rset=self.cw_rset, row=self.cw_row, col=self.cw_col, **kwargs) return view.render(row=self.cw_row, col=self.cw_col, w=w, **kwargs) - def absolute_url(self, *args, **kwargs): + def absolute_url(self, *args, **kwargs): # XXX cw_url """return an absolute url to view this entity""" # use *args since we don't want first argument to be "anonymous" to # avoid potential clash with kwargs @@ -451,7 +475,7 @@ # the object for use in the relation is tricky # XXX search_state is web specific if getattr(self._cw, 'search_state', ('normal',))[0] == 'normal': - kwargs['base_url'] = self.metainformation()['source'].get('base-url') + kwargs['base_url'] = self.cw_metainformation()['source'].get('base-url') if method in (None, 'view'): try: kwargs['_restpath'] = self.rest_path(kwargs.get('base_url')) @@ -463,7 +487,7 @@ kwargs['rql'] = 'Any X WHERE X eid %s' % self.eid return self._cw.build_url(method, **kwargs) - def rest_path(self, use_ext_eid=False): + def rest_path(self, use_ext_eid=False): # XXX cw_rest_path """returns a REST-like (relative) path for this entity""" mainattr, needcheck = self._rest_attr_info() etype = str(self.e_schema) @@ -486,12 +510,12 @@ path += '/eid' if mainattr == 'eid': if use_ext_eid: - value = self.metainformation()['extid'] + value = self.cw_metainformation()['extid'] else: value = self.eid return '%s/%s' % (path, self._cw.url_quote(value)) - def attr_metadata(self, attr, metadata): + def cw_attr_metadata(self, attr, metadata): """return a metadata for an attribute (None if unspecified)""" value = 
getattr(self, '%s_%s' % (attr, metadata), None) if value is None and metadata == 'encoding': @@ -499,7 +523,7 @@ return value def printable_value(self, attr, value=_marker, attrtype=None, - format='text/html', displaytime=True): + format='text/html', displaytime=True): # XXX cw_printable_value """return a displayable value (i.e. unicode string) which may contains html tags """ @@ -518,16 +542,16 @@ # description... if props.internationalizable: value = self._cw._(value) - attrformat = self.attr_metadata(attr, 'format') + attrformat = self.cw_attr_metadata(attr, 'format') if attrformat: - return self.mtc_transform(value, attrformat, format, - self._cw.encoding) + return self._cw_mtc_transform(value, attrformat, format, + self._cw.encoding) elif attrtype == 'Bytes': - attrformat = self.attr_metadata(attr, 'format') + attrformat = self.cw_attr_metadata(attr, 'format') if attrformat: - encoding = self.attr_metadata(attr, 'encoding') - return self.mtc_transform(value.getvalue(), attrformat, format, - encoding) + encoding = self.cw_attr_metadata(attr, 'encoding') + return self._cw_mtc_transform(value.getvalue(), attrformat, format, + encoding) return u'' value = printable_value(self._cw, attrtype, value, props, displaytime=displaytime) @@ -535,8 +559,8 @@ value = xml_escape(value) return value - def mtc_transform(self, data, format, target_format, encoding, - _engine=ENGINE): + def _cw_mtc_transform(self, data, format, target_format, encoding, + _engine=ENGINE): trdata = TransformData(data, format, encoding, appobject=self) data = _engine.convert(trdata, target_format).decode() if format == 'text/html': @@ -545,7 +569,13 @@ # entity cloning ########################################################## - def copy_relations(self, ceid): + def cw_copy(self): + thecopy = copy(self) + thecopy.cw_attr_cache = copy(self.cw_attr_cache) + thecopy._cw_related_cache = {} + return thecopy + + def copy_relations(self, ceid): # XXX cw_copy_relations """copy relations of the object with the given eid on this object (this method is called on the newly created copy, and ceid designates the original entity). 
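printable_value, together with the renamed cw_attr_metadata and _cw_mtc_transform helpers above, converts a stored attribute to a target MIME type using the attribute's declared format and encoding. A small usage sketch (attribute name hypothetical):

.. sourcecode:: python

    def plain_text_excerpt(entity, attr='description', size=200):
        """sketch: render a rich-text attribute as plain text, e.g. for previews"""
        text = entity.printable_value(attr, format='text/plain')
        return text[:size]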
@@ -574,7 +604,7 @@ rql = 'SET X %s V WHERE X eid %%(x)s, Y eid %%(y)s, Y %s V' % ( rschema.type, rschema.type) execute(rql, {'x': self.eid, 'y': ceid}) - self.clear_related_cache(rschema.type, 'subject') + self.cw_clear_relation_cache(rschema.type, 'subject') for rschema in self.e_schema.object_relations(): if rschema.meta: continue @@ -592,36 +622,32 @@ rql = 'SET V %s X WHERE X eid %%(x)s, Y eid %%(y)s, V %s Y' % ( rschema.type, rschema.type) execute(rql, {'x': self.eid, 'y': ceid}) - self.clear_related_cache(rschema.type, 'object') + self.cw_clear_relation_cache(rschema.type, 'object') # data fetching methods ################################################### @cached - def as_rset(self): + def as_rset(self): # XXX .cw_as_rset """returns a resultset containing `self` information""" rset = ResultSet([(self.eid,)], 'Any X WHERE X eid %(x)s', {'x': self.eid}, [(self.__regid__,)]) rset.req = self._cw return rset - def to_complete_relations(self): + def _cw_to_complete_relations(self): """by default complete final relations to when calling .complete()""" for rschema in self.e_schema.subject_relations(): if rschema.final: continue targets = rschema.objects(self.e_schema) - if len(targets) > 1: - # ambigous relations, the querier doesn't handle - # outer join correctly in this case - continue if rschema.inlined: matching_groups = self._cw.user.matching_groups - rdef = rschema.rdef(self.e_schema, targets[0]) - if matching_groups(rdef.get_groups('read')) and \ - all(matching_groups(e.get_groups('read')) for e in targets): + if all(matching_groups(e.get_groups('read')) and + rschema.rdef(self.e_schema, e).get_groups('read') + for e in targets): yield rschema, 'subject' - def to_complete_attributes(self, skip_bytes=True, skip_pwd=True): + def _cw_to_complete_attributes(self, skip_bytes=True, skip_pwd=True): for rschema, attrschema in self.e_schema.attribute_definitions(): # skip binary data by default if skip_bytes and attrschema.type == 'Bytes': @@ -638,7 +664,7 @@ yield attr _cw_completed = False - def complete(self, attributes=None, skip_bytes=True, skip_pwd=True): + def complete(self, attributes=None, skip_bytes=True, skip_pwd=True): # XXX cw_complete """complete this entity by adding missing attributes (i.e. 
query the repository to fill the entity) @@ -655,9 +681,9 @@ V = varmaker.next() rql = ['WHERE %s eid %%(x)s' % V] selected = [] - for attr in (attributes or self.to_complete_attributes(skip_bytes, skip_pwd)): + for attr in (attributes or self._cw_to_complete_attributes(skip_bytes, skip_pwd)): # if attribute already in entity, nothing to do - if self.has_key(attr): + if self.cw_attr_cache.has_key(attr): continue # case where attribute must be completed, but is not yet in entity var = varmaker.next() @@ -667,26 +693,20 @@ lastattr = len(selected) + 1 if attributes is None: # fetch additional relations (restricted to 0..1 relations) - for rschema, role in self.to_complete_relations(): + for rschema, role in self._cw_to_complete_relations(): rtype = rschema.type - if self.relation_cached(rtype, role): + if self.cw_relation_cached(rtype, role): continue + # at this point we suppose that: + # * this is a inlined relation + # * entity (self) is the subject + # * user has read perm on the relation and on the target entity + assert rschema.inlined + assert role == 'subject' var = varmaker.next() - targettype = rschema.targets(self.e_schema, role)[0] - rdef = rschema.role_rdef(self.e_schema, targettype, role) - card = rdef.role_cardinality(role) - assert card in '1?', '%s %s %s %s' % (self.e_schema, rtype, - role, card) - if role == 'subject': - if card == '1': - rql.append('%s %s %s' % (V, rtype, var)) - else: - rql.append('%s %s %s?' % (V, rtype, var)) - else: - if card == '1': - rql.append('%s %s %s' % (var, rtype, V)) - else: - rql.append('%s? %s %s' % (var, rtype, V)) + # keep outer join anyway, we don't want .complete to crash on + # missing mandatory relation (see #1058267) + rql.append('%s %s %s?' % (V, rtype, var)) selected.append(((rtype, role), var)) if selected: # select V, we need it as the left most selected variable @@ -706,9 +726,9 @@ rrset.req = self._cw else: rrset = self._cw.eid_rset(value) - self.set_related_cache(rtype, role, rrset) + self.cw_set_relation_cache(rtype, role, rrset) - def get_value(self, name): + def cw_attr_value(self, name): """get value for the attribute relation , query the repository to get the value if necessary. 
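The complete() rewrite above now only prefetches inlined subject relations the user may read, and always uses an outer join so a missing optional target no longer makes the whole query fail (see the #1058267 reference in the hunk). The generated query has roughly this shape (variable and relation names hypothetical):

.. sourcecode:: python

    def prefetch_sketch(req, eid):
        """sketch of the query shape built by complete(): attributes plus 0..1
        relations, with the relation variable marked optional using '?'"""
        rql = ('Any X,A,B,S WHERE X eid %(x)s, '
               'X firstname A, X surname B, '
               'X in_state S?')
        return req.execute(rql, {'x': eid})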
@@ -716,9 +736,9 @@ :param name: name of the attribute to get """ try: - value = self[name] + value = self.cw_attr_cache[name] except KeyError: - if not self.is_saved(): + if not self.cw_is_saved(): return None rql = "Any A WHERE X eid %%(x)s, X %s A" % name try: @@ -740,7 +760,7 @@ self[name] = value = None return value - def related(self, rtype, role='subject', limit=None, entities=False): + def related(self, rtype, role='subject', limit=None, entities=False): # XXX .cw_related """returns a resultset of related entities :param role: is the role played by 'self' in the relation ('subject' or 'object') @@ -748,19 +768,19 @@ :param entities: if True, the entites are returned; if False, a result set is returned """ try: - return self.related_cache(rtype, role, entities, limit) + return self._cw_relation_cache(rtype, role, entities, limit) except KeyError: pass if not self.has_eid(): if entities: return [] return self.empty_rset() - rql = self.related_rql(rtype, role) + rql = self.cw_related_rql(rtype, role) rset = self._cw.execute(rql, {'x': self.eid}) - self.set_related_cache(rtype, role, rset) + self.cw_set_relation_cache(rtype, role, rset) return self.related(rtype, role, limit, entities) - def related_rql(self, rtype, role='subject', targettypes=None): + def cw_related_rql(self, rtype, role='subject', targettypes=None): rschema = self._cw.vreg.schema[rtype] if role == 'subject': restriction = 'E eid %%(x)s, E %s X' % rtype @@ -809,7 +829,7 @@ # generic vocabulary methods ############################################## - def unrelated_rql(self, rtype, targettype, role, ordermethod=None, + def cw_unrelated_rql(self, rtype, targettype, role, ordermethod=None, vocabconstraints=True): """build a rql to fetch `targettype` entities unrelated to this entity using (rtype, role) relation. @@ -871,12 +891,12 @@ return rql, args def unrelated(self, rtype, targettype, role='subject', limit=None, - ordermethod=None): + ordermethod=None): # XXX .cw_unrelated """return a result set of target type objects that may be related by a given relation, with self as subject or object """ try: - rql, args = self.unrelated_rql(rtype, targettype, role, ordermethod) + rql, args = self.cw_unrelated_rql(rtype, targettype, role, ordermethod) except Unauthorized: return self._cw.empty_rset() if limit is not None: @@ -884,18 +904,19 @@ rql = '%s LIMIT %s WHERE %s' % (before, limit, after) return self._cw.execute(rql, args) - # relations cache handling ################################################ + # relations cache handling ################################################# - def relation_cached(self, rtype, role): - """return true if the given relation is already cached on the instance + def cw_relation_cached(self, rtype, role): + """return None if the given relation isn't already cached on the + instance, else the content of the cache (a 2-uple (rset, entities)). 
""" - return self._related_cache.get('%s_%s' % (rtype, role)) + return self._cw_related_cache.get('%s_%s' % (rtype, role)) - def related_cache(self, rtype, role, entities=True, limit=None): + def _cw_relation_cache(self, rtype, role, entities=True, limit=None): """return values for the given relation if it's cached on the instance, else raise `KeyError` """ - res = self._related_cache['%s_%s' % (rtype, role)][entities] + res = self._cw_related_cache['%s_%s' % (rtype, role)][entities] if limit is not None and limit < len(res): if entities: res = res[:limit] @@ -903,10 +924,10 @@ res = res.limit(limit) return res - def set_related_cache(self, rtype, role, rset, col=0): + def cw_set_relation_cache(self, rtype, role, rset): """set cached values for the given relation""" if rset: - related = list(rset.entities(col)) + related = list(rset.entities(0)) rschema = self._cw.vreg.schema.rschema(rtype) if role == 'subject': rcard = rschema.rdef(self.e_schema, related[0].e_schema).cardinality[1] @@ -916,23 +937,24 @@ target = 'subject' if rcard in '?1': for rentity in related: - rentity._related_cache['%s_%s' % (rtype, target)] = ( + rentity._cw_related_cache['%s_%s' % (rtype, target)] = ( self.as_rset(), (self,)) else: related = () - self._related_cache['%s_%s' % (rtype, role)] = (rset, related) + self._cw_related_cache['%s_%s' % (rtype, role)] = (rset, related) - def clear_related_cache(self, rtype=None, role=None): + def cw_clear_relation_cache(self, rtype=None, role=None): """clear cached values for the given relation or the entire cache if no relation is given """ if rtype is None: - self._related_cache = {} + self._cw_related_cache = {} + self._cw_adapters_cache = {} else: assert role - self._related_cache.pop('%s_%s' % (rtype, role), None) + self._cw_related_cache.pop('%s_%s' % (rtype, role), None) - def clear_all_caches(self): + def clear_all_caches(self): # XXX cw_clear_all_caches """flush all caches on this entity. Further attributes/relations access will triggers new database queries to get back values. @@ -942,10 +964,9 @@ # clear attributes cache haseid = 'eid' in self self._cw_completed = False - self.clear() + self.cw_attr_cache.clear() # clear relations cache - for rschema, _, role in self.e_schema.relation_definitions(): - self.clear_related_cache(rschema.type, role) + self.cw_clear_relation_cache() # rest path unique cache try: del self.__unique @@ -954,10 +975,10 @@ # raw edition utilities ################################################### - def set_attributes(self, **kwargs): + def set_attributes(self, **kwargs): # XXX cw_set_attributes _check_cw_unsafe(kwargs) assert kwargs - assert self._is_saved, "should not call set_attributes while entity "\ + assert self.cw_is_saved(), "should not call set_attributes while entity "\ "hasn't been saved yet" relations = [] for key in kwargs: @@ -972,7 +993,7 @@ # edited_attributes / skip_security_attributes machinery self.update(kwargs) - def set_relations(self, **kwargs): + def set_relations(self, **kwargs): # XXX cw_set_relations """add relations to the given object. To set a relation where this entity is the object of the relation, use 'reverse_' as argument name. 
@@ -996,28 +1017,42 @@ restr, ','.join(str(r.eid) for r in values)), {'x': self.eid}) - def delete(self, **kwargs): + def cw_delete(self, **kwargs): assert self.has_eid(), self.eid self._cw.execute('DELETE %s X WHERE X eid %%(x)s' % self.e_schema, {'x': self.eid}, **kwargs) # server side utilities ################################################### + def _cw_rql_set_value(self, attr, value): + """call by rql execution plan when some attribute is modified + + don't use dict api in such case since we don't want attribute to be + added to skip_security_attributes. + + This method is for internal use, you should not use it. + """ + self.cw_attr_cache[attr] = value + + def _cw_clear_local_perm_cache(self, action): + for rqlexpr in self.e_schema.get_rqlexprs(action): + self._cw.local_perm_cache.pop((rqlexpr.eid, (('x', self.eid),)), None) + @property - def skip_security_attributes(self): + def _cw_skip_security_attributes(self): try: - return self._skip_security_attributes + return self.__cw_skip_security_attributes except: - self._skip_security_attributes = set() - return self._skip_security_attributes + self.__cw_skip_security_attributes = set() + return self.__cw_skip_security_attributes - def set_defaults(self): + def _cw_set_defaults(self): """set default values according to the schema""" for attr, value in self.e_schema.defaults(): - if not self.has_key(attr): + if not self.cw_attr_cache.has_key(attr): self[str(attr)] = value - def check(self, creation=False): + def _cw_check(self, creation=False): """check this entity against its schema. Only final relation are checked here, constraint on actual relations are checked in hooks """ @@ -1040,61 +1075,29 @@ self.e_schema.check(self, creation=creation, _=_, relations=relations) - def fti_containers(self, _done=None): - if _done is None: - _done = set() - _done.add(self.eid) - containers = tuple(self.e_schema.fulltext_containers()) - if containers: - for rschema, target in containers: - if target == 'object': - targets = getattr(self, rschema.type) - else: - targets = getattr(self, 'reverse_%s' % rschema) - for entity in targets: - if entity.eid in _done: - continue - for container in entity.fti_containers(_done): - yield container - yielded = True - else: - yield self + @deprecated('[3.9] use entity.cw_attr_value(attr)') + def get_value(self, name): + return self.cw_attr_value(name) - def get_words(self): - """used by the full text indexer to get words to index + @deprecated('[3.9] use entity.cw_delete()') + def delete(self, **kwargs): + return self.cw_delete(**kwargs) - this method should only be used on the repository side since it depends - on the logilab.database package + @deprecated('[3.9] use entity.cw_attr_metadata(attr, metadata)') + def attr_metadata(self, attr, metadata): + return self.cw_attr_metadata(attr, metadata) - :rtype: list - :return: the list of indexable word of this entity - """ - from logilab.database.fti import tokenize - # take care to cases where we're modyfying the schema - pending = self._cw.transaction_data.setdefault('pendingrdefs', set()) - words = [] - for rschema in self.e_schema.indexable_attributes(): - if (self.e_schema, rschema) in pending: - continue - try: - value = self.printable_value(rschema, format='text/plain') - except TransformError: - continue - except: - self.exception("can't add value of %s to text index for entity %s", - rschema, self.eid) - continue - if value: - words += tokenize(value) - for rschema, role in self.e_schema.fulltext_relations(): - if role == 'subject': - for entity in 
getattr(self, rschema.type): - words += entity.get_words() - else: # if role == 'object': - for entity in getattr(self, 'reverse_%s' % rschema.type): - words += entity.get_words() - return words + @deprecated('[3.9] use entity.cw_has_perm(action)') + def has_perm(self, action): + return self.cw_has_perm(action) + @deprecated('[3.9] use entity.cw_set_relation_cache(rtype, role, rset)') + def set_related_cache(self, rtype, role, rset): + self.cw_set_relation_cache(rtype, role, rset) + + @deprecated('[3.9] use entity.cw_clear_relation_cache(rtype, role, rset)') + def clear_related_cache(self, rtype=None, role=None): + self.cw_clear_relation_cache(rtype, role) # attribute and relation descriptors ########################################## @@ -1108,18 +1111,18 @@ def __get__(self, eobj, eclass): if eobj is None: return self - return eobj.get_value(self._attrname) + return eobj.cw_attr_value(self._attrname) def __set__(self, eobj, value): eobj[self._attrname] = value + class Relation(object): """descriptor that controls schema relation access""" - _role = None # for pylint - def __init__(self, rschema): - self._rschema = rschema + def __init__(self, rschema, role): self._rtype = rschema.type + self._role = role def __get__(self, eobj, eclass): if eobj is None: @@ -1131,14 +1134,6 @@ raise NotImplementedError -class SubjectRelation(Relation): - """descriptor that controls schema relation access""" - _role = 'subject' - -class ObjectRelation(Relation): - """descriptor that controls schema relation access""" - _role = 'object' - from logging import getLogger from cubicweb import set_log_methods set_log_methods(Entity, getLogger('cubicweb.entity')) diff -r b5640328ffad -r 9db65b381028 etwist/request.py --- a/etwist/request.py Thu Jul 01 09:23:39 2010 +0200 +++ b/etwist/request.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""Twisted request handler for CubicWeb +"""Twisted request handler for CubicWeb""" -""" __docformat__ = "restructuredtext en" from datetime import datetime @@ -55,9 +54,9 @@ return self._twreq.method def relative_path(self, includeparams=True): - """return the normalized path of the request (ie at least relative - to the instance's root, but some other normalization may be needed - so that the returned path may be used to compare to generated urls + """return the normalized path of the request (ie at least relative to + the instance's root, but some other normalization may be needed so that + the returned path may be used to compare to generated urls :param includeparams: boolean indicating if GET form parameters should be kept in the path @@ -68,8 +67,8 @@ return path def get_header(self, header, default=None, raw=True): - """return the value associated with the given input header, - raise KeyError if the header is not set + """return the value associated with the given input header, raise + KeyError if the header is not set """ if raw: return self._headers_in.getRawHeaders(header, [default])[0] diff -r b5640328ffad -r 9db65b381028 etwist/server.py --- a/etwist/server.py Thu Jul 01 09:23:39 2010 +0200 +++ b/etwist/server.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
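In the entity.py hunks above, the SubjectRelation/ObjectRelation pair is collapsed into a single Relation descriptor parameterized by role, while Attribute now reads through cw_attr_value. A standalone sketch of the descriptor idea (not the actual classes):

.. sourcecode:: python

    class RelationSketch(object):
        """descriptor sketch: class access returns the descriptor itself,
        instance access delegates to the entity's related() method"""

        def __init__(self, rtype, role):
            self._rtype = rtype
            self._role = role

        def __get__(self, eobj, eclass):
            if eobj is None:
                return self
            return eobj.related(self._rtype, self._role, entities=True)

        def __set__(self, eobj, value):
            raise NotImplementedError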
-"""twisted server for CubicWeb web instances +"""twisted server for CubicWeb web instances""" -""" __docformat__ = "restructuredtext en" import sys @@ -99,12 +98,11 @@ class CubicWebRootResource(resource.Resource): - def __init__(self, config, debug=None): - self.debugmode = debug + def __init__(self, config, vreg=None): self.config = config # instantiate publisher here and not in init_publisher to get some # checks done before daemonization (eg versions consistency) - self.appli = CubicWebPublisher(config, debug=self.debugmode) + self.appli = CubicWebPublisher(config, vreg=vreg) self.base_url = config['base-url'] self.https_url = config['https-url'] self.children = {} @@ -118,8 +116,6 @@ # when we have an in-memory repository, clean unused sessions every XX # seconds and properly shutdown the server if config.repo_method == 'inmemory': - reactor.addSystemEventTrigger('before', 'shutdown', - self.shutdown_event) if config.pyro_enabled(): # if pyro is enabled, we have to register to the pyro name # server, create a pyro daemon, and create a task to handle pyro @@ -127,7 +123,10 @@ self.pyro_daemon = self.appli.repo.pyro_register() self.pyro_listen_timeout = 0.02 self.appli.repo.looping_task(1, self.pyro_loop_event) - self.appli.repo.start_looping_tasks() + if config.mode != 'test': + reactor.addSystemEventTrigger('before', 'shutdown', + self.shutdown_event) + self.appli.repo.start_looping_tasks() self.set_url_rewriter() CW_EVENT_MANAGER.bind('after-registry-reload', self.set_url_rewriter) @@ -156,6 +155,9 @@ pre_path = request.path.split('/')[1:] if pre_path[0] == 'https': pre_path.pop(0) + uiprops = self.config.https_uiprops + else: + uiprops = self.config.uiprops directory = pre_path[0] # Anything in data/, static/, fckeditor/ and the generated versioned # data directory is treated as static files @@ -165,7 +167,7 @@ if directory == 'static': return File(self.config.static_directory) if directory == 'fckeditor': - return File(self.config.ext_resources['FCKEDITOR_PATH']) + return File(uiprops['FCKEDITOR_PATH']) if directory != 'data': # versioned directory, use specific file with http cache # headers so their are cached for a very long time @@ -173,7 +175,7 @@ else: cls = File if path == 'fckeditor': - return cls(self.config.ext_resources['FCKEDITOR_PATH']) + return cls(uiprops['FCKEDITOR_PATH']) if path == directory: # recurse return self datadir = self.config.locate_resource(path) @@ -187,7 +189,10 @@ def render(self, request): """Render a page from the root resource""" # reload modified files in debug mode - if self.debugmode: + if self.config.debugmode: + self.config.uiprops.reload_if_needed() + if self.https_url: + self.config.https_uiprops.reload_if_needed() self.appli.vreg.reload_if_needed() if self.config['profile']: # default profiler don't trace threads return self.render_request(request) @@ -394,20 +399,21 @@ LOGGER = getLogger('cubicweb.twisted') set_log_methods(CubicWebRootResource, LOGGER) -def run(config, debug): +def run(config, vreg=None, debug=None): + if debug is not None: + config.debugmode = debug # create the site - root_resource = CubicWebRootResource(config, debug) + root_resource = CubicWebRootResource(config, vreg=vreg) website = server.Site(root_resource) # serve it via standard HTTP on port set in the configuration port = config['port'] or 8080 reactor.listenTCP(port, website) - logger = getLogger('cubicweb.twisted') - if not debug: + if not config.debugmode: if sys.platform == 'win32': raise ConfigurationError("Under windows, you must use the service 
management " "commands (e.g : 'net start my_instance)'") from logilab.common.daemon import daemonize - print 'instance starting in the background' + LOGGER.info('instance started in the background on %s', root_resource.base_url) if daemonize(config['pid-file']): return # child process root_resource.init_publisher() # before changing uid @@ -419,7 +425,7 @@ uid = getpwnam(config['uid']).pw_uid os.setuid(uid) root_resource.start_service() - logger.info('instance started on %s', root_resource.base_url) + LOGGER.info('instance started on %s', root_resource.base_url) # avoid annoying warnign if not in Main Thread signals = threading.currentThread().getName() == 'MainThread' if config['profile']: diff -r b5640328ffad -r 9db65b381028 etwist/twctl.py --- a/etwist/twctl.py Thu Jul 01 09:23:39 2010 +0200 +++ b/etwist/twctl.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""cubicweb-clt handlers for twisted - -""" +"""cubicweb-clt handlers for twisted""" from cubicweb.toolsutils import CommandHandler from cubicweb.web.webctl import WebCreateHandler @@ -32,9 +30,9 @@ cmdname = 'start' cfgname = 'twisted' - def start_server(self, config, debug): + def start_server(self, config): from cubicweb.etwist import server - server.run(config, debug) + server.run(config) class TWStopHandler(CommandHandler): cmdname = 'stop' diff -r b5640328ffad -r 9db65b381028 goa/appobjects/components.py --- a/goa/appobjects/components.py Thu Jul 01 09:23:39 2010 +0200 +++ b/goa/appobjects/components.py Thu Jul 01 17:06:37 2010 +0200 @@ -98,7 +98,7 @@ def sendmail(self, recipient, subject, body): sender = '%s <%s>' % ( self.req.user.dc_title() or self.config['sender-name'], - self.req.user.get_email() or self.config['sender-addr']) + self.req.user.cw_adapt_to('IEmailable').get_email() or self.config['sender-addr']) mail.send_mail(sender=sender, to=recipient, subject=subject, body=body) diff -r b5640328ffad -r 9db65b381028 goa/db.py --- a/goa/db.py Thu Jul 01 09:23:39 2010 +0200 +++ b/goa/db.py Thu Jul 01 17:06:37 2010 +0200 @@ -233,7 +233,7 @@ return self.req.datastore_get(self.eid) except AttributeError: # self.req is not a server session return Get(self.eid) - self.set_defaults() + self._cw_set_defaults() values = self._to_gae_dict(convert=False) parent = key_name = _app = None if self._gaeinitargs is not None: @@ -343,7 +343,7 @@ self.req = req dbmodel = self.to_gae_model() key = Put(dbmodel) - self.set_eid(str(key)) + self.eid = str(key) if self.req is not None and self.rset is None: self.rset = rset_from_objs(self.req, dbmodel, ('eid',), 'Any X WHERE X eid %(x)s', {'x': self.eid}) @@ -409,7 +409,7 @@ def dynamic_properties(self): raise NotImplementedError('use eschema') - def is_saved(self): + def cw_is_saved(self): return self.has_eid() def parent(self): diff -r b5640328ffad -r 9db65b381028 goa/gaesource.py --- a/goa/gaesource.py Thu Jul 01 09:23:39 2010 +0200 +++ b/goa/gaesource.py Thu Jul 01 17:06:37 2010 +0200 @@ -49,15 +49,15 @@ except KeyError: pass else: - entity.clear_related_cache(rtype, role) + entity.cw_clear_relation_cache(rtype, role) if gaesubject.kind() == 'CWUser': for asession in session.repo._sessions.itervalues(): if asession.user.eid == subject: - asession.user.clear_related_cache(rtype, 'subject') + asession.user.cw_clear_relation_cache(rtype, 'subject') if gaeobject.kind() == 'CWUser': for asession in session.repo._sessions.itervalues(): if asession.user.eid == object: - 
asession.user.clear_related_cache(rtype, 'object') + asession.user.cw_clear_relation_cache(rtype, 'object') def _mark_modified(session, gaeentity): modified = session.transaction_data.setdefault('modifiedentities', {}) diff -r b5640328ffad -r 9db65b381028 goa/skel/loader.py --- a/goa/skel/loader.py Thu Jul 01 09:23:39 2010 +0200 +++ b/goa/skel/loader.py Thu Jul 01 17:06:37 2010 +0200 @@ -30,7 +30,7 @@ # apply monkey patches first goa.do_monkey_patch() # get instance's configuration (will be loaded from app.conf file) - GAEConfiguration.ext_resources['JAVASCRIPTS'].append('DATADIR/goa.js') + GAEConfiguration.uiprops['JAVASCRIPTS'].append('DATADIR/goa.js') config = GAEConfiguration('toto', APPLROOT) # create default groups create_groups() diff -r b5640328ffad -r 9db65b381028 goa/skel/main.py --- a/goa/skel/main.py Thu Jul 01 09:23:39 2010 +0200 +++ b/goa/skel/main.py Thu Jul 01 17:06:37 2010 +0200 @@ -31,7 +31,7 @@ # get instance's configuration (will be loaded from app.conf file) from cubicweb.goa.goaconfig import GAEConfiguration -GAEConfiguration.ext_resources['JAVASCRIPTS'].append('DATADIR/goa.js') +GAEConfiguration.uiprops['JAVASCRIPTS'].append('DATADIR/goa.js') config = GAEConfiguration('toto', APPLROOT) # dynamic objects registry diff -r b5640328ffad -r 9db65b381028 goa/test/unittest_rql.py --- a/goa/test/unittest_rql.py Thu Jul 01 09:23:39 2010 +0200 +++ b/goa/test/unittest_rql.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,6 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -""" - -""" from cubicweb.goa.testlib import * from cubicweb import Binary @@ -612,7 +609,7 @@ def test_error_unknown_eid(self): rset = self.req.execute('Any X WHERE X eid %(x)s', {'x': '1234'}) self.assertEquals(len(rset), 0) - self.blog.delete() + self.blog.cw_delete() rset = self.req.execute('Any X WHERE X eid %(x)s', {'x': self.blog.eid}) self.assertEquals(len(rset), 0) diff -r b5640328ffad -r 9db65b381028 goa/tools/laxctl.py --- a/goa/tools/laxctl.py Thu Jul 01 09:23:39 2010 +0200 +++ b/goa/tools/laxctl.py Thu Jul 01 17:06:37 2010 +0200 @@ -43,7 +43,7 @@ do_monkey_patch() from cubicweb.goa.goavreg import GAEVregistry from cubicweb.goa.goaconfig import GAEConfiguration - #WebConfiguration.ext_resources['JAVASCRIPTS'].append('DATADIR/goa.js') + #WebConfiguration.uiprops['JAVASCRIPTS'].append('DATADIR/goa.js') config = GAEConfiguration('toto', applroot) vreg = GAEVregistry(config) vreg.set_schema(config.load_schema()) diff -r b5640328ffad -r 9db65b381028 hooks/bookmark.py --- a/hooks/bookmark.py Thu Jul 01 09:23:39 2010 +0200 +++ b/hooks/bookmark.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
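Several hunks above replace the `ext_resources` dictionary with `uiprops` (plus an `https_uiprops` twin for pages served under /https). A short usage sketch under that assumption, where `config` is a web configuration instance and 'mycube.js' is an illustrative file name:

.. sourcecode:: python

    # pre 3.9 spelling, as removed above:
    #   config.ext_resources['JAVASCRIPTS'].append('DATADIR/mycube.js')

    # 3.9 spelling: ui properties are plain dictionaries on the configuration
    config.uiprops['JAVASCRIPTS'].append('DATADIR/mycube.js')
    fckeditor_path = config.uiprops['FCKEDITOR_PATH']

    # the https variant of the site has its own property sheet
    if config['https-url']:
        config.https_uiprops['JAVASCRIPTS'].append('DATADIR/mycube.js')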
-"""bookmark related hooks +"""bookmark related hooks""" -""" __docformat__ = "restructuredtext en" from cubicweb.server import hook @@ -28,7 +27,7 @@ def precommit_event(self): if not self.session.deleted_in_transaction(self.bookmark.eid): if not self.bookmark.bookmarked_by: - self.bookmark.delete() + self.bookmark.cw_delete() class DelBookmarkedByHook(hook.Hook): diff -r b5640328ffad -r 9db65b381028 hooks/security.py --- a/hooks/security.py Thu Jul 01 09:23:39 2010 +0200 +++ b/hooks/security.py Thu Jul 01 17:06:37 2010 +0200 @@ -29,9 +29,9 @@ def check_entity_attributes(session, entity, editedattrs=None, creation=False): eid = entity.eid eschema = entity.e_schema - # .skip_security_attributes is there to bypass security for attributes + # ._cw_skip_security_attributes is there to bypass security for attributes # set by hooks by modifying the entity's dictionnary - dontcheck = entity.skip_security_attributes + dontcheck = entity._cw_skip_security_attributes if editedattrs is None: try: editedattrs = entity.edited_attributes @@ -59,7 +59,7 @@ for values in session.transaction_data.pop('check_entity_perm_op'): entity = session.entity_from_eid(values[0]) action = values[1] - entity.check_perm(action) + entity.cw_check_perm(action) check_entity_attributes(session, entity, values[2:], creation=self.creation) @@ -110,10 +110,10 @@ def __call__(self): try: # check user has permission right now, if not retry at commit time - self.entity.check_perm('update') + self.entity.cw_check_perm('update') check_entity_attributes(self._cw, self.entity) except Unauthorized: - self.entity.clear_local_perm_cache('update') + self.entity._cw_clear_local_perm_cache('update') # save back editedattrs in case the entity is reedited later in the # same transaction, which will lead to edited_attributes being # overwritten @@ -127,7 +127,7 @@ events = ('before_delete_entity',) def __call__(self): - self.entity.check_perm('delete') + self.entity.cw_check_perm('delete') class BeforeAddRelationSecurityHook(SecurityHook): diff -r b5640328ffad -r 9db65b381028 hooks/syncschema.py --- a/hooks/syncschema.py Thu Jul 01 09:23:39 2010 +0200 +++ b/hooks/syncschema.py Thu Jul 01 17:06:37 2010 +0200 @@ -34,7 +34,8 @@ from cubicweb import ValidationError from cubicweb.selectors import implements -from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES, CONSTRAINTS, display_name +from cubicweb.schema import (META_RTYPES, VIRTUAL_RTYPES, CONSTRAINTS, + ETYPE_NAME_MAP, display_name) from cubicweb.server import hook, schemaserial as ss from cubicweb.server.sqlutils import SQL_PREFIX @@ -817,9 +818,10 @@ if name in CORE_ETYPES: raise ValidationError(self.entity.eid, {None: self._cw._('can\'t be deleted')}) # delete every entities of this type - self._cw.execute('DELETE %s X' % name) + if not name in ETYPE_NAME_MAP: + self._cw.execute('DELETE %s X' % name) + MemSchemaCWETypeDel(self._cw, name) DropTable(self._cw, table=SQL_PREFIX + name) - MemSchemaCWETypeDel(self._cw, name) class AfterDelCWETypeHook(DelCWETypeHook): @@ -984,7 +986,11 @@ def __call__(self): session = self._cw - rdef = session.vreg.schema.schema_by_eid(self.eidfrom) + try: + rdef = session.vreg.schema.schema_by_eid(self.eidfrom) + except KeyError: + self.critical('cant get schema rdef associated to %s', self.eidfrom) + return subjschema, rschema, objschema = rdef.as_triple() pendings = session.transaction_data.get('pendingeids', ()) pendingrdefs = session.transaction_data.setdefault('pendingrdefs', set()) @@ -1005,7 +1011,6 @@ # we have to update physical schema 
systematically for final and inlined # relations, but only if it's the last instance for this relation type # for other relations - if (rschema.final or rschema.inlined): rset = execute('Any COUNT(X) WHERE X is %s, X relation_type R, ' 'R eid %%(x)s, X from_entity E, E name %%(name)s' @@ -1176,7 +1181,7 @@ still_fti = list(schema[etype].indexable_attributes()) for entity in rset.entities(): source.fti_unindex_entity(session, entity.eid) - for container in entity.fti_containers(): + for container in entity.cw_adapt_to('IFTIndexable').fti_containers(): if still_fti or container is not entity: source.fti_unindex_entity(session, container.eid) source.fti_index_entity(session, container) diff -r b5640328ffad -r 9db65b381028 hooks/workflow.py --- a/hooks/workflow.py Thu Jul 01 09:23:39 2010 +0200 +++ b/hooks/workflow.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""Core hooks: workflow related hooks +"""Core hooks: workflow related hooks""" -""" __docformat__ = "restructuredtext en" from datetime import datetime @@ -25,8 +24,7 @@ from yams.schema import role_name from cubicweb import RepositoryError, ValidationError -from cubicweb.interfaces import IWorkflowable -from cubicweb.selectors import implements +from cubicweb.selectors import implements, adaptable from cubicweb.server import hook @@ -51,11 +49,12 @@ def precommit_event(self): session = self.session entity = self.entity + iworkflowable = entity.cw_adapt_to('IWorkflowable') # if there is an initial state and the entity's state is not set, # use the initial state as a default state if not (session.deleted_in_transaction(entity.eid) or entity.in_state) \ - and entity.current_workflow: - state = entity.current_workflow.initial + and iworkflowable.current_workflow: + state = iworkflowable.current_workflow.initial if state: session.add_relation(entity.eid, 'in_state', state.eid) _FireAutotransitionOp(session, entity=entity) @@ -65,10 +64,11 @@ def precommit_event(self): entity = self.entity - autotrs = list(entity.possible_transitions('auto')) + iworkflowable = entity.cw_adapt_to('IWorkflowable') + autotrs = list(iworkflowable.possible_transitions('auto')) if autotrs: assert len(autotrs) == 1 - entity.fire_transition(autotrs[0]) + iworkflowable.fire_transition(autotrs[0]) class _WorkflowChangedOp(hook.Operation): @@ -82,29 +82,30 @@ if self.eid in pendingeids: return entity = session.entity_from_eid(self.eid) + iworkflowable = entity.cw_adapt_to('IWorkflowable') # check custom workflow has not been rechanged to another one in the same # transaction - mainwf = entity.main_workflow + mainwf = iworkflowable.main_workflow if mainwf.eid == self.wfeid: deststate = mainwf.initial if not deststate: qname = role_name('custom_workflow', 'subject') msg = session._('workflow has no initial state') raise ValidationError(entity.eid, {qname: msg}) - if mainwf.state_by_eid(entity.current_state.eid): + if mainwf.state_by_eid(iworkflowable.current_state.eid): # nothing to do return # if there are no history, simply go to new workflow's initial state - if not entity.workflow_history: - if entity.current_state.eid != deststate.eid: + if not iworkflowable.workflow_history: + if iworkflowable.current_state.eid != deststate.eid: _change_state(session, entity.eid, - entity.current_state.eid, deststate.eid) + iworkflowable.current_state.eid, deststate.eid) _FireAutotransitionOp(session, entity=entity) return msg = session._('workflow changed to 
"%s"') msg %= session._(mainwf.name) session.transaction_data[(entity.eid, 'customwf')] = self.wfeid - entity.change_state(deststate, msg, u'text/plain') + iworkflowable.change_state(deststate, msg, u'text/plain') class _CheckTrExitPoint(hook.Operation): @@ -125,9 +126,10 @@ def precommit_event(self): session = self.session forentity = self.forentity + iworkflowable = forentity.cw_adapt_to('IWorkflowable') trinfo = self.trinfo # we're in a subworkflow, check if we've reached an exit point - wftr = forentity.subworkflow_input_transition() + wftr = iworkflowable.subworkflow_input_transition() if wftr is None: # inconsistency detected qname = role_name('to_state', 'subject') @@ -137,9 +139,9 @@ if tostate is not None: # reached an exit point msg = session._('exiting from subworkflow %s') - msg %= session._(forentity.current_workflow.name) + msg %= session._(iworkflowable.current_workflow.name) session.transaction_data[(forentity.eid, 'subwfentrytr')] = True - forentity.change_state(tostate, msg, u'text/plain', tr=wftr) + iworkflowable.change_state(tostate, msg, u'text/plain', tr=wftr) # hooks ######################################################################## @@ -151,7 +153,7 @@ class SetInitialStateHook(WorkflowHook): __regid__ = 'wfsetinitial' - __select__ = WorkflowHook.__select__ & implements(IWorkflowable) + __select__ = WorkflowHook.__select__ & adaptable('IWorkflowable') events = ('after_add_entity',) def __call__(self): @@ -189,18 +191,19 @@ msg = session._('mandatory relation') raise ValidationError(entity.eid, {qname: msg}) forentity = session.entity_from_eid(foreid) + iworkflowable = forentity.cw_adapt_to('IWorkflowable') # then check it has a workflow set, unless we're in the process of changing # entity's workflow if session.transaction_data.get((forentity.eid, 'customwf')): wfeid = session.transaction_data[(forentity.eid, 'customwf')] wf = session.entity_from_eid(wfeid) else: - wf = forentity.current_workflow + wf = iworkflowable.current_workflow if wf is None: msg = session._('related entity has no workflow set') raise ValidationError(entity.eid, {None: msg}) # then check it has a state set - fromstate = forentity.current_state + fromstate = iworkflowable.current_state if fromstate is None: msg = session._('related entity has no state') raise ValidationError(entity.eid, {None: msg}) @@ -278,8 +281,9 @@ _change_state(self._cw, trinfo['wf_info_for'], trinfo['from_state'], trinfo['to_state']) forentity = self._cw.entity_from_eid(trinfo['wf_info_for']) - assert forentity.current_state.eid == trinfo['to_state'] - if forentity.main_workflow.eid != forentity.current_workflow.eid: + iworkflowable = forentity.cw_adapt_to('IWorkflowable') + assert iworkflowable.current_state.eid == trinfo['to_state'] + if iworkflowable.main_workflow.eid != iworkflowable.current_workflow.eid: _SubWorkflowExitOp(self._cw, forentity=forentity, trinfo=trinfo) @@ -297,7 +301,8 @@ # state changed through TrInfo insertion, so we already know it's ok return entity = session.entity_from_eid(self.eidfrom) - mainwf = entity.main_workflow + iworkflowable = entity.cw_adapt_to('IWorkflowable') + mainwf = iworkflowable.main_workflow if mainwf is None: msg = session._('entity has no workflow set') raise ValidationError(entity.eid, {None: msg}) @@ -309,7 +314,7 @@ msg = session._("state doesn't belong to entity's workflow. 
You may " "want to set a custom workflow for this entity first.") raise ValidationError(self.eidfrom, {qname: msg}) - if entity.current_workflow and wf.eid != entity.current_workflow.eid: + if iworkflowable.current_workflow and wf.eid != iworkflowable.current_workflow.eid: qname = role_name('in_state', 'subject') msg = session._("state doesn't belong to entity's current workflow") raise ValidationError(self.eidfrom, {qname: msg}) @@ -359,7 +364,7 @@ def __call__(self): entity = self._cw.entity_from_eid(self.eidfrom) - typewf = entity.cwetype_workflow() + typewf = entity.cw_adapt_to('IWorkflowable').cwetype_workflow() if typewf is not None: _WorkflowChangedOp(self._cw, eid=self.eidfrom, wfeid=typewf.eid) diff -r b5640328ffad -r 9db65b381028 i18n/fr.po --- a/i18n/fr.po Thu Jul 01 09:23:39 2010 +0200 +++ b/i18n/fr.po Thu Jul 01 17:06:37 2010 +0200 @@ -535,7 +535,7 @@ msgstr "Nouvelle transition workflow" msgid "No result matching query" -msgstr "aucun résultat" +msgstr "Aucun résultat ne correspond à la requête" msgid "Non exhaustive list of views that may apply to entities of this type" msgstr "Liste non exhausite des vues s'appliquant à ce type d'entité" diff -r b5640328ffad -r 9db65b381028 interfaces.py --- a/interfaces.py Thu Jul 01 09:23:39 2010 +0200 +++ b/interfaces.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,68 +15,24 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -""" -Standard interfaces. +"""Standard interfaces. Deprecated in favor of adapters. .. note:: - The `implements` selector matches not only entity classes but also - their interfaces. Writing __select__ = implements('IGeocodable') is - a perfectly fine thing to do. + The `implements` selector used to match not only entity classes but also their + interfaces. This will disappear in a future version. You should define an + adapter for that interface and use `adaptable('MyIFace')` selector on appobjects + that require that interface. + """ __docformat__ = "restructuredtext en" from logilab.common.interface import Interface -class IEmailable(Interface): - """interface for emailable entities""" - def get_email(self): - """return email address""" - - @classmethod - def allowed_massmail_keys(cls): - """returns a set of allowed email substitution keys - - The default is to return the entity's attribute list but an - entity class might override this method to allow extra keys. - For instance, the Person class might want to return a `companyname` - key. - """ - - def as_email_context(self): - """returns the dictionary as used by the sendmail controller to - build email bodies. - - NOTE: the dictionary keys should match the list returned by the - `allowed_massmail_keys` method. 
- """ - - -class IWorkflowable(Interface): - """interface for entities dealing with a specific workflow""" - # XXX to be completed, see cw.entities.wfobjs.WorkflowableMixIn - - @property - def state(self): - """return current state name""" - - def change_state(self, stateeid, trcomment=None, trcommentformat=None): - """change the entity's state to the state of the given name in entity's - workflow - """ - - def latest_trinfo(self): - """return the latest transition information for this entity - """ - - +# XXX deprecates in favor of IProgressAdapter class IProgress(Interface): - """something that has a cost, a state and a progression - - Take a look at cubicweb.mixins.ProgressMixIn for some - default implementations - """ + """something that has a cost, a state and a progression""" @property def cost(self): @@ -112,7 +68,7 @@ def progress(self): """returns the % progress of the task item""" - +# XXX deprecates in favor of IMileStoneAdapter class IMileStone(IProgress): """represents an ITask's item""" @@ -135,7 +91,132 @@ def contractors(self): """returns the list of persons supposed to work on this task""" +# XXX deprecates in favor of IEmbedableAdapter +class IEmbedable(Interface): + """interface for embedable entities""" + def embeded_url(self): + """embed action interface""" + +# XXX deprecates in favor of ICalendarAdapter +class ICalendarViews(Interface): + """calendar views interface""" + def matching_dates(self, begin, end): + """ + :param begin: day considered as begin of the range (`DateTime`) + :param end: day considered as end of the range (`DateTime`) + + :return: + a list of dates (`DateTime`) in the range [`begin`, `end`] on which + this entity apply + """ + +# XXX deprecates in favor of ICalendarableAdapter +class ICalendarable(Interface): + """interface for items that do have a begin date 'start' and an end date 'stop' + """ + + @property + def start(self): + """return start date""" + + @property + def stop(self): + """return stop state""" + +# XXX deprecates in favor of ICalendarableAdapter +class ITimetableViews(Interface): + """timetable views interface""" + def timetable_date(self): + """XXX explain + + :return: date (`DateTime`) + """ + +# XXX deprecates in favor of IGeocodableAdapter +class IGeocodable(Interface): + """interface required by geocoding views such as gmap-view""" + + @property + def latitude(self): + """returns the latitude of the entity""" + + @property + def longitude(self): + """returns the longitude of the entity""" + + def marker_icon(self): + """returns the icon that should be used as the marker""" + +# XXX deprecates in favor of ISIOCItemAdapter +class ISiocItem(Interface): + """interface for entities which may be represented as an ISIOC item""" + + def isioc_content(self): + """return item's content""" + + def isioc_container(self): + """return container entity""" + + def isioc_type(self): + """return container type (post, BlogPost, MailMessage)""" + + def isioc_replies(self): + """return replies items""" + + def isioc_topics(self): + """return topics items""" + +# XXX deprecates in favor of ISIOCContainerAdapter +class ISiocContainer(Interface): + """interface for entities which may be represented as an ISIOC container""" + + def isioc_type(self): + """return container type (forum, Weblog, MailingList)""" + + def isioc_items(self): + """return contained items""" + +# XXX deprecates in favor of IEmailableAdapter +class IFeed(Interface): + """interface for entities with rss flux""" + + def rss_feed_url(self): + """""" + +# XXX deprecates in favor of 
IDownloadableAdapter +class IDownloadable(Interface): + """interface for downloadable entities""" + + def download_url(self): # XXX not really part of this interface + """return an url to download entity's content""" + def download_content_type(self): + """return MIME type of the downloadable content""" + def download_encoding(self): + """return encoding of the downloadable content""" + def download_file_name(self): + """return file name of the downloadable content""" + def download_data(self): + """return actual data of the downloadable content""" + +# XXX deprecates in favor of IPrevNextAdapter +class IPrevNext(Interface): + """interface for entities which can be linked to a previous and/or next + entity + """ + + def next_entity(self): + """return the 'next' entity""" + def previous_entity(self): + """return the 'previous' entity""" + +# XXX deprecates in favor of IBreadCrumbsAdapter +class IBreadCrumbs(Interface): + + def breadcrumbs(self, view, recurs=False): + pass + +# XXX deprecates in favor of ITreeAdapter class ITree(Interface): def parent(self): @@ -159,141 +240,3 @@ def root(self): """returns the root object""" - -## web specific interfaces #################################################### - - -class IPrevNext(Interface): - """interface for entities which can be linked to a previous and/or next - entity - """ - - def next_entity(self): - """return the 'next' entity""" - def previous_entity(self): - """return the 'previous' entity""" - - -class IBreadCrumbs(Interface): - """interface for entities which can be "located" on some path""" - - # XXX fix recurs ! - def breadcrumbs(self, view, recurs=False): - """return a list containing some: - - * tuple (url, label) - * entity - * simple label string - - defining path from a root to the current view - - the main view is given as argument so breadcrumbs may vary according - to displayed view (may be None). When recursing on a parent entity, - the `recurs` argument should be set to True. 
- """ - - -class IDownloadable(Interface): - """interface for downloadable entities""" - - def download_url(self): # XXX not really part of this interface - """return an url to download entity's content""" - def download_content_type(self): - """return MIME type of the downloadable content""" - def download_encoding(self): - """return encoding of the downloadable content""" - def download_file_name(self): - """return file name of the downloadable content""" - def download_data(self): - """return actual data of the downloadable content""" - - -class IEmbedable(Interface): - """interface for embedable entities""" - - def embeded_url(self): - """embed action interface""" - -class ICalendarable(Interface): - """interface for items that do have a begin date 'start' and an end date 'stop' - """ - - @property - def start(self): - """return start date""" - - @property - def stop(self): - """return stop state""" - -class ICalendarViews(Interface): - """calendar views interface""" - def matching_dates(self, begin, end): - """ - :param begin: day considered as begin of the range (`DateTime`) - :param end: day considered as end of the range (`DateTime`) - - :return: - a list of dates (`DateTime`) in the range [`begin`, `end`] on which - this entity apply - """ - -class ITimetableViews(Interface): - """timetable views interface""" - def timetable_date(self): - """XXX explain - - :return: date (`DateTime`) - """ - -class IGeocodable(Interface): - """interface required by geocoding views such as gmap-view""" - - @property - def latitude(self): - """returns the latitude of the entity""" - - @property - def longitude(self): - """returns the longitude of the entity""" - - def marker_icon(self): - """returns the icon that should be used as the marker - (returns None for default) - """ - -class IFeed(Interface): - """interface for entities with rss flux""" - - def rss_feed_url(self): - """return an url which layout sub-entities item - """ - -class ISiocItem(Interface): - """interface for entities (which are item - in sioc specification) with sioc views""" - - def isioc_content(self): - """return content entity""" - - def isioc_container(self): - """return container entity""" - - def isioc_type(self): - """return container type (post, BlogPost, MailMessage)""" - - def isioc_replies(self): - """return replies items""" - - def isioc_topics(self): - """return topics items""" - -class ISiocContainer(Interface): - """interface for entities (which are container - in sioc specification) with sioc views""" - - def isioc_type(self): - """return container type (forum, Weblog, MailingList)""" - - def isioc_items(self): - """return contained items""" diff -r b5640328ffad -r 9db65b381028 mail.py --- a/mail.py Thu Jul 01 09:23:39 2010 +0200 +++ b/mail.py Thu Jul 01 17:06:37 2010 +0200 @@ -184,7 +184,7 @@ # previous email if not self.msgid_timestamp: refs = [self.construct_message_id(eid) - for eid in entity.notification_references(self)] + for eid in entity.cw_adapt_to('INotifiable').notification_references(self)] else: refs = () msgid = self.construct_message_id(entity.eid) @@ -198,7 +198,7 @@ if isinstance(something, Entity): # hi-jack self._cw to get a session for the returned user self._cw = self._cw.hijack_user(something) - emailaddr = something.get_email() + emailaddr = something.cw_adapt_to('IEmailable').get_email() else: emailaddr, lang = something self._cw.set_language(lang) @@ -246,7 +246,8 @@ # email generation helpers ################################################# def construct_message_id(self, eid): - return 
construct_message_id(self._cw.vreg.config.appid, eid, self.msgid_timestamp) + return construct_message_id(self._cw.vreg.config.appid, eid, + self.msgid_timestamp) def format_field(self, attr, value): return ':%(attr)s: %(value)s' % {'attr': attr, 'value': value} diff -r b5640328ffad -r 9db65b381028 migration.py --- a/migration.py Thu Jul 01 09:23:39 2010 +0200 +++ b/migration.py Thu Jul 01 17:06:37 2010 +0200 @@ -111,7 +111,7 @@ self.config = config if config: # no config on shell to a remote instance - self.config.init_log(logthreshold=logging.ERROR, debug=True) + self.config.init_log(logthreshold=logging.ERROR) # 0: no confirmation, 1: only main commands confirmed, 2 ask for everything self.verbosity = verbosity self.need_wrap = True @@ -281,14 +281,25 @@ return context def cmd_process_script(self, migrscript, funcname=None, *args, **kwargs): - """execute a migration script - in interactive mode, display the migration script path, ask for - confirmation and execute it if confirmed + """execute a migration script in interactive mode + + Display the migration script path, ask for confirmation and execute it + if confirmed + + Context environment can have these variables defined: + - __name__ : will be determine by funcname parameter + - __file__ : is the name of the script if it exists + - __args__ : script arguments coming from command-line + + :param migrscript: name of the script + :param funcname: defines __name__ inside the shell (or use __main__) + :params args: optional arguments for funcname + :keyword scriptargs: optional arguments of the script """ migrscript = os.path.normpath(migrscript) if migrscript.endswith('.py'): script_mode = 'python' - elif migrscript.endswith('.txt') or migrscript.endswith('.rst'): + elif migrscript.endswith(('.txt', '.rst')): script_mode = 'doctest' else: raise Exception('This is not a valid cubicweb shell input') @@ -300,7 +311,8 @@ pyname = '__main__' else: pyname = splitext(basename(migrscript))[0] - scriptlocals.update({'__file__': migrscript, '__name__': pyname}) + scriptlocals.update({'__file__': migrscript, '__name__': pyname, + '__args__': kwargs.pop("scriptargs", [])}) execfile(migrscript, scriptlocals) if funcname is not None: try: diff -r b5640328ffad -r 9db65b381028 misc/migration/3.9.0_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/misc/migration/3.9.0_Any.py Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,3 @@ +if repo.system_source.dbdriver == 'postgres': + sql('ALTER TABLE appears ADD COLUMN weight float') + sql('UPDATE appears SET weight=1.0 ') diff -r b5640328ffad -r 9db65b381028 mixins.py --- a/mixins.py Thu Jul 01 09:23:39 2010 +0200 +++ b/mixins.py Thu Jul 01 17:06:37 2010 +0200 @@ -21,9 +21,10 @@ from itertools import chain from logilab.common.decorators import cached +from logilab.common.deprecation import deprecated, class_deprecated from cubicweb.selectors import implements -from cubicweb.interfaces import IEmailable, ITree +from cubicweb.interfaces import ITree class TreeMixIn(object): @@ -33,6 +34,9 @@ tree_attribute, parent_target and children_target class attribute to benefit from this default implementation """ + __metaclass__ = class_deprecated + __deprecation_warning__ = '[3.9] TreeMixIn is deprecated, use/override ITreeAdapter instead' + tree_attribute = None # XXX misnamed parent_target = 'subject' @@ -117,16 +121,6 @@ return chain([self], _uptoroot(self)) return _uptoroot(self) - def notification_references(self, view): - """used to control References field of email send on notification - for this entity. 
`view` is the notification view. - - Should return a list of eids which can be used to generate message ids - of previously sent email - """ - return self.path()[:-1] - - ## ITree interface ######################################################## def parent(self): """return the parent entity if any, else None (e.g. if we are on the @@ -151,7 +145,7 @@ entities=entities) def children_rql(self): - return self.related_rql(self.tree_attribute, self.children_target) + return self.cw_related_rql(self.tree_attribute, self.children_target) def is_leaf(self): return len(self.children()) == 0 @@ -171,8 +165,7 @@ NOTE: The default implementation is based on the primary_email / use_email scheme """ - __implements__ = (IEmailable,) - + @deprecated("[3.9] use entity.cw_adapt_to('IEmailable').get_email()") def get_email(self): if getattr(self, 'primary_email', None): return self.primary_email[0].address @@ -180,28 +173,6 @@ return self.use_email[0].address return None - @classmethod - def allowed_massmail_keys(cls): - """returns a set of allowed email substitution keys - - The default is to return the entity's attribute list but an - entity class might override this method to allow extra keys. - For instance, the Person class might want to return a `companyname` - key. - """ - return set(rschema.type - for rschema, attrtype in cls.e_schema.attribute_definitions() - if attrtype.type not in ('Password', 'Bytes')) - - def as_email_context(self): - """returns the dictionary as used by the sendmail controller to - build email bodies. - - NOTE: the dictionary keys should match the list returned by the - `allowed_massmail_keys` method. - """ - return dict( (attr, getattr(self, attr)) for attr in self.allowed_massmail_keys() ) - """pluggable mixins system: plug classes registered in MI_REL_TRIGGERS on entity classes which have the relation described by the dict's key. 
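The deletions above are part of the move from interface mixins to adapters: instead of calling `get_email()` or workflow methods directly on the entity, client code asks for an adapter with `cw_adapt_to` and calls that. A minimal calling-side sketch, where `user` stands for any CWUser entity and the 'deactivate' transition assumes the stock CWUser workflow:

.. sourcecode:: python

    # pre 3.9: interface methods were mixed into the entity class
    #   address = user.get_email()

    # 3.9: fetch the adapter from the 'adapters' registry
    iemailable = user.cw_adapt_to('IEmailable')
    if iemailable is not None:        # None when no adapter is selectable
        address = iemailable.get_email()

    # same move for workflow operations
    iworkflowable = user.cw_adapt_to('IWorkflowable')
    if iworkflowable is not None:
        iworkflowable.fire_transition('deactivate')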
@@ -215,7 +186,7 @@ } - +# XXX move to cubicweb.web.views.treeview once we delete usage from this file def _done_init(done, view, row, col): """handle an infinite recursion safety belt""" if done is None: @@ -223,7 +194,7 @@ entity = view.cw_rset.get_entity(row, col) if entity.eid in done: msg = entity._cw._('loop in %(rel)s relation (%(eid)s)') % { - 'rel': entity.tree_attribute, + 'rel': entity.cw_adapt_to('ITree').tree_relation, 'eid': entity.eid } return None, msg @@ -233,16 +204,20 @@ class TreeViewMixIn(object): """a recursive tree view""" + __metaclass__ = class_deprecated + __deprecation_warning__ = '[3.9] TreeViewMixIn is deprecated, use/override BaseTreeView instead' + __regid__ = 'tree' + __select__ = implements(ITree) item_vid = 'treeitem' - __select__ = implements(ITree) def call(self, done=None, **kwargs): if done is None: done = set() super(TreeViewMixIn, self).call(done=done, **kwargs) - def cell_call(self, row, col=0, vid=None, done=None, **kwargs): + def cell_call(self, row, col=0, vid=None, done=None, maxlevel=None, **kwargs): + assert maxlevel is None or maxlevel > 0 done, entity = _done_init(done, self, row, col) if done is None: # entity is actually an error message @@ -250,8 +225,14 @@ return self.open_item(entity) entity.view(vid or self.item_vid, w=self.w, **kwargs) + if maxlevel is not None: + maxlevel -= 1 + if maxlevel == 0: + self.close_item(entity) + return relatedrset = entity.children(entities=False) - self.wview(self.__regid__, relatedrset, 'null', done=done, **kwargs) + self.wview(self.__regid__, relatedrset, 'null', done=done, + maxlevel=maxlevel, **kwargs) self.close_item(entity) def open_item(self, entity): @@ -262,6 +243,8 @@ class TreePathMixIn(object): """a recursive path view""" + __metaclass__ = class_deprecated + __deprecation_warning__ = '[3.9] TreePathMixIn is deprecated, use/override TreePathView instead' __regid__ = 'path' item_vid = 'oneline' separator = u' > ' @@ -286,6 +269,8 @@ class ProgressMixIn(object): """provide a default implementations for IProgress interface methods""" + __metaclass__ = class_deprecated + __deprecation_warning__ = '[3.9] ProgressMixIn is deprecated, use/override IProgressAdapter instead' @property def cost(self): diff -r b5640328ffad -r 9db65b381028 mttransforms.py --- a/mttransforms.py Thu Jul 01 09:23:39 2010 +0200 +++ b/mttransforms.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
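The mixin classes above are not removed outright; they are tagged with the `class_deprecated` metaclass so that merely instantiating them points users at the adapter-based replacement. The same pattern, sketched on a made-up mixin:

.. sourcecode:: python

    from logilab.common.deprecation import class_deprecated

    class MyOldMixIn(object):
        # instantiating this class (or any class inheriting it) emits a
        # DeprecationWarning carrying the message below
        __metaclass__ = class_deprecated
        __deprecation_warning__ = '[3.9] MyOldMixIn is deprecated, use/override MyAdapter instead'

        def some_helper(self):
            return 42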
-"""mime type transformation engine for cubicweb, based on mtconverter +"""mime type transformation engine for cubicweb, based on mtconverter""" -""" __docformat__ = "restructuredtext en" from logilab import mtconverter diff -r b5640328ffad -r 9db65b381028 req.py --- a/req.py Thu Jul 01 09:23:39 2010 +0200 +++ b/req.py Thu Jul 01 17:06:37 2010 +0200 @@ -279,7 +279,7 @@ user = self.user userinfo['login'] = user.login userinfo['name'] = user.name() - userinfo['email'] = user.get_email() + userinfo['email'] = user.cw_adapt_to('IEmailable').get_email() return userinfo def is_internal_session(self): @@ -373,11 +373,11 @@ raise ValueError(self._('can\'t parse %(value)r (expected %(format)s)') % {'value': value, 'format': format}) - # abstract methods to override according to the web front-end ############# - def base_url(self): """return the root url of the instance""" - raise NotImplementedError + return self.vreg.config['base-url'] + + # abstract methods to override according to the web front-end ############# def describe(self, eid): """return a tuple (type, sourceuri, extid) for the entity with id """ diff -r b5640328ffad -r 9db65b381028 rset.py --- a/rset.py Thu Jul 01 09:23:39 2010 +0200 +++ b/rset.py Thu Jul 01 17:06:37 2010 +0200 @@ -77,10 +77,16 @@ rows = self.rows if len(rows) > 10: rows = rows[:10] + ['...'] + if len(rows) > 1: + # add a line break before first entity if more that one. + pattern = '' + else: + pattern = '' + if not self.description: - return '' % (self.rql, len(self.rows), + return pattern % (self.rql, len(self.rows), '\n'.join(str(r) for r in rows)) - return '' % (self.rql, len(self.rows), + return pattern % (self.rql, len(self.rows), '\n'.join('%s (%s)' % (r, d) for r, d in zip(rows, self.description))) @@ -453,7 +459,7 @@ etype = self.description[row][col] entity = self.req.vreg['etypes'].etype_class(etype)(req, rset=self, row=row, col=col) - entity.set_eid(eid) + entity.eid = eid # cache entity req.set_entity_cache(entity) eschema = entity.e_schema @@ -494,7 +500,7 @@ rrset.req = req else: rrset = self._build_entity(row, outerselidx).as_rset() - entity.set_related_cache(attr, role, rrset) + entity.cw_set_relation_cache(attr, role, rrset) return entity @cached diff -r b5640328ffad -r 9db65b381028 schema.py --- a/schema.py Thu Jul 01 09:23:39 2010 +0200 +++ b/schema.py Thu Jul 01 17:06:37 2010 +0200 @@ -572,7 +572,13 @@ rdef.name = rdef.name.lower() rdef.subject = bw_normalize_etype(rdef.subject) rdef.object = bw_normalize_etype(rdef.object) - rdefs = super(CubicWebSchema, self).add_relation_def(rdef) + try: + rdefs = super(CubicWebSchema, self).add_relation_def(rdef) + except BadSchemaDefinition: + reversed_etype_map = dict( (v, k) for k, v in ETYPE_NAME_MAP.iteritems() ) + if rdef.subject in reversed_etype_map or rdef.object in reversed_etype_map: + return + raise if rdefs: try: self._eid_index[rdef.eid] = rdefs diff -r b5640328ffad -r 9db65b381028 selectors.py --- a/selectors.py Thu Jul 01 09:23:39 2010 +0200 +++ b/selectors.py Thu Jul 01 17:06:37 2010 +0200 @@ -169,7 +169,7 @@ or below the :func:`objectify_selector` decorator of your selector function so it gets traceable when :class:`traced_selection` is activated (see :ref:`DebuggingSelectors`). -.. autofunction:: cubicweb.selectors.lltrace +.. autofunction:: cubicweb.appobject.lltrace .. note:: Selectors __call__ should *always* return a positive integer, and shall never @@ -183,10 +183,10 @@ Once in a while, one needs to understand why a view (or any application object) is, or is not selected appropriately. 
Looking at which selectors fired (or did -not) is the way. The :class:`cubicweb.selectors.traced_selection` context +not) is the way. The :class:`cubicweb.appobject.traced_selection` context manager to help with that, *if you're running your instance in debug mode*. -.. autoclass:: cubicweb.selectors.traced_selection +.. autoclass:: cubicweb.appobject.traced_selection .. |cubicweb| replace:: *CubicWeb* @@ -204,87 +204,10 @@ from cubicweb import Unauthorized, NoSelectableObject, NotAnEntity, role # even if not used, let yes here so it's importable through this module -from cubicweb.appobject import Selector, objectify_selector, yes -from cubicweb.vregistry import class_regid -from cubicweb.cwconfig import CubicWebConfiguration +from cubicweb.appobject import Selector, objectify_selector, lltrace, yes from cubicweb.schema import split_expression -# helpers for debugging selectors -SELECTOR_LOGGER = logging.getLogger('cubicweb.selectors') -TRACED_OIDS = None - -def _trace_selector(cls, selector, args, ret): - # /!\ lltrace decorates pure function or __call__ method, this - # means argument order may be different - if isinstance(cls, Selector): - selname = str(cls) - vobj = args[0] - else: - selname = selector.__name__ - vobj = cls - if TRACED_OIDS == 'all' or class_regid(vobj) in TRACED_OIDS: - #SELECTOR_LOGGER.warning('selector %s returned %s for %s', selname, ret, cls) - print '%s -> %s for %s(%s)' % (selname, ret, vobj, vobj.__regid__) - -def lltrace(selector): - """use this decorator on your selectors so the becomes traceable with - :class:`traced_selection` - """ - # don't wrap selectors if not in development mode - if CubicWebConfiguration.mode == 'system': # XXX config.debug - return selector - def traced(cls, *args, **kwargs): - ret = selector(cls, *args, **kwargs) - if TRACED_OIDS is not None: - _trace_selector(cls, selector, args, ret) - return ret - traced.__name__ = selector.__name__ - traced.__doc__ = selector.__doc__ - return traced - -class traced_selection(object): - """ - Typical usage is : - - .. sourcecode:: python - - >>> from cubicweb.selectors import traced_selection - >>> with traced_selection(): - ... # some code in which you want to debug selectors - ... # for all objects - - Don't forget the 'from __future__ import with_statement' at the module top-level - if you're using python prior to 2.6. - - This will yield lines like this in the logs:: - - selector one_line_rset returned 0 for - - You can also give to :class:`traced_selection` the identifiers of objects on - which you want to debug selection ('oid1' and 'oid2' in the example above). - - .. sourcecode:: python - - >>> with traced_selection( ('regid1', 'regid2') ): - ... # some code in which you want to debug selectors - ... # for objects with __regid__ 'regid1' and 'regid2' - - A potentially usefull point to set up such a tracing function is - the `cubicweb.vregistry.Registry.select` method body. 
- """ - - def __init__(self, traced='all'): - self.traced = traced - - def __enter__(self): - global TRACED_OIDS - TRACED_OIDS = self.traced - - def __exit__(self, exctype, exc, traceback): - global TRACED_OIDS - TRACED_OIDS = None - return traceback is None - +from cubicweb.appobject import traced_selection # XXX for bw compat def score_interface(etypesreg, cls_or_inst, cls, iface): """Return XXX if the give object (maybe an instance or class) implements @@ -301,6 +224,7 @@ if iface is basecls: return index + 3 return 0 + # XXX iface in implements deprecated in 3.9 if implements_iface(cls_or_inst, iface): # implenting an interface takes precedence other special Any interface return 2 @@ -374,14 +298,17 @@ self.accept_none = accept_none @lltrace - def __call__(self, cls, req, rset=None, row=None, col=0, **kwargs): + def __call__(self, cls, req, rset=None, row=None, col=0, accept_none=None, + **kwargs): if kwargs.get('entity'): return self.score_class(kwargs['entity'].__class__, req) if not rset: return 0 score = 0 if row is None: - if not self.accept_none: + if accept_none is None: + accept_none = self.accept_none + if not accept_none: if any(rset[i][col] is None for i in xrange(len(rset))): return 0 for etype in rset.column_types(col): @@ -441,7 +368,8 @@ """ @lltrace - def __call__(self, cls, req, rset=None, row=None, col=0, **kwargs): + def __call__(self, cls, req, rset=None, row=None, col=0, accept_none=None, + **kwargs): if not rset and not kwargs.get('entity'): return 0 score = 0 @@ -449,9 +377,11 @@ score = self.score_entity(kwargs['entity']) elif row is None: col = col or 0 + if accept_none is None: + accept_none = self.accept_none for row, rowvalue in enumerate(rset.rows): if rowvalue[col] is None: # outer join - if not self.accept_none: + if not accept_none: return 0 continue escore = self.score(req, rset, row, col) @@ -527,19 +457,42 @@ * `registry`, a registry name - * `regid`, an object identifier in this registry + * `regids`, object identifiers in this registry, one of them should be + selectable. """ - def __init__(self, registry, regid): + selectable_score = 1 + def __init__(self, registry, *regids): self.registry = registry - self.regid = regid + self.regids = regids + + @lltrace + def __call__(self, cls, req, **kwargs): + for regid in self.regids: + try: + req.vreg[self.registry].select(regid, req, **kwargs) + return self.selectable_score + except NoSelectableObject: + return 0 + + +class adaptable(appobject_selectable): + """Return 1 if another appobject is selectable using the same input context. + + Initializer arguments: + + * `regids`, adapter identifiers (e.g. interface names) to which the context + (usually entities) should be adaptable. One of them should be selectable + when multiple identifiers are given. 
+ """ + # implementing an interface takes precedence other special Any interface, + # hence return 2 (implements('Any') score is 1) + selectable_score = 2 + def __init__(self, *regids): + super(adaptable, self).__init__('adapters', *regids) def __call__(self, cls, req, **kwargs): - try: - req.vreg[self.registry].select(self.regid, req, **kwargs) - return 1 - except NoSelectableObject: - return 0 - + kwargs.setdefault('accept_none', False) + return super(adaptable, self).__call__(cls, req, **kwargs) # rset selectors ############################################################## @@ -585,8 +538,8 @@ @objectify_selector @lltrace def one_line_rset(cls, req, rset=None, row=None, **kwargs): - """Return 1 if the result set is of size 1 or if a specific row in the - result set is specified ('row' argument). + """Return 1 if the result set is of size 1, or greater but a specific row in + the result set is specified ('row' argument). """ if rset is not None and (row is not None or rset.rowcount == 1): return 1 @@ -594,7 +547,7 @@ class multi_lines_rset(Selector): - """If `nb`is specified, return 1 if the result set has exactly `nb` row of + """If `nb` is specified, return 1 if the result set has exactly `nb` row of result. Else (`nb` is None), return 1 if the result set contains *at least* two rows. """ @@ -608,11 +561,11 @@ @lltrace def __call__(self, cls, req, rset=None, **kwargs): - return rset is not None and self.match_expected(rset.rowcount) + return int(rset is not None and self.match_expected(rset.rowcount)) class multi_columns_rset(multi_lines_rset): - """If `nb`is specified, return 1 if the result set has exactly `nb` column + """If `nb` is specified, return 1 if the result set has exactly `nb` column per row. Else (`nb` is None), return 1 if the result set contains *at least* two columns per row. Return 0 for empty result set. """ @@ -738,7 +691,12 @@ .. note:: when interface is an entity class, the score will reflect class proximity so the most specific object will be selected. + + .. note:: with cubicweb >= 3.9, you should use adapters instead of + interface, so no interface should be given to this selector. Use + :class:`adaptable` instead. """ + def score_class(self, eclass, req): return self.score_interfaces(req, eclass, eclass) @@ -765,6 +723,26 @@ self.score_entity = intscore +class has_mimetype(EntitySelector): + """Return 1 if the entity adapt to IDownloadable and has the given MIME type. + + You can give 'image/' to match any image for instance, or 'image/png' to match + only PNG images. + """ + def __init__(self, mimetype, once_is_enough=False): + super(has_mimetype, self).__init__(once_is_enough) + self.mimetype = mimetype + + def score_entity(self, entity): + idownloadable = entity.cw_adapt_to('IDownloadable') + if idownloadable is None: + return 0 + mt = idownloadable.download_content_type() + if not (mt and mt.startswith(self.mimetype)): + return 0 + return 1 + + class relation_possible(EntitySelector): """Return 1 for entity that supports the relation, provided that the request's user may do some `action` on it (see below). 
@@ -1000,7 +978,7 @@ return self.score(req, rset, row, col) def score_entity(self, entity): - if entity.has_perm(self.action): + if entity.cw_has_perm(self.action): return 1 return 0 @@ -1291,21 +1269,26 @@ class is_in_state(score_entity): """return 1 if entity is in one of the states given as argument list - you should use this instead of your own score_entity x: x.state == 'bla' - selector to avoid some gotchas: + you should use this instead of your own :class:`score_entity` selector to + avoid some gotchas: * possible views gives a fake entity with no state - * you must use the latest tr info, not entity.state for repository side + * you must use the latest tr info, not entity.in_state for repository side checking of the current state """ def __init__(self, *states): def score(entity, states=set(states)): + trinfo = entity.cw_adapt_to('IWorkflowable').latest_trinfo() try: - return entity.latest_trinfo().new_state.name in states + return trinfo.new_state.name in states except AttributeError: return None super(is_in_state, self).__init__(score) +@objectify_selector +def debug_mode(cls, req, rset=None, **kwargs): + """Return 1 if running in debug mode""" + return req.vreg.config.debugmode and 1 or 0 ## deprecated stuff ############################################################ diff -r b5640328ffad -r 9db65b381028 server/migractions.py --- a/server/migractions.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/migractions.py Thu Jul 01 17:06:37 2010 +0200 @@ -51,7 +51,8 @@ from yams.schema2sql import eschema2sql, rschema2sql from cubicweb import AuthenticationError -from cubicweb.schema import (META_RTYPES, VIRTUAL_RTYPES, +from cubicweb.schema import (ETYPE_NAME_MAP, META_RTYPES, VIRTUAL_RTYPES, + PURE_VIRTUAL_RTYPES, CubicWebRelationSchema, order_eschemas) from cubicweb.dbapi import get_repository, repo_connect from cubicweb.migration import MigrationHelper, yes @@ -856,9 +857,23 @@ `oldname` is a string giving the name of the existing entity type `newname` is a string giving the name of the renamed entity type """ - self.rqlexec('SET ET name %(newname)s WHERE ET is CWEType, ET name %(oldname)s', - {'newname' : unicode(newname), 'oldname' : oldname}, - ask_confirm=False) + schema = self.repo.schema + if newname in schema: + assert oldname in ETYPE_NAME_MAP, \ + '%s should be mappend to %s in ETYPE_NAME_MAP' % (oldname, newname) + attrs = ','.join([SQL_PREFIX + rschema.type + for rschema in schema[newname].subject_relations() + if (rschema.final or rschema.inlined) + and not rschema in PURE_VIRTUAL_RTYPES]) + self.sqlexec('INSERT INTO %s%s(%s) SELECT %s FROM %s%s' % ( + SQL_PREFIX, newname, attrs, attrs, SQL_PREFIX, oldname)) + # use rql to propagate deletion. XXX we may miss some stuff since + # only the bootstrap schema is set. 
+ self.rqlexec('DELETE CWEType ET WHERE ET name %(n)s', {'n': oldname}) + else: + self.rqlexec('SET ET name %(newname)s WHERE ET is CWEType, ET name %(oldname)s', + {'newname' : unicode(newname), 'oldname' : oldname}, + ask_confirm=False) if commit: self.commit() @@ -1153,10 +1168,10 @@ if commit: self.commit() - @deprecated('[3.5] use entity.fire_transition("transition") or entity.change_state("state")', - stacklevel=3) + @deprecated('[3.5] use iworkflowable.fire_transition("transition") or ' + 'iworkflowable.change_state("state")', stacklevel=3) def cmd_set_state(self, eid, statename, commit=False): - self._cw.entity_from_eid(eid).change_state(statename) + self._cw.entity_from_eid(eid).cw_adapt_to('IWorkflowable').change_state(statename) if commit: self.commit() diff -r b5640328ffad -r 9db65b381028 server/msplanner.py --- a/server/msplanner.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/msplanner.py Thu Jul 01 17:06:37 2010 +0200 @@ -96,7 +96,7 @@ from rql.stmts import Union, Select from rql.nodes import (VariableRef, Comparison, Relation, Constant, Variable, - Not, Exists) + Not, Exists, SortTerm, Function) from cubicweb import server from cubicweb.utils import make_uid @@ -1330,6 +1330,12 @@ orderby.append) if orderby: newroot.set_orderby(orderby) + elif rqlst.orderby: + for sortterm in rqlst.orderby: + if any(f for f in sortterm.iget_nodes(Function) if f.name == 'FTIRANK'): + newnode, oldnode = sortterm.accept(self, newroot, terms) + if newnode is not None: + newroot.add_sort_term(newnode) self.process_selection(newroot, terms, rqlst) elif not newroot.where: # no restrictions have been copied, just select terms and add @@ -1530,12 +1536,38 @@ copy.operator = '=' return copy, node + def visit_function(self, node, newroot, terms): + if node.name == 'FTIRANK': + # FTIRANK is somewhat special... 
Rank function should be included in + # the same query has the has_text relation, potentially added to + # selection for latter usage + if not self.hasaggrstep and self.final and node not in self.skip: + return self.visit_default(node, newroot, terms) + elif any(s for s in self.sources if s.uri != 'system'): + return None, node + # p = node.parent + # while p is not None and not isinstance(p, SortTerm): + # p = p.parent + # if isinstance(p, SortTerm): + if not self.hasaggrstep and self.final and node in self.skip: + return Constant(self.skip[node], 'Int'), node + # XXX only if not yet selected + newroot.append_selected(node.copy(newroot)) + self.skip[node] = len(newroot.selection) + return None, node + return self.visit_default(node, newroot, terms) + def visit_default(self, node, newroot, terms): subparts, node = self._visit_children(node, newroot, terms) return copy_node(newroot, node, subparts), node - visit_mathexpression = visit_constant = visit_function = visit_default - visit_sort = visit_sortterm = visit_default + visit_mathexpression = visit_constant = visit_default + + def visit_sortterm(self, node, newroot, terms): + subparts, node = self._visit_children(node, newroot, terms) + if not subparts: + return None, node + return copy_node(newroot, node, subparts), node def _visit_children(self, node, newroot, terms): subparts = [] diff -r b5640328ffad -r 9db65b381028 server/mssteps.py --- a/server/mssteps.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/mssteps.py Thu Jul 01 17:06:37 2010 +0200 @@ -140,13 +140,6 @@ def mytest_repr(self): """return a representation of this step suitable for test""" - sel = self.select.selection - restr = self.select.where - self.select.selection = self.selection - self.select.where = None - rql = self.select.as_string(kwargs=self.plan.args) - self.select.selection = sel - self.select.where = restr try: # rely on a monkey patch (cf unittest_querier) table = self.plan.tablesinorder[self.table] @@ -155,12 +148,19 @@ # not monkey patched table = self.table outputtable = self.outputtable - return (self.__class__.__name__, rql, self.limit, self.offset, table, - outputtable) + sql = self.get_sql().replace(self.table, table) + return (self.__class__.__name__, sql, outputtable) def execute(self): """execute this step""" self.execute_children() + sql = self.get_sql() + if self.outputtable: + self.plan.create_temp_table(self.outputtable) + sql = 'INSERT INTO %s %s' % (self.outputtable, sql) + return self.plan.sqlexec(sql, self.plan.args) + + def get_sql(self): self.inputmap = inputmap = self.children[-1].outputmap # get the select clause clause = [] @@ -223,17 +223,15 @@ sql.append('LIMIT %s' % self.limit) if self.offset: sql.append('OFFSET %s' % self.offset) - #print 'DATA', plan.sqlexec('SELECT * FROM %s' % self.table, None) - sql = ' '.join(sql) - if self.outputtable: - self.plan.create_temp_table(self.outputtable) - sql = 'INSERT INTO %s %s' % (self.outputtable, sql) - return self.plan.sqlexec(sql, self.plan.args) + return ' '.join(sql) def visit_function(self, function): """generate SQL name for a function""" - return '%s(%s)' % (function.name, - ','.join(c.accept(self) for c in function.children)) + try: + return self.children[0].outputmap[str(function)] + except KeyError: + return '%s(%s)' % (function.name, + ','.join(c.accept(self) for c in function.children)) def visit_variableref(self, variableref): """get the sql name for a variable reference""" diff -r b5640328ffad -r 9db65b381028 server/querier.py --- a/server/querier.py Thu Jul 01 09:23:39 2010 +0200 
+++ b/server/querier.py Thu Jul 01 17:06:37 2010 +0200 @@ -17,8 +17,8 @@ # with CubicWeb. If not, see . """Helper classes to execute RQL queries on a set of sources, performing security checking and data aggregation. +""" -""" from __future__ import with_statement __docformat__ = "restructuredtext en" @@ -29,7 +29,7 @@ from logilab.common.compat import any from rql import RQLSyntaxError from rql.stmts import Union, Select -from rql.nodes import Relation, VariableRef, Constant, SubQuery +from rql.nodes import Relation, VariableRef, Constant, SubQuery, Function from cubicweb import Unauthorized, QueryError, UnknownEid, typed_eid from cubicweb import server @@ -50,7 +50,8 @@ key = term.as_string() value = '%s.C%s' % (table, i) if varmap.get(key, value) != value: - raise Exception('variable name conflict on %s' % key) + raise Exception('variable name conflict on %s: got %s / %s' + % (key, value, varmap)) varmap[key] = value # permission utilities ######################################################## @@ -285,7 +286,26 @@ for term in origselection: newselect.append_selected(term.copy(newselect)) if select.orderby: - newselect.set_orderby([s.copy(newselect) for s in select.orderby]) + sortterms = [] + for sortterm in select.orderby: + sortterms.append(sortterm.copy(newselect)) + for fnode in sortterm.get_nodes(Function): + if fnode.name == 'FTIRANK': + # we've to fetch the has_text relation as well + var = fnode.children[0].variable + rel = iter(var.stinfo['ftirels']).next() + assert not rel.ored(), 'unsupported' + newselect.add_restriction(rel.copy(newselect)) + # remove relation from the orig select and + # cleanup variable stinfo + rel.parent.remove(rel) + var.stinfo['ftirels'].remove(rel) + var.stinfo['relations'].remove(rel) + # XXX not properly re-annotated after security insertion? + newvar = newselect.get_variable(var.name) + newvar.stinfo.setdefault('ftirels', set()).add(rel) + newvar.stinfo.setdefault('relations', set()).add(rel) + newselect.set_orderby(sortterms) _expand_selection(select.orderby, selected, aliases, select, newselect) select.orderby = () # XXX dereference? 
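[Editorial aside, not part of the patch] The querier and planner changes above exist so that RQL queries may sort on, or select, the full-text rank. The query forms below are taken directly from the new tests in this changeset; `req` stands for any request or session object able to execute RQL.

.. sourcecode:: python

    # order full-text search results by rank, best match first
    rset = req.execute('Any X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"')
    # or fetch the rank alongside each matching entity
    rset = req.execute('Any X, FTIRANK(X) WHERE X has_text "cubicweb"')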
if select.groupby: @@ -562,6 +582,8 @@ # rql parsing / analysing helper self.solutions = repo.vreg.solutions rqlhelper = repo.vreg.rqlhelper + # set backend on the rql helper, will be used for function checking + rqlhelper.backend = repo.config.sources()['system']['db-driver'] self._parse = rqlhelper.parse self._annotate = rqlhelper.annotate # rql planner diff -r b5640328ffad -r 9db65b381028 server/repository.py --- a/server/repository.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/repository.py Thu Jul 01 17:06:37 2010 +0200 @@ -104,10 +104,10 @@ XXX protect pyro access """ - def __init__(self, config, vreg=None, debug=False): + def __init__(self, config, vreg=None): self.config = config if vreg is None: - vreg = cwvreg.CubicWebVRegistry(config, debug) + vreg = cwvreg.CubicWebVRegistry(config) self.vreg = vreg self.pyro_registered = False self.info('starting repository from %s', self.config.apphome) @@ -152,13 +152,6 @@ if not isinstance(session.user, InternalManager): session.user.__class__ = usercls - def _bootstrap_hook_registry(self): - """called during bootstrap since we need the metadata hooks""" - hooksdirectory = join(CW_SOFTWARE_ROOT, 'hooks') - self.vreg.init_registration([hooksdirectory]) - self.vreg.load_file(join(hooksdirectory, 'metadata.py'), - 'cubicweb.hooks.metadata') - def open_connections_pools(self): config = self.config self._available_pools = Queue.Queue() @@ -184,7 +177,9 @@ for modname in ('__init__', 'authobjs', 'wfobjs'): self.vreg.load_file(join(etdirectory, '%s.py' % modname), 'cubicweb.entities.%s' % modname) - self._bootstrap_hook_registry() + hooksdirectory = join(CW_SOFTWARE_ROOT, 'hooks') + self.vreg.load_file(join(hooksdirectory, 'metadata.py'), + 'cubicweb.hooks.metadata') elif config.read_instance_schema: # normal start: load the instance schema from the database self.fill_schema() @@ -233,8 +228,7 @@ if resetvreg: if self.config._cubes is None: self.config.init_cubes(self.get_cubes()) - # full reload of all appobjects - self.vreg.reset() + # trigger full reload of all appobjects self.vreg.set_schema(schema) else: self.vreg._set_schema(schema) @@ -391,7 +385,7 @@ raise AuthenticationError('authentication failed with all sources') cwuser = self._build_user(session, eid) if self.config.consider_user_state and \ - not cwuser.state in cwuser.AUTHENTICABLE_STATES: + not cwuser.cw_adapt_to('IWorkflowable').state in cwuser.AUTHENTICABLE_STATES: raise AuthenticationError('user is not in authenticable state') return cwuser @@ -572,7 +566,7 @@ session.close() session = Session(user, self, cnxprops) user._cw = user.cw_rset.req = session - user.clear_related_cache() + user.cw_clear_relation_cache() self._sessions[session.id] = session self.info('opened session %s for user %s', session.id, login) self.hm.call_hooks('session_open', session) @@ -931,7 +925,7 @@ self._extid_cache[cachekey] = eid self._type_source_cache[eid] = (etype, source.uri, extid) entity = source.before_entity_insertion(session, extid, etype, eid) - entity.edited_attributes = set(entity) + entity.edited_attributes = set(entity.cw_attr_cache) if source.should_call_hooks: self.hm.call_hooks('before_add_entity', session, entity=entity) # XXX call add_info with complete=False ? @@ -1041,37 +1035,32 @@ the entity instance """ # init edited_attributes before calling before_add_entity hooks - entity._is_saved = False # entity has an eid but is not yet saved - entity.edited_attributes = set(entity) - entity_ = entity.pre_add_hook() - # XXX kill that transmutation feature ! 
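[Editorial aside, not part of the patch] This changeset systematically replaces direct workflow calls on entities with the `IWorkflowable` adapter. The equivalences below are gathered from the hunks in this patch; `entity` stands for any workflowable entity such as a CWUser.

.. sourcecode:: python

    iworkflowable = entity.cw_adapt_to('IWorkflowable')
    iworkflowable.fire_transition('deactivate')   # was: entity.fire_transition(...)
    iworkflowable.change_state('activated')       # was: entity.change_state(...)
    trinfo = iworkflowable.latest_trinfo()        # was: entity.latest_trinfo()
    current = iworkflowable.state                 # was: entity.state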
- if not entity_ is entity: - entity.__class__ = entity_.__class__ - entity.__dict__.update(entity_.__dict__) + entity._cw_is_saved = False # entity has an eid but is not yet saved + entity.edited_attributes = set(entity.cw_attr_cache) # XXX cw_edited_attributes eschema = entity.e_schema source = self.locate_etype_source(entity.__regid__) # allocate an eid to the entity before calling hooks - entity.set_eid(self.system_source.create_eid(session)) + entity.eid = self.system_source.create_eid(session) # set caches asap extid = self.init_entity_caches(session, entity, source) if server.DEBUG & server.DBG_REPO: - print 'ADD entity', entity.__regid__, entity.eid, dict(entity) + print 'ADD entity', self, entity.__regid__, entity.eid, entity.cw_attr_cache relations = [] if source.should_call_hooks: self.hm.call_hooks('before_add_entity', session, entity=entity) # XXX use entity.keys here since edited_attributes is not updated for # inline relations XXX not true, right? (see edited_attributes # affectation above) - for attr in entity.iterkeys(): + for attr in entity.cw_attr_cache.iterkeys(): rschema = eschema.subjrels[attr] if not rschema.final: # inlined relation relations.append((attr, entity[attr])) - entity.set_defaults() + entity._cw_set_defaults() if session.is_hook_category_activated('integrity'): - entity.check(creation=True) + entity._cw_check(creation=True) source.add_entity(session, entity) self.add_info(session, entity, source, extid, complete=False) - entity._is_saved = True # entity has an eid and is saved + entity._cw_is_saved = True # entity has an eid and is saved # prefill entity relation caches for rschema in eschema.subject_relations(): rtype = str(rschema) @@ -1080,12 +1069,13 @@ if rschema.final: entity.setdefault(rtype, None) else: - entity.set_related_cache(rtype, 'subject', session.empty_rset()) + entity.cw_set_relation_cache(rtype, 'subject', + session.empty_rset()) for rschema in eschema.object_relations(): rtype = str(rschema) if rtype in schema.VIRTUAL_RTYPES: continue - entity.set_related_cache(rtype, 'object', session.empty_rset()) + entity.cw_set_relation_cache(rtype, 'object', session.empty_rset()) # set inline relation cache before call to after_add_entity for attr, value in relations: session.update_rel_cache_add(entity.eid, attr, value) @@ -1106,7 +1096,7 @@ """ if server.DEBUG & server.DBG_REPO: print 'UPDATE entity', entity.__regid__, entity.eid, \ - dict(entity), edited_attributes + entity.cw_attr_cache, edited_attributes hm = self.hm eschema = entity.e_schema session.set_entity_cache(entity) @@ -1144,7 +1134,7 @@ if not only_inline_rels: hm.call_hooks('before_update_entity', session, entity=entity) if session.is_hook_category_activated('integrity'): - entity.check() + entity._cw_check() source.update_entity(session, entity) self.system_source.update_info(session, entity, need_fti_update) if source.should_call_hooks: @@ -1152,7 +1142,7 @@ hm.call_hooks('after_update_entity', session, entity=entity) for attr, value, prevvalue in relations: # if the relation is already cached, update existant cache - relcache = entity.relation_cached(attr, 'subject') + relcache = entity.cw_relation_cached(attr, 'subject') if prevvalue is not None: hm.call_hooks('after_delete_relation', session, eidfrom=entity.eid, rtype=attr, eidto=prevvalue) @@ -1162,8 +1152,8 @@ if relcache is not None: session.update_rel_cache_add(entity.eid, attr, value) else: - entity.set_related_cache(attr, 'subject', - session.eid_rset(value)) + entity.cw_set_relation_cache(attr, 'subject', + 
session.eid_rset(value)) hm.call_hooks('after_add_relation', session, eidfrom=entity.eid, rtype=attr, eidto=value) finally: diff -r b5640328ffad -r 9db65b381028 server/schemaserial.py --- a/server/schemaserial.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/schemaserial.py Thu Jul 01 17:06:37 2010 +0200 @@ -27,7 +27,9 @@ from yams import schema as schemamod, buildobjs as ybo -from cubicweb.schema import CONSTRAINTS, ETYPE_NAME_MAP, VIRTUAL_RTYPES +from cubicweb import CW_SOFTWARE_ROOT +from cubicweb.schema import (CONSTRAINTS, ETYPE_NAME_MAP, + VIRTUAL_RTYPES, PURE_VIRTUAL_RTYPES) from cubicweb.server import sqlutils def group_mapping(cursor, interactive=True): @@ -100,17 +102,28 @@ sidx[eid] = eschema continue if etype in ETYPE_NAME_MAP: + needcopy = False netype = ETYPE_NAME_MAP[etype] # can't use write rql queries at this point, use raw sql - session.system_sql('UPDATE %(p)sCWEType SET %(p)sname=%%(n)s WHERE %(p)seid=%%(x)s' - % {'p': sqlutils.SQL_PREFIX}, - {'x': eid, 'n': netype}) - session.system_sql('UPDATE entities SET type=%(n)s WHERE type=%(x)s', - {'x': etype, 'n': netype}) + sqlexec = session.system_sql + if sqlexec('SELECT 1 FROM %(p)sCWEType WHERE %(p)sname=%%(n)s' + % {'p': sqlutils.SQL_PREFIX}, {'n': netype}).fetchone(): + # the new type already exists, we should merge + assert etype.lower() != netype.lower() + needcopy = True + else: + # the new type doesn't exist, we should rename + sqlexec('UPDATE %(p)sCWEType SET %(p)sname=%%(n)s WHERE %(p)seid=%%(x)s' + % {'p': sqlutils.SQL_PREFIX}, {'x': eid, 'n': netype}) + if etype.lower() != netype.lower(): + sqlexec('ALTER TABLE %s%s RENAME TO %s%s' % ( + sqlutils.SQL_PREFIX, etype, sqlutils.SQL_PREFIX, netype)) + sqlexec('UPDATE entities SET type=%(n)s WHERE type=%(x)s', + {'x': etype, 'n': netype}) session.commit(False) try: - session.system_sql('UPDATE deleted_entities SET type=%(n)s WHERE type=%(x)s', - {'x': etype, 'n': netype}) + sqlexec('UPDATE deleted_entities SET type=%(n)s WHERE type=%(x)s', + {'x': etype, 'n': netype}) except: pass tocleanup = [eid] @@ -118,6 +131,12 @@ if etype == eidetype) repo.clear_caches(tocleanup) session.commit(False) + if needcopy: + from logilab.common.testlib import mock_object + sidx[eid] = mock_object(type=netype) + # copy / CWEType entity removal expected to be done through + # rename_entity_type in a migration script + continue etype = netype etype = ybo.EntityType(name=etype, description=desc, eid=eid) eschema = schema.add_entity_type(etype) diff -r b5640328ffad -r 9db65b381028 server/server.py --- a/server/server.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/server.py Thu Jul 01 17:06:37 2010 +0200 @@ -74,10 +74,10 @@ class RepositoryServer(object): - def __init__(self, config, debug=False): + def __init__(self, config): """make the repository available as a PyRO object""" self.config = config - self.repo = Repository(config, debug=debug) + self.repo = Repository(config) self.ns = None self.quiting = None # event queue diff -r b5640328ffad -r 9db65b381028 server/serverctl.py --- a/server/serverctl.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/serverctl.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,10 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""cubicweb-ctl commands and command handlers specific to the server.serverconfig +"""cubicweb-ctl commands and command handlers specific to the +server.serverconfig +""" -""" __docformat__ = 'restructuredtext en' # *ctl module should limit the number of import to be imported as quickly as @@ -48,14 +49,16 @@ if dbname is None: dbname = source['db-name'] driver = source['db-driver'] - print '-> connecting to %s database' % driver, - if dbhost: - print '%s@%s' % (dbname, dbhost), - else: - print dbname, + if verbose: + print '-> connecting to %s database' % driver, + if dbhost: + print '%s@%s' % (dbname, dbhost), + else: + print dbname, if not verbose or (not special_privs and source.get('db-user')): user = source['db-user'] - print 'as', user + if verbose: + print 'as', user if source.get('db-password'): password = source['db-password'] else: @@ -249,11 +252,12 @@ cmdname = 'start' cfgname = 'repository' - def start_server(self, ctlconf, debug): + def start_server(self, config): command = ['cubicweb-ctl start-repository '] - if debug: + if config.debugmode: command.append('--debug') - command.append(self.config.appid) + command.append('--loglevel %s' % config['log-threshold']) + command.append(config.appid) os.system(' '.join(command)) @@ -262,8 +266,7 @@ cfgname = 'repository' def poststop(self): - """if pyro is enabled, ensure the repository is correctly - unregistered + """if pyro is enabled, ensure the repository is correctly unregistered """ if self.config.pyro_enabled(): from cubicweb.server.repository import pyro_unregister @@ -272,6 +275,14 @@ # repository specific commands ################################################ +def createdb(helper, source, dbcnx, cursor, **kwargs): + if dbcnx.logged_user != source['db-user']: + helper.create_database(cursor, source['db-name'], source['db-user'], + source['db-encoding'], **kwargs) + else: + helper.create_database(cursor, source['db-name'], + dbencoding=source['db-encoding'], **kwargs) + class CreateInstanceDBCommand(Command): """Create the system database of an instance (run after 'create'). @@ -314,14 +325,13 @@ source = config.sources()['system'] dbname = source['db-name'] driver = source['db-driver'] - create_db = self.config.create_db helper = get_db_helper(driver) if driver == 'sqlite': if os.path.exists(dbname) and ( automatic or ASK.confirm('Database %s already exists. Drop it?' % dbname)): os.unlink(dbname) - elif create_db: + elif self.config.create_db: print '\n'+underline_title('Creating the system database') # connect on the dbms system base to create our base dbcnx = _db_sys_cnx(source, 'CREATE DATABASE and / or USER', verbose=verbose) @@ -338,12 +348,7 @@ cursor.execute('DROP DATABASE %s' % dbname) else: return - if dbcnx.logged_user != source['db-user']: - helper.create_database(cursor, dbname, source['db-user'], - source['db-encoding']) - else: - helper.create_database(cursor, dbname, - dbencoding=source['db-encoding']) + createdb(helper, source, dbcnx, cursor) dbcnx.commit() print '-> database %s created.' 
% dbname except: @@ -523,22 +528,28 @@ ('debug', {'short': 'D', 'action' : 'store_true', 'help': 'start server in debug mode.'}), + ('loglevel', + {'short': 'l', 'type' : 'choice', 'metavar': '', + 'default': None, 'choices': ('debug', 'info', 'warning', 'error'), + 'help': 'debug if -D is set, error otherwise', + }), ) def run(self, args): from logilab.common.daemon import daemonize + from cubicweb.cwctl import init_cmdline_log_threshold from cubicweb.server.server import RepositoryServer appid = pop_arg(args, msg='No instance specified !') - config = ServerConfiguration.config_for(appid) - if sys.platform == 'win32': - if not self.config.debug: - from logging import getLogger - logger = getLogger('cubicweb.ctl') - logger.info('Forcing debug mode on win32 platform') - self.config.debug = True - debug = self.config.debug + debug = self['debug'] + if sys.platform == 'win32' and not debug: + from logging import getLogger + logger = getLogger('cubicweb.ctl') + logger.info('Forcing debug mode on win32 platform') + debug = True + config = ServerConfiguration.config_for(appid, debugmode=debug) + init_cmdline_log_threshold(config, self['loglevel']) # create the server - server = RepositoryServer(config, debug) + server = RepositoryServer(config) # ensure the directory where the pid-file should be set exists (for # instance /var/run/cubicweb may be deleted on computer restart) pidfile = config['pid-file'] diff -r b5640328ffad -r 9db65b381028 server/session.py --- a/server/session.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/session.py Thu Jul 01 17:06:37 2010 +0200 @@ -250,7 +250,7 @@ entity = self.entity_cache(eid) except KeyError: return - rcache = entity.relation_cached(rtype, role) + rcache = entity.cw_relation_cached(rtype, role) if rcache is not None: rset, entities = rcache rset = rset.copy() @@ -266,14 +266,15 @@ targetentity.cw_col = 0 rset.rowcount += 1 entities.append(targetentity) - entity._related_cache['%s_%s' % (rtype, role)] = (rset, tuple(entities)) + entity._cw_related_cache['%s_%s' % (rtype, role)] = ( + rset, tuple(entities)) def _update_entity_rel_cache_del(self, eid, rtype, role, targeteid): try: entity = self.entity_cache(eid) except KeyError: return - rcache = entity.relation_cached(rtype, role) + rcache = entity.cw_relation_cached(rtype, role) if rcache is not None: rset, entities = rcache for idx, row in enumerate(rset.rows): @@ -292,7 +293,8 @@ del rset.description[idx] del entities[idx] rset.rowcount -= 1 - entity._related_cache['%s_%s' % (rtype, role)] = (rset, tuple(entities)) + entity._cw_related_cache['%s_%s' % (rtype, role)] = ( + rset, tuple(entities)) # resource accessors ###################################################### @@ -312,16 +314,15 @@ def set_language(self, language): """i18n configuration for translation""" - vreg = self.vreg language = language or self.user.property_value('ui.language') try: - gettext, pgettext = vreg.config.translations[language] + gettext, pgettext = self.vreg.config.translations[language] self._ = self.__ = gettext self.pgettext = pgettext except KeyError: - language = vreg.property_value('ui.language') + language = self.vreg.property_value('ui.language') try: - gettext, pgettext = vreg.config.translations[language] + gettext, pgettext = self.vreg.config.translations[language] self._ = self.__ = gettext self.pgettext = pgettext except KeyError: @@ -661,16 +662,6 @@ else: del self.transaction_data['ecache'][eid] - def base_url(self): - url = self.repo.config['base-url'] - if not url: - try: - url = 
self.repo.config.default_base_url() - except AttributeError: # default_base_url() might not be available - self.warning('missing base-url definition in server config') - url = u'' - return url - def from_controller(self): """return the id (string) of the controller issuing the request (no sense here, always return 'view') diff -r b5640328ffad -r 9db65b381028 server/sources/__init__.py --- a/server/sources/__init__.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/sources/__init__.py Thu Jul 01 17:06:37 2010 +0200 @@ -342,7 +342,7 @@ entity. """ entity = self.repo.vreg['etypes'].etype_class(etype)(session) - entity.set_eid(eid) + entity.eid = eid return entity def after_entity_insertion(self, session, lid, entity): diff -r b5640328ffad -r 9db65b381028 server/sources/native.py --- a/server/sources/native.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/sources/native.py Thu Jul 01 17:06:37 2010 +0200 @@ -1075,10 +1075,10 @@ entity[rtype] = unicode(value, session.encoding, 'replace') else: entity[rtype] = value - entity.set_eid(eid) + entity.eid = eid session.repo.init_entity_caches(session, entity, self) entity.edited_attributes = set(entity) - entity.check() + entity._cw_check() self.repo.hm.call_hooks('before_add_entity', session, entity=entity) # restore the entity action.changes['cw_eid'] = eid @@ -1145,7 +1145,7 @@ return [session._( "Can't undo creation of entity %(eid)s of type %(etype)s, type " "no more supported" % {'eid': eid, 'etype': etype})] - entity.set_eid(eid) + entity.eid = eid # for proper eid/type cache update hook.set_operation(session, 'pendingeids', eid, CleanupDeletedEidsCacheOp) @@ -1233,7 +1233,8 @@ try: # use cursor_index_object, not cursor_reindex_object since # unindexing done in the FTIndexEntityOp - self.dbhelper.cursor_index_object(entity.eid, entity, + self.dbhelper.cursor_index_object(entity.eid, + entity.cw_adapt_to('IFTIndexable'), session.pool['system']) except Exception: # let KeyboardInterrupt / SystemExit propagate self.exception('error while reindexing %s', entity) @@ -1258,7 +1259,8 @@ # processed return done.add(eid) - for container in session.entity_from_eid(eid).fti_containers(): + iftindexable = session.entity_from_eid(eid).cw_adapt_to('IFTIndexable') + for container in iftindexable.fti_containers(): source.fti_unindex_entity(session, container.eid) source.fti_index_entity(session, container) diff -r b5640328ffad -r 9db65b381028 server/sources/rql2sql.py --- a/server/sources/rql2sql.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/sources/rql2sql.py Thu Jul 01 17:06:37 2010 +0200 @@ -612,12 +612,14 @@ sql += '\nHAVING %s' % having # sort if sorts: - sql += '\nORDER BY %s' % ','.join(self._sortterm_sql(sortterm, - fselectidx) - for sortterm in sorts) - if fneedwrap: - selection = ['T1.C%s' % i for i in xrange(len(origselection))] - sql = 'SELECT %s FROM (%s) AS T1' % (','.join(selection), sql) + sqlsortterms = [self._sortterm_sql(sortterm, fselectidx) + for sortterm in sorts] + sqlsortterms = [x for x in sqlsortterms if x is not None] + if sqlsortterms: + sql += '\nORDER BY %s' % ','.join(sqlsortterms) + if sorts and fneedwrap: + selection = ['T1.C%s' % i for i in xrange(len(origselection))] + sql = 'SELECT %s FROM (%s) AS T1' % (','.join(selection), sql) state.finalize_source_cbs() finally: select.selection = origselection @@ -697,12 +699,14 @@ def _sortterm_sql(self, sortterm, selectidx): term = sortterm.term try: - sqlterm = str(selectidx.index(str(term)) + 1) + sqlterm = selectidx.index(str(term)) + 1 except ValueError: # Constant node or non selected 
term - sqlterm = str(term.accept(self)) + sqlterm = term.accept(self) + if sqlterm is None: + return None if sortterm.asc: - return sqlterm + return str(sqlterm) else: return '%s DESC' % sqlterm @@ -1061,7 +1065,8 @@ not_ = True else: not_ = False - return self.dbhelper.fti_restriction_sql(alias, const.eval(self._args), + query = const.eval(self._args) + return self.dbhelper.fti_restriction_sql(alias, query, jointo, not_) + restriction def visit_comparison(self, cmp): @@ -1105,6 +1110,15 @@ def visit_function(self, func): """generate SQL name for a function""" + if func.name == 'FTIRANK': + try: + rel = iter(func.children[0].variable.stinfo['ftirels']).next() + except KeyError: + raise BadRQLQuery("can't use FTIRANK on variable not used in an" + " 'has_text' relation (eg full-text search)") + const = rel.get_parts()[1].children[0] + return self.dbhelper.fti_rank_order(self._fti_table(rel), + const.eval(self._args)) args = [c.accept(self) for c in func.children] if func in self._state.source_cb_funcs: # function executed as a callback on the source @@ -1133,8 +1147,6 @@ _id = _id.encode() else: _id = str(id(constant)).replace('-', '', 1) - if isinstance(value, unicode): - value = value.encode(self.dbencoding) self._query_attrs[_id] = value return '%%(%s)s' % _id diff -r b5640328ffad -r 9db65b381028 server/sources/storages.py --- a/server/sources/storages.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/sources/storages.py Thu Jul 01 17:06:37 2010 +0200 @@ -152,7 +152,7 @@ # PIL processing that use filename extension to detect content-type, as # well as providing more understandable file names on the fs. basename = [str(entity.eid), attr] - name = entity.attr_metadata(attr, 'name') + name = entity.cw_attr_metadata(attr, 'name') if name is not None: basename.append(name.encode(self.fsencoding)) fspath = uniquify_path(self.default_directory, '_'.join(basename)) diff -r b5640328ffad -r 9db65b381028 server/sqlutils.py --- a/server/sqlutils.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/sqlutils.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""SQL utilities functions and classes. +"""SQL utilities functions and classes.""" -""" __docformat__ = "restructuredtext en" import os diff -r b5640328ffad -r 9db65b381028 server/ssplanner.py --- a/server/ssplanner.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/ssplanner.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,15 +15,12 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""plan execution of rql queries on a single source +"""plan execution of rql queries on a single source""" -""" from __future__ import with_statement __docformat__ = "restructuredtext en" -from copy import copy - from rql.stmts import Union, Select from rql.nodes import Constant, Relation @@ -479,7 +476,7 @@ result = [[]] for row in result: # get a new entity definition for this row - edef = copy(base_edef) + edef = base_edef.cw_copy() # complete this entity def using row values index = 0 for rtype, rorder, value in self.rdefs: @@ -487,7 +484,7 @@ value = row[index] index += 1 if rorder == InsertRelationsStep.FINAL: - edef.rql_set_value(rtype, value) + edef._cw_rql_set_value(rtype, value) elif rorder == InsertRelationsStep.RELATION: self.plan.add_relation_def( (edef, rtype, value) ) edef.querier_pending_relations[(rtype, 'subject')] = value @@ -584,7 +581,7 @@ edef = edefs[eid] except KeyError: edefs[eid] = edef = session.entity_from_eid(eid) - edef.rql_set_value(str(rschema), rhsval) + edef._cw_rql_set_value(str(rschema), rhsval) else: repo.glob_add_relation(session, lhsval, str(rschema), rhsval) result[i] = newrow diff -r b5640328ffad -r 9db65b381028 server/test/data/migratedapp/schema.py --- a/server/test/data/migratedapp/schema.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/test/data/migratedapp/schema.py Thu Jul 01 17:06:37 2010 +0200 @@ -69,7 +69,7 @@ mydate = Date(default='TODAY') shortpara = String(maxsize=64) ecrit_par = SubjectRelation('Personne', constraints=[RQLConstraint('S concerne A, O concerne A')]) - attachment = SubjectRelation(('File', 'Image')) + attachment = SubjectRelation('File') class Text(Para): __specializes_schema__ = True diff -r b5640328ffad -r 9db65b381028 server/test/data/schema.py --- a/server/test/data/schema.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/test/data/schema.py Thu Jul 01 17:06:37 2010 +0200 @@ -92,7 +92,7 @@ }) migrated_from = SubjectRelation('Note') - attachment = SubjectRelation(('File', 'Image')) + attachment = SubjectRelation('File') inline1 = SubjectRelation('Affaire', inlined=True, cardinality='?*') todo_by = SubjectRelation('CWUser') diff -r b5640328ffad -r 9db65b381028 server/test/data/site_cubicweb.py --- a/server/test/data/site_cubicweb.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/test/data/site_cubicweb.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,6 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-""" - -""" from logilab.database import FunctionDescr from logilab.database.sqlite import register_sqlite_pyfunc @@ -25,7 +22,7 @@ try: class DUMB_SORT(FunctionDescr): - supported_backends = ('sqlite',) + pass register_function(DUMB_SORT) def dumb_sort(something): diff -r b5640328ffad -r 9db65b381028 server/test/data/sources_fti --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/data/sources_fti Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,14 @@ +[system] + +db-driver = postgres +db-host = localhost +db-port = +adapter = native +db-name = cw_fti_test +db-encoding = UTF-8 +db-user = syt +db-password = syt + +[admin] +login = admin +password = gingkow diff -r b5640328ffad -r 9db65b381028 server/test/unittest_fti.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/unittest_fti.py Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,52 @@ +from __future__ import with_statement + +from cubicweb.devtools import ApptestConfiguration +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.selectors import implements +from cubicweb.entities.adapters import IFTIndexableAdapter + +class PostgresFTITC(CubicWebTC): + config = ApptestConfiguration('data', sourcefile='sources_fti') + + def test_occurence_count(self): + req = self.request() + c1 = req.create_entity('Card', title=u'c1', + content=u'cubicweb cubicweb cubicweb') + c2 = req.create_entity('Card', title=u'c3', + content=u'cubicweb') + c3 = req.create_entity('Card', title=u'c2', + content=u'cubicweb cubicweb') + self.commit() + self.assertEquals(req.execute('Card X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"').rows, + [[c1.eid], [c3.eid], [c2.eid]]) + + + def test_attr_weight(self): + class CardIFTIndexableAdapter(IFTIndexableAdapter): + __select__ = implements('Card') + attr_weight = {'title': 'A'} + with self.temporary_appobjects(CardIFTIndexableAdapter): + req = self.request() + c1 = req.create_entity('Card', title=u'c1', + content=u'cubicweb cubicweb cubicweb') + c2 = req.create_entity('Card', title=u'c2', + content=u'cubicweb cubicweb') + c3 = req.create_entity('Card', title=u'cubicweb', + content=u'autre chose') + self.commit() + self.assertEquals(req.execute('Card X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"').rows, + [[c3.eid], [c1.eid], [c2.eid]]) + + + def test_entity_weight(self): + class PersonneIFTIndexableAdapter(IFTIndexableAdapter): + __select__ = implements('Personne') + entity_weight = 2.0 + with self.temporary_appobjects(PersonneIFTIndexableAdapter): + req = self.request() + c1 = req.create_entity('Personne', nom=u'c1', prenom=u'cubicweb') + c2 = req.create_entity('Comment', content=u'cubicweb cubicweb', comments=c1) + c3 = req.create_entity('Comment', content=u'cubicweb cubicweb cubicweb', comments=c1) + self.commit() + self.assertEquals(req.execute('Any X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"').rows, + [[c1.eid], [c3.eid], [c2.eid]]) diff -r b5640328ffad -r 9db65b381028 server/test/unittest_ldapuser.py --- a/server/test/unittest_ldapuser.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/test/unittest_ldapuser.py Thu Jul 01 17:06:37 2010 +0200 @@ -178,12 +178,13 @@ cnx = self.login(SYT, password='dummypassword') cu = cnx.cursor() adim = cu.execute('CWUser X WHERE X login %(login)s', {'login': ADIM}).get_entity(0, 0) - adim.fire_transition('deactivate') + iworkflowable = adim.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') try: cnx.commit() adim.clear_all_caches() self.assertEquals(adim.in_state[0].name, 'deactivated') - trinfo = adim.latest_trinfo() 
+ trinfo = iworkflowable.latest_trinfo() self.assertEquals(trinfo.owned_by[0].login, SYT) # select from_state to skip the user's creation TrInfo rset = self.sexecute('Any U ORDERBY D DESC WHERE WF wf_info_for X,' @@ -195,7 +196,7 @@ # restore db state self.restore_connection() adim = self.sexecute('CWUser X WHERE X login %(login)s', {'login': ADIM}).get_entity(0, 0) - adim.fire_transition('activate') + adim.cw_adapt_to('IWorkflowable').fire_transition('activate') self.sexecute('DELETE X in_group G WHERE X login %(syt)s, G name "managers"', {'syt': SYT}) def test_same_column_names(self): diff -r b5640328ffad -r 9db65b381028 server/test/unittest_migractions.py --- a/server/test/unittest_migractions.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/test/unittest_migractions.py Thu Jul 01 17:06:37 2010 +0200 @@ -425,7 +425,7 @@ self.failIf(self.config.cube_dir('email') in self.config.cubes_path()) self.failIf('file' in self.config.cubes()) self.failIf(self.config.cube_dir('file') in self.config.cubes_path()) - for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'Image', + for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'sender', 'in_thread', 'reply_to', 'data_format'): self.failIf(ertype in schema, ertype) self.assertEquals(sorted(schema['see_also'].rdefs.keys()), @@ -448,7 +448,7 @@ self.failUnless(self.config.cube_dir('email') in self.config.cubes_path()) self.failUnless('file' in self.config.cubes()) self.failUnless(self.config.cube_dir('file') in self.config.cubes_path()) - for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'Image', + for ertype in ('Email', 'EmailThread', 'EmailPart', 'File', 'sender', 'in_thread', 'reply_to', 'data_format'): self.failUnless(ertype in schema, ertype) self.assertEquals(sorted(schema['see_also'].rdefs.keys()), diff -r b5640328ffad -r 9db65b381028 server/test/unittest_msplanner.py --- a/server/test/unittest_msplanner.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/test/unittest_msplanner.py Thu Jul 01 17:06:37 2010 +0200 @@ -60,7 +60,7 @@ {'X': 'Card'}, {'X': 'Comment'}, {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailAddress'}, {'X': 'EmailPart'}, {'X': 'EmailThread'}, {'X': 'ExternalUri'}, {'X': 'File'}, - {'X': 'Folder'}, {'X': 'Image'}, {'X': 'Note'}, + {'X': 'Folder'}, {'X': 'Note'}, {'X': 'Personne'}, {'X': 'RQLExpression'}, {'X': 'Societe'}, {'X': 'State'}, {'X': 'SubDivision'}, {'X': 'SubWorkflowExitPoint'}, {'X': 'Tag'}, {'X': 'TrInfo'}, {'X': 'Transition'}, @@ -413,7 +413,7 @@ """retrieve CWUser X from both sources and return concatenation of results """ self._test('CWUser X ORDERBY X LIMIT 10 OFFSET 10', - [('AggrStep', 'Any X ORDERBY X', 10, 10, 'table0', None, [ + [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY table0.C0 LIMIT 10 OFFSET 10', None, [ ('FetchStep', [('Any X WHERE X is CWUser', [{'X': 'CWUser'}])], [self.ldap, self.system], {}, {'X': 'table0.C0'}, []), ]), @@ -423,7 +423,7 @@ """ # COUNT(X) is kept in sub-step and transformed into SUM(X) in the AggrStep self._test('Any COUNT(X) WHERE X is CWUser', - [('AggrStep', 'Any COUNT(X)', None, None, 'table0', None, [ + [('AggrStep', 'SELECT SUM(table0.C0) FROM table0', None, [ ('FetchStep', [('Any COUNT(X) WHERE X is CWUser', [{'X': 'CWUser'}])], [self.ldap, self.system], {}, {'COUNT(X)': 'table0.C0'}, []), ]), @@ -498,7 +498,7 @@ def test_complex_ordered(self): self._test('Any L ORDERBY L WHERE X login L', - [('AggrStep', 'Any L ORDERBY L', None, None, 'table0', None, + [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY table0.C0', None, [('FetchStep', [('Any 
L WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], [self.ldap, self.system], {}, {'X.login': 'table0.C0', 'L': 'table0.C0'}, []), @@ -507,7 +507,7 @@ def test_complex_ordered_limit_offset(self): self._test('Any L ORDERBY L LIMIT 10 OFFSET 10 WHERE X login L', - [('AggrStep', 'Any L ORDERBY L', 10, 10, 'table0', None, + [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY table0.C0 LIMIT 10 OFFSET 10', None, [('FetchStep', [('Any L WHERE X login L, X is CWUser', [{'X': 'CWUser', 'L': 'String'}])], [self.ldap, self.system], {}, {'X.login': 'table0.C0', 'L': 'table0.C0'}, []), @@ -593,7 +593,7 @@ 2. return content of the table sorted """ self._test('Any X,F ORDERBY F WHERE X firstname F', - [('AggrStep', 'Any X,F ORDERBY F', None, None, 'table0', None, + [('AggrStep', 'SELECT table0.C0, table0.C1 FROM table0 ORDER BY table0.C1', None, [('FetchStep', [('Any X,F WHERE X firstname F, X is CWUser', [{'X': 'CWUser', 'F': 'String'}])], [self.ldap, self.system], {}, @@ -657,7 +657,7 @@ def test_complex_typed_aggregat(self): self._test('Any MAX(X) WHERE X is Card', - [('AggrStep', 'Any MAX(X)', None, None, 'table0', None, + [('AggrStep', 'SELECT MAX(table0.C0) FROM table0', None, [('FetchStep', [('Any MAX(X) WHERE X is Card', [{'X': 'Card'}])], [self.cards, self.system], {}, {'MAX(X)': 'table0.C0'}, []) @@ -784,10 +784,10 @@ [{'X': 'Basket'}]), ('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}]), - ('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Image, Note, Personne, Societe, SubDivision, Tag)', + ('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Note, Personne, Societe, SubDivision, Tag)', [{'X': 'Card'}, {'X': 'Comment'}, {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailThread'}, - {'X': 'File'}, {'X': 'Folder'}, {'X': 'Image'}, + {'X': 'File'}, {'X': 'Folder'}, {'X': 'Note'}, {'X': 'Personne'}, {'X': 'Societe'}, {'X': 'SubDivision'}, {'X': 'Tag'}]),], None, None, [self.system], {}, []), @@ -810,10 +810,10 @@ [{'X': 'Basket'}]), ('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}]), - ('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Image, Note, Personne, Societe, SubDivision, Tag)', + ('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Note, Personne, Societe, SubDivision, Tag)', [{'X': 'Card'}, {'X': 'Comment'}, {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailThread'}, - {'X': 'File'}, {'X': 'Folder'}, {'X': 'Image'}, + {'X': 'File'}, {'X': 'Folder'}, {'X': 'Note'}, {'X': 'Personne'}, {'X': 'Societe'}, {'X': 'SubDivision'}, {'X': 'Tag'}])], [self.system], {}, {'X': 'table0.C0'}, []), @@ -823,7 +823,7 @@ [{'X': 'Affaire'}, {'X': 'Basket'}, {'X': 'CWUser'}, {'X': 'Card'}, {'X': 'Comment'}, {'X': 'Division'}, {'X': 'Email'}, {'X': 'EmailThread'}, - {'X': 'File'}, {'X': 'Folder'}, {'X': 'Image'}, + {'X': 'File'}, {'X': 'Folder'}, {'X': 'Note'}, {'X': 'Personne'}, {'X': 'Societe'}, {'X': 'SubDivision'}, {'X': 'Tag'}])], 10, 10, [self.system], {'X': 'table0.C0'}, []) @@ -888,7 +888,7 @@ [{'X': 'Card'}, {'X': 'Note'}, {'X': 'State'}])], [self.cards, self.system], {}, {'X': 'table0.C0'}, []), ('FetchStep', - [('Any X WHERE X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, Comment, Division, Email, EmailAddress, EmailPart, 
EmailThread, ExternalUri, File, Folder, Image, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', + [('Any X WHERE X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', [{'X': 'BaseTransition'}, {'X': 'Bookmark'}, {'X': 'CWAttribute'}, {'X': 'CWCache'}, {'X': 'CWConstraint'}, {'X': 'CWConstraintType'}, @@ -899,7 +899,7 @@ {'X': 'Email'}, {'X': 'EmailAddress'}, {'X': 'EmailPart'}, {'X': 'EmailThread'}, {'X': 'ExternalUri'}, {'X': 'File'}, - {'X': 'Folder'}, {'X': 'Image'}, + {'X': 'Folder'}, {'X': 'Personne'}, {'X': 'RQLExpression'}, {'X': 'Societe'}, {'X': 'SubDivision'}, {'X': 'SubWorkflowExitPoint'}, {'X': 'Tag'}, @@ -949,7 +949,7 @@ [self.system], {'X': 'table3.C0'}, {'ET': 'table0.C0', 'X': 'table0.C1'}, []), # extra UnionFetchStep could be avoided but has no cost, so don't care ('UnionFetchStep', - [('FetchStep', [('Any ET,X WHERE X is ET, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Image, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', + [('FetchStep', [('Any ET,X WHERE X is ET, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)', [{'X': 'BaseTransition', 'ET': 'CWEType'}, {'X': 'Bookmark', 'ET': 'CWEType'}, {'X': 'CWAttribute', 'ET': 'CWEType'}, {'X': 'CWCache', 'ET': 'CWEType'}, {'X': 'CWConstraint', 'ET': 'CWEType'}, @@ -961,7 +961,7 @@ {'X': 'EmailAddress', 'ET': 'CWEType'}, {'X': 'EmailPart', 'ET': 'CWEType'}, {'X': 'EmailThread', 'ET': 'CWEType'}, {'X': 'ExternalUri', 'ET': 'CWEType'}, {'X': 'File', 'ET': 'CWEType'}, {'X': 'Folder', 'ET': 'CWEType'}, - {'X': 'Image', 'ET': 'CWEType'}, {'X': 'Personne', 'ET': 'CWEType'}, + {'X': 'Personne', 'ET': 'CWEType'}, {'X': 'RQLExpression', 'ET': 'CWEType'}, {'X': 'Societe', 'ET': 'CWEType'}, {'X': 'SubDivision', 'ET': 'CWEType'}, {'X': 'SubWorkflowExitPoint', 'ET': 'CWEType'}, {'X': 'Tag', 'ET': 'CWEType'}, {'X': 'TrInfo', 'ET': 'CWEType'}, @@ -1299,9 +1299,66 @@ ]), ]) + def test_has_text_orderby_rank(self): + self._test('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', + [('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], + [self.ldap, self.system], None, {'X': 'table0.C0'}, []), + ('AggrStep', 'SELECT table1.C1 FROM table1 ORDER BY table1.C0', None, [ + ('FetchStep', [('Any FTIRANK(X),X WHERE X has_text "bla", X is CWUser', + [{'X': 'CWUser'}])], + [self.system], {'X': 'table0.C0'}, {'FTIRANK(X)': 'table1.C0', 'X': 'table1.C1'}, []), + ('FetchStep', [('Any FTIRANK(X),X WHERE X has_text "bla", X firstname "bla", X is Personne', + [{'X': 'Personne'}])], + [self.system], {}, {'FTIRANK(X)': 
'table1.C0', 'X': 'table1.C1'}, []), + ]), + ]) + + def test_security_has_text_orderby_rank(self): + # use a guest user + self.session = self.user_groups_session('guests') + self._test('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', + [('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], + [self.ldap, self.system], None, {'X': 'table1.C0'}, []), + ('UnionFetchStep', + [('FetchStep', [('Any X WHERE X firstname "bla", X is Personne', [{'X': 'Personne'}])], + [self.system], {}, {'X': 'table0.C0'}, []), + ('FetchStep', [('Any X WHERE EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])], + [self.system], {'X': 'table1.C0'}, {'X': 'table0.C0'}, [])]), + ('OneFetchStep', [('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla"', + [{'X': 'CWUser'}, {'X': 'Personne'}])], + None, None, [self.system], {'X': 'table0.C0'}, []), + ]) + + def test_has_text_select_rank(self): + self._test('Any X, FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', + # XXX unecessary duplicate selection + [('FetchStep', [('Any X,X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], + [self.ldap, self.system], None, {'X': 'table0.C1'}, []), + ('UnionStep', None, None, [ + ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", X is CWUser', [{'X': 'CWUser'}])], + None, None, [self.system], {'X': 'table0.C1'}, []), + ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])], + None, None, [self.system], {}, []), + ]), + ]) + + def test_security_has_text_select_rank(self): + # use a guest user + self.session = self.user_groups_session('guests') + self._test('Any X, FTIRANK(X) WHERE X has_text "bla", X firstname "bla"', + [('FetchStep', [('Any X,X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])], + [self.ldap, self.system], None, {'X': 'table0.C1'}, []), + ('UnionStep', None, None, [ + ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])], + None, None, [self.system], {'X': 'table0.C1'}, []), + ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])], + None, None, [self.system], {}, []), + ]), + ]) + def test_sort_func(self): self._test('Note X ORDERBY DUMB_SORT(RF) WHERE X type RF', - [('AggrStep', 'Any X ORDERBY DUMB_SORT(RF)', None, None, 'table0', None, [ + [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY DUMB_SORT(table0.C1)', None, [ ('FetchStep', [('Any X,RF WHERE X type RF, X is Note', [{'X': 'Note', 'RF': 'String'}])], [self.cards, self.system], {}, {'X': 'table0.C0', 'X.type': 'table0.C1', 'RF': 'table0.C1'}, []), @@ -1310,8 +1367,7 @@ def test_ambigous_sort_func(self): self._test('Any X ORDERBY DUMB_SORT(RF) WHERE X title RF, X is IN (Bookmark, Card, EmailThread)', - [('AggrStep', 'Any X ORDERBY DUMB_SORT(RF)', - None, None, 'table0', None, + [('AggrStep', 'SELECT table0.C0 FROM table0 ORDER BY DUMB_SORT(table0.C1)', None, [('FetchStep', [('Any X,RF WHERE X title RF, X is Card', [{'X': 'Card', 'RF': 'String'}])], [self.cards, self.system], {}, @@ -1718,8 +1774,9 @@ ]) def test_nonregr2(self): - self.session.user.fire_transition('deactivate') - treid = self.session.user.latest_trinfo().eid + iworkflowable = self.session.user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') + treid = iworkflowable.latest_trinfo().eid self._test('Any X ORDERBY D DESC WHERE E eid %(x)s, E wf_info_for X, X modification_date D', [('FetchStep', [('Any 
X,D WHERE X modification_date D, X is Note', [{'X': 'Note', 'D': 'Datetime'}])], @@ -1727,7 +1784,7 @@ ('FetchStep', [('Any X,D WHERE X modification_date D, X is CWUser', [{'X': 'CWUser', 'D': 'Datetime'}])], [self.ldap, self.system], None, {'X': 'table1.C0', 'X.modification_date': 'table1.C1', 'D': 'table1.C1'}, []), - ('AggrStep', 'Any X ORDERBY D DESC', None, None, 'table2', None, [ + ('AggrStep', 'SELECT table2.C0 FROM table2 ORDER BY table2.C1 DESC', None, [ ('FetchStep', [('Any X,D WHERE E eid %s, E wf_info_for X, X modification_date D, E is TrInfo, X is Affaire'%treid, [{'X': 'Affaire', 'E': 'TrInfo', 'D': 'Datetime'}])], [self.system], @@ -1870,8 +1927,7 @@ [{'X': 'Note', 'Z': 'Datetime'}])], [self.cards, self.system], None, {'X': 'table0.C0', 'X.modification_date': 'table0.C1', 'Z': 'table0.C1'}, []), - ('AggrStep', 'Any X ORDERBY Z DESC', - None, None, 'table1', None, + ('AggrStep', 'SELECT table1.C0 FROM table1 ORDER BY table1.C1 DESC', None, [('FetchStep', [('Any X,Z WHERE X modification_date Z, 999999 see_also X, X is Bookmark', [{'X': 'Bookmark', 'Z': 'Datetime'}])], [self.system], {}, {'X': 'table1.C0', 'X.modification_date': 'table1.C1', diff -r b5640328ffad -r 9db65b381028 server/test/unittest_multisources.py --- a/server/test/unittest_multisources.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/test/unittest_multisources.py Thu Jul 01 17:06:37 2010 +0200 @@ -111,11 +111,11 @@ self.assertEquals(len(rset), 4) # since they are orderd by eid, we know the 3 first one is coming from the system source # and the others from external source - self.assertEquals(rset.get_entity(0, 0).metainformation(), + self.assertEquals(rset.get_entity(0, 0).cw_metainformation(), {'source': {'adapter': 'native', 'uri': 'system'}, 'type': u'Card', 'extid': None}) externent = rset.get_entity(3, 0) - metainf = externent.metainformation() + metainf = externent.cw_metainformation() self.assertEquals(metainf['source'], {'adapter': 'pyrorql', 'base-url': 'http://extern.org/', 'uri': 'extern'}) self.assertEquals(metainf['type'], 'Card') self.assert_(metainf['extid']) @@ -134,6 +134,8 @@ self.repo.sources_by_uri['extern'].synchronize(MTIME) # in case fti_update has been run before self.failUnless(self.sexecute('Any X WHERE X has_text "affref"')) self.failUnless(self.sexecute('Affaire X WHERE X has_text "affref"')) + self.failUnless(self.sexecute('Any X ORDERBY FTIRANK(X) WHERE X has_text "affref"')) + self.failUnless(self.sexecute('Affaire X ORDERBY FTIRANK(X) WHERE X has_text "affref"')) def test_anon_has_text(self): self.repo.sources_by_uri['extern'].synchronize(MTIME) # in case fti_update has been run before @@ -145,6 +147,9 @@ cnx = self.login('anon') cu = cnx.cursor() rset = cu.execute('Any X WHERE X has_text "card"') + # 5: 4 card + 1 readable affaire + self.assertEquals(len(rset), 5, zip(rset.rows, rset.description)) + rset = cu.execute('Any X ORDERBY FTIRANK(X) WHERE X has_text "card"') self.assertEquals(len(rset), 5, zip(rset.rows, rset.description)) Connection_close(cnx) @@ -305,8 +310,9 @@ {'x': affaire.eid, 'u': ueid}) def test_nonregr2(self): - self.session.user.fire_transition('deactivate') - treid = self.session.user.latest_trinfo().eid + iworkflowable = self.session.user.cw_adapt_to('IWorkflowable') + iworkflowable.fire_transition('deactivate') + treid = iworkflowable.latest_trinfo().eid rset = self.sexecute('Any X ORDERBY D DESC WHERE E eid %(x)s, E wf_info_for X, X modification_date D', {'x': treid}) self.assertEquals(len(rset), 1) diff -r b5640328ffad -r 9db65b381028 
server/test/unittest_querier.py --- a/server/test/unittest_querier.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/test/unittest_querier.py Thu Jul 01 17:06:37 2010 +0200 @@ -130,7 +130,7 @@ 'X': 'Affaire', 'ET': 'CWEType', 'ETN': 'String'}]) rql, solutions = partrqls[1] - self.assertEquals(rql, 'Any ETN,X WHERE X is ET, ET name ETN, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWUser, Card, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Image, Note, Personne, RQLExpression, Societe, State, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)') + self.assertEquals(rql, 'Any ETN,X WHERE X is ET, ET name ETN, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWUser, Card, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Note, Personne, RQLExpression, Societe, State, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)') self.assertListEquals(sorted(solutions), sorted([{'X': 'BaseTransition', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'Bookmark', 'ETN': 'String', 'ET': 'CWEType'}, @@ -155,7 +155,6 @@ {'X': 'ExternalUri', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'File', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'Folder', 'ETN': 'String', 'ET': 'CWEType'}, - {'X': 'Image', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'Note', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'Personne', 'ETN': 'String', 'ET': 'CWEType'}, {'X': 'RQLExpression', 'ETN': 'String', 'ET': 'CWEType'}, @@ -491,17 +490,17 @@ 'WHERE RT name N, RDEF relation_type RT ' 'HAVING COUNT(RDEF) > 10') self.assertListEquals(rset.rows, - [[u'description_format', 13], - [u'description', 14], + [[u'description_format', 12], + [u'description', 13], [u'name', 14], - [u'created_by', 38], - [u'creation_date', 38], - [u'cwuri', 38], - [u'in_basket', 38], - [u'is', 38], - [u'is_instance_of', 38], - [u'modification_date', 38], - [u'owned_by', 38]]) + [u'created_by', 37], + [u'creation_date', 37], + [u'cwuri', 37], + [u'in_basket', 37], + [u'is', 37], + [u'is_instance_of', 37], + [u'modification_date', 37], + [u'owned_by', 37]]) def test_select_aggregat_having_dumb(self): # dumb but should not raise an error diff -r b5640328ffad -r 9db65b381028 server/test/unittest_repository.py --- a/server/test/unittest_repository.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/test/unittest_repository.py Thu Jul 01 17:06:37 2010 +0200 @@ -16,10 +16,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""unit tests for module cubicweb.server.repository - -""" -from __future__ import with_statement +"""unit tests for module cubicweb.server.repository""" from __future__ import with_statement @@ -205,7 +202,7 @@ session = repo._get_session(cnxid) session.set_pool() user = session.user - user.fire_transition('deactivate') + user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') rset = repo.execute(cnxid, 'TrInfo T WHERE T wf_info_for X, X eid %(x)s', {'x': user.eid}) self.assertEquals(len(rset), 1) repo.rollback(cnxid) diff -r b5640328ffad -r 9db65b381028 server/test/unittest_rql2sql.py --- a/server/test/unittest_rql2sql.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/test/unittest_rql2sql.py Thu Jul 01 17:06:37 2010 +0200 @@ -22,11 +22,13 @@ from logilab.common.testlib import TestCase, unittest_main, mock_object from rql import BadRQLQuery +from rql.utils import register_function, FunctionDescr -#from cubicweb.server.sources.native import remove_unused_solutions -from cubicweb.server.sources.rql2sql import SQLGenerator, remove_unused_solutions +from cubicweb.devtools import TestServerConfiguration +from cubicweb.devtools.repotest import RQLGeneratorTC +from cubicweb.server.sources.rql2sql import remove_unused_solutions -from rql.utils import register_function, FunctionDescr + # add a dumb registered procedure class stockproc(FunctionDescr): supported_backends = ('postgres', 'sqlite', 'mysql') @@ -35,8 +37,6 @@ except AssertionError, ex: pass # already registered -from cubicweb.devtools import TestServerConfiguration -from cubicweb.devtools.repotest import RQLGeneratorTC config = TestServerConfiguration('data') config.bootstrap_cubes() @@ -425,13 +425,10 @@ GROUP BY T1.C1'''), ('Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 1, N, DF WHERE X data_name N, X data D, X data_format DF;', - '''SELECT (MAX(T1.C1) + MIN(LENGTH(T1.C0))), T1.C2 FROM (SELECT _X.cw_data AS C0, _X.cw_eid AS C1, _X.cw_data_name AS C2, _X.cw_data_format AS C3 + '''SELECT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))), _X.cw_data_name FROM cw_File AS _X -UNION ALL -SELECT _X.cw_data AS C0, _X.cw_eid AS C1, _X.cw_data_name AS C2, _X.cw_data_format AS C3 -FROM cw_Image AS _X) AS T1 -GROUP BY T1.C2,T1.C3 -ORDER BY 1,2,T1.C3'''), +GROUP BY _X.cw_data_name,_X.cw_data_format +ORDER BY 1,2,_X.cw_data_format'''), ('DISTINCT Any S ORDERBY R WHERE A is Affaire, A sujet S, A ref R', '''SELECT T1.C0 FROM (SELECT DISTINCT _A.cw_sujet AS C0, _A.cw_ref AS C1 @@ -439,12 +436,9 @@ ORDER BY 2) AS T1'''), ('DISTINCT Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 2, DF WHERE X data_name N, X data D, X data_format DF;', - '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(T1.C1) + MIN(LENGTH(T1.C0))) AS C0, T1.C2 AS C1, T1.C3 AS C2 FROM (SELECT DISTINCT _X.cw_data AS C0, _X.cw_eid AS C1, _X.cw_data_name AS C2, _X.cw_data_format AS C3 + '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(_X.cw_eid) + MIN(LENGTH(_X.cw_data))) AS C0, _X.cw_data_name AS C1, _X.cw_data_format AS C2 FROM cw_File AS _X -UNION -SELECT DISTINCT _X.cw_data AS C0, _X.cw_eid AS C1, _X.cw_data_name AS C2, _X.cw_data_format AS C3 -FROM cw_Image AS _X) AS T1 -GROUP BY T1.C2,T1.C3 +GROUP BY _X.cw_data_name,_X.cw_data_format ORDER BY 2,3) AS T1 '''), @@ -1073,11 +1067,9 @@ WHERE rel_is0.eid_to=2'''), ] -from logilab.database import get_db_helper - class CWRQLTC(RQLGeneratorTC): schema = schema - + backend = 'sqlite' def test_nonregr_sol(self): delete = self.rqlhelper.parse( 'DELETE X read_permission READ_PERMISSIONSUBJECT,X add_permission ADD_PERMISSIONSUBJECT,' @@ -1103,12 
+1095,7 @@ class PostgresSQLGeneratorTC(RQLGeneratorTC): schema = schema - - #capture = True - def setUp(self): - RQLGeneratorTC.setUp(self) - dbhelper = get_db_helper('postgres') - self.o = SQLGenerator(schema, dbhelper) + backend = 'postgres' def _norm_sql(self, sql): return sql.strip() @@ -1368,13 +1355,53 @@ UNION ALL SELECT _X.cw_eid FROM appears AS appears0, cw_Folder AS _X -WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu -"""), +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu"""), ('Personne X where X has_text %(text)s, X travaille S, S has_text %(text)s', """SELECT _X.eid FROM appears AS appears0, appears AS appears2, entities AS _X, travaille_relation AS rel_travaille1 WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=_X.eid AND _X.type='Personne' AND _X.eid=rel_travaille1.eid_from AND appears2.uid=rel_travaille1.eid_to AND appears2.words @@ to_tsquery('default', 'hip&hop&momo')"""), + + ('Any X ORDERBY FTIRANK(X) DESC WHERE X has_text "toto tata"', + """SELECT appears0.uid +FROM appears AS appears0 +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') +ORDER BY ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight DESC"""), + + ('Personne X ORDERBY FTIRANK(X) WHERE X has_text "toto tata"', + """SELECT _X.eid +FROM appears AS appears0, entities AS _X +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.eid AND _X.type='Personne' +ORDER BY ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight"""), + + ('Personne X ORDERBY FTIRANK(X) WHERE X has_text %(text)s', + """SELECT _X.eid +FROM appears AS appears0, entities AS _X +WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=_X.eid AND _X.type='Personne' +ORDER BY ts_rank(appears0.words, to_tsquery('default', 'hip&hop&momo'))*appears0.weight"""), + + ('Any X ORDERBY FTIRANK(X) WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', + """SELECT T1.C0 FROM (SELECT _X.cw_eid AS C0, ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight AS C1 +FROM appears AS appears0, cw_Basket AS _X +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +UNION ALL +SELECT _X.cw_eid AS C0, ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight AS C1 +FROM appears AS appears0, cw_Folder AS _X +WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +ORDER BY 2) AS T1"""), + + ('Personne X ORDERBY FTIRANK(X),FTIRANK(S) WHERE X has_text %(text)s, X travaille S, S has_text %(text)s', + """SELECT _X.eid +FROM appears AS appears0, appears AS appears2, entities AS _X, travaille_relation AS rel_travaille1 +WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=_X.eid AND _X.type='Personne' AND _X.eid=rel_travaille1.eid_from AND appears2.uid=rel_travaille1.eid_to AND appears2.words @@ to_tsquery('default', 'hip&hop&momo') +ORDER BY ts_rank(appears0.words, to_tsquery('default', 'hip&hop&momo'))*appears0.weight,ts_rank(appears2.words, to_tsquery('default', 'hip&hop&momo'))*appears2.weight"""), + + + ('Any X, FTIRANK(X) WHERE X has_text "toto tata"', + """SELECT appears0.uid, ts_rank(appears0.words, to_tsquery('default', 'toto&tata'))*appears0.weight +FROM appears AS appears0 +WHERE appears0.words @@ to_tsquery('default', 'toto&tata')"""), + 
)): yield t @@ -1430,11 +1457,7 @@ class SqliteSQLGeneratorTC(PostgresSQLGeneratorTC): - - def setUp(self): - RQLGeneratorTC.setUp(self) - dbhelper = get_db_helper('sqlite') - self.o = SQLGenerator(schema, dbhelper) + backend = 'sqlite' def _norm_sql(self, sql): return sql.strip().replace(' ILIKE ', ' LIKE ') @@ -1532,17 +1555,33 @@ FROM appears AS appears0, cw_Folder AS _X WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu """), + + ('Any X ORDERBY FTIRANK(X) WHERE X has_text "toto tata"', + """SELECT DISTINCT appears0.uid +FROM appears AS appears0 +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata'))"""), + + ('Any X ORDERBY FTIRANK(X) WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)', + """SELECT DISTINCT _X.cw_eid +FROM appears AS appears0, cw_Basket AS _X +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +UNION +SELECT DISTINCT _X.cw_eid +FROM appears AS appears0, cw_Folder AS _X +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=_X.cw_eid AND _X.cw_name=tutu +"""), + + ('Any X, FTIRANK(X) WHERE X has_text "toto tata"', + """SELECT DISTINCT appears0.uid, 1.0 +FROM appears AS appears0 +WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata'))"""), )): yield t class MySQLGenerator(PostgresSQLGeneratorTC): - - def setUp(self): - RQLGeneratorTC.setUp(self) - dbhelper = get_db_helper('mysql') - self.o = SQLGenerator(schema, dbhelper) + backend = 'mysql' def _norm_sql(self, sql): sql = sql.strip().replace(' ILIKE ', ' LIKE ').replace('TRUE', '1').replace('FALSE', '0') @@ -1642,5 +1681,6 @@ ([{'A': 'RugbyGroup', 'B': 'RugbyTeam'}], {}, set()) ) + if __name__ == '__main__': unittest_main() diff -r b5640328ffad -r 9db65b381028 server/test/unittest_schemaserial.py --- a/server/test/unittest_schemaserial.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/test/unittest_schemaserial.py Thu Jul 01 17:06:37 2010 +0200 @@ -68,8 +68,6 @@ {'et': None, 'x': None}), ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', {'et': None, 'x': None}), - # ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', - # {'et': 'File', 'x': 'Image'}), ('SET X specializes ET WHERE X eid %(x)s, ET eid %(et)s', {'et': None, 'x': None})]) diff -r b5640328ffad -r 9db65b381028 server/test/unittest_security.py --- a/server/test/unittest_security.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/test/unittest_security.py Thu Jul 01 17:06:37 2010 +0200 @@ -192,8 +192,7 @@ self.assertEquals(len(rset), 1) ent = rset.get_entity(0, 0) session.set_pool() # necessary - self.assertRaises(Unauthorized, - ent.e_schema.check_perm, session, 'update', eid=ent.eid) + self.assertRaises(Unauthorized, ent.cw_check_perm, 'update') self.assertRaises(Unauthorized, cu.execute, "SET P travaille S WHERE P is Personne, S is Societe") # test nothing has actually been inserted: @@ -384,7 +383,7 @@ # Note.para attribute editable by managers or if the note is in "todo" state note = self.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) self.commit() - note.fire_transition('markasdone') + note.cw_adapt_to('IWorkflowable').fire_transition('markasdone') self.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid}) self.commit() cnx = self.login('iaminusersgrouponly') @@ -393,13 +392,13 @@ self.assertRaises(Unauthorized, cnx.commit) note2 = 
cu.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) cnx.commit() - note2.fire_transition('markasdone') + note2.cw_adapt_to('IWorkflowable').fire_transition('markasdone') cnx.commit() self.assertEquals(len(cu.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', {'x': note2.eid})), 0) cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) self.assertRaises(Unauthorized, cnx.commit) - note2.fire_transition('redoit') + note2.cw_adapt_to('IWorkflowable').fire_transition('redoit') cnx.commit() cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid}) cnx.commit() @@ -435,7 +434,7 @@ cnx.commit() self.restore_connection() affaire = self.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0) - affaire.fire_transition('abort') + affaire.cw_adapt_to('IWorkflowable').fire_transition('abort') self.commit() self.assertEquals(len(self.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01"')), 1) @@ -537,14 +536,15 @@ cu = cnx.cursor() self.schema['Affaire'].set_action_permissions('read', ('users',)) aff = cu.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0) - aff.fire_transition('abort') + aff.cw_adapt_to('IWorkflowable').fire_transition('abort') cnx.commit() # though changing a user state (even logged user) is reserved to managers user = cnx.user(self.session) # XXX wether it should raise Unauthorized or ValidationError is not clear # the best would probably ValidationError if the transition doesn't exist # from the current state but Unauthorized if it exists but user can't pass it - self.assertRaises(ValidationError, user.fire_transition, 'deactivate') + self.assertRaises(ValidationError, + user.cw_adapt_to('IWorkflowable').fire_transition, 'deactivate') finally: # restore orig perms for action, perms in affaire_perms.iteritems(): @@ -552,18 +552,19 @@ def test_trinfo_security(self): aff = self.execute('INSERT Affaire X: X ref "ARCT01"').get_entity(0, 0) + iworkflowable = aff.cw_adapt_to('IWorkflowable') self.commit() - aff.fire_transition('abort') + iworkflowable.fire_transition('abort') self.commit() # can change tr info comment self.execute('SET TI comment %(c)s WHERE TI wf_info_for X, X ref "ARCT01"', {'c': u'bouh!'}) self.commit() - aff.clear_related_cache('wf_info_for', 'object') - trinfo = aff.latest_trinfo() + aff.cw_clear_relation_cache('wf_info_for', 'object') + trinfo = iworkflowable.latest_trinfo() self.assertEquals(trinfo.comment, 'bouh!') # but not from_state/to_state - aff.clear_related_cache('wf_info_for', role='object') + aff.cw_clear_relation_cache('wf_info_for', role='object') self.assertRaises(Unauthorized, self.execute, 'SET TI from_state S WHERE TI eid %(ti)s, S name "ben non"', {'ti': trinfo.eid}) diff -r b5640328ffad -r 9db65b381028 server/test/unittest_storage.py --- a/server/test/unittest_storage.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/test/unittest_storage.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""unit tests for module cubicweb.server.sources.storages - -""" +"""unit tests for module cubicweb.server.sources.storages""" from __future__ import with_statement @@ -89,11 +87,11 @@ f1.set_attributes(data=Binary('the new data')) self.rollback() self.assertEquals(file(expected_filepath).read(), 'the-data') - f1.delete() + f1.cw_delete() self.failUnless(osp.isfile(expected_filepath)) self.rollback() self.failUnless(osp.isfile(expected_filepath)) - f1.delete() + f1.cw_delete() self.commit() self.failIf(osp.isfile(expected_filepath)) @@ -133,11 +131,17 @@ ex = self.assertRaises(QueryError, self.execute, '(Any D WHERE X data D, X is File)' ' UNION ' - '(Any D WHERE X data D, X is Image)') + '(Any D WHERE X title D, X is Bookmark)') self.assertEquals(str(ex), 'query fetch some source mapped attribute, some not') - ex = self.assertRaises(QueryError, - self.execute, 'Any D WHERE X data D') - self.assertEquals(str(ex), 'query fetch some source mapped attribute, some not') + + storages.set_attribute_storage(self.repo, 'State', 'name', + storages.BytesFileSystemStorage(self.tempdir)) + try: + ex = self.assertRaises(QueryError, + self.execute, 'Any D WHERE X name D, X is IN (State, Transition)') + self.assertEquals(str(ex), 'query fetch some source mapped attribute, some not') + finally: + storages.unset_attribute_storage(self.repo, 'State', 'name') def test_source_mapped_attribute_advanced(self): f1 = self.create_file() diff -r b5640328ffad -r 9db65b381028 server/test/unittest_undo.py --- a/server/test/unittest_undo.py Thu Jul 01 09:23:39 2010 +0200 +++ b/server/test/unittest_undo.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,6 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -""" - -""" from __future__ import with_statement from cubicweb import ValidationError @@ -104,7 +101,7 @@ address=u'toto@logilab.org', reverse_use_email=toto) txuuid1 = self.commit() - toto.delete() + toto.cw_delete() txuuid2 = self.commit() undoable_transactions = self.cnx.undoable_transactions txs = undoable_transactions(action='D') @@ -147,7 +144,7 @@ self.commit() txs = self.cnx.undoable_transactions() self.assertEquals(len(txs), 2) - toto.delete() + toto.cw_delete() txuuid = self.commit() actions = self.cnx.transaction_info(txuuid).actions_list() self.assertEquals(len(actions), 1) @@ -160,8 +157,8 @@ self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': toto.eid})) self.failUnless(self.execute('Any X WHERE X eid %(x)s', {'x': e.eid})) self.failUnless(self.execute('Any X WHERE X has_text "toto@logilab"')) - self.assertEquals(toto.state, 'activated') - self.assertEquals(toto.get_email(), 'toto@logilab.org') + self.assertEquals(toto.cw_adapt_to('IWorkflowable').state, 'activated') + self.assertEquals(toto.cw_adapt_to('IEmailable').get_email(), 'toto@logilab.org') self.assertEquals([(p.pkey, p.value) for p in toto.reverse_for_user], [('ui.default-text-format', 'text/rest')]) self.assertEquals([g.name for g in toto.in_group], @@ -186,7 +183,7 @@ c = session.create_entity('Card', title=u'hop', content=u'hop') p = session.create_entity('Personne', nom=u'louis', fiche=c) self.commit() - c.delete() + c.cw_delete() txuuid = self.commit() c2 = session.create_entity('Card', title=u'hip', content=u'hip') p.set_relations(fiche=c2) @@ -207,9 +204,9 @@ session.execute('DELETE U in_group G WHERE U eid %(x)s', {'x': self.toto.eid}) self.toto.set_relations(in_group=g) self.commit() - self.toto.delete() + self.toto.cw_delete() txuuid = self.commit() - g.delete() 
+ g.cw_delete() self.commit() errors = self.cnx.undo_transaction(txuuid) self.assertEquals(errors, diff -r b5640328ffad -r 9db65b381028 skeleton/data/external_resources.tmpl --- a/skeleton/data/external_resources.tmpl Thu Jul 01 09:23:39 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,11 +0,0 @@ -# -*- shell-script -*- -############################################################################### -# -# put here information about external resources used by your components, -# or to overides existing external resources configuration -# -############################################################################### - -# CSS stylesheets to include in HTML headers -# uncomment the line below to use template specific stylesheet -# STYLESHEETS = DATADIR/cubes.%(cubename)s.css diff -r b5640328ffad -r 9db65b381028 skeleton/uiprops.py.tmpl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/skeleton/uiprops.py.tmpl Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,15 @@ +############################################################################### +# +# Put here information about external resources / styles used by your cube, +# or to override existing UI properties. +# +# Existing properties are available through the `sheet` dictionary available +# in the global namespace. You also have access to a `data` function which +# will return the proper url for resources in the 'data' directory. +# +# /!\ this file should not be imported /!\ +############################################################################### + +# CSS stylesheets to include in HTML headers +# uncomment the line below to use template specific stylesheet +# STYLESHEETS = sheet['STYLESHEETS'] + [data('cubes.%(cubename)s.css')] diff -r b5640328ffad -r 9db65b381028 sobjects/notification.py --- a/sobjects/notification.py Thu Jul 01 09:23:39 2010 +0200 +++ b/sobjects/notification.py Thu Jul 01 17:06:37 2010 +0200 @@ -46,7 +46,8 @@ mode = self._cw.vreg.config['default-recipients-mode'] if mode == 'users': execute = self._cw.execute - dests = [(u.get_email(), u.property_value('ui.language')) + dests = [(u.cw_adapt_to('IEmailable').get_email(), + u.property_value('ui.language')) for u in execute(self.user_rql, build_descr=True).entities()] elif mode == 'default-dest-addrs': lang = self._cw.vreg.property_value('ui.language') diff -r b5640328ffad -r 9db65b381028 sobjects/test/unittest_notification.py --- a/sobjects/test/unittest_notification.py Thu Jul 01 09:23:39 2010 +0200 +++ b/sobjects/test/unittest_notification.py Thu Jul 01 17:06:37 2010 +0200 @@ -85,7 +85,7 @@ def test_status_change_view(self): req = self.request() u = self.create_user('toto', req=req) - u.fire_transition('deactivate', comment=u'yeah') + u.cw_adapt_to('IWorkflowable').fire_transition('deactivate', comment=u'yeah') self.failIf(MAILBOX) self.commit() self.assertEquals(len(MAILBOX), 1) diff -r b5640328ffad -r 9db65b381028 sobjects/test/unittest_supervising.py --- a/sobjects/test/unittest_supervising.py Thu Jul 01 09:23:39 2010 +0200 +++ b/sobjects/test/unittest_supervising.py Thu Jul 01 17:06:37 2010 +0200 @@ -84,7 +84,7 @@ self.assertEquals(op.to_send[0][1], ['test@logilab.fr']) self.commit() # some other changes ####### - user.fire_transition('deactivate') + user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') sentops = [op for op in session.pending_operations if isinstance(op, SupervisionMailOp)] self.assertEquals(len(sentops), 1) diff -r b5640328ffad -r 9db65b381028 sobjects/textparsers.py --- a/sobjects/textparsers.py Thu Jul 01 09:23:39 2010
+0200 +++ b/sobjects/textparsers.py Thu Jul 01 17:06:37 2010 +0200 @@ -74,10 +74,14 @@ if not hasattr(entity, 'in_state'): self.error('bad change state instruction for eid %s', eid) continue - tr = entity.current_workflow and entity.current_workflow.transition_by_name(trname) + iworkflowable = entity.cw_adapt_to('IWorkflowable') + if iworkflowable.current_workflow: + tr = iworkflowable.current_workflow.transition_by_name(trname) + else: + tr = None if tr and tr.may_be_fired(entity.eid): try: - trinfo = entity.fire_transition(tr) + trinfo = iworkflowable.fire_transition(tr) caller.fire_event('state-changed', {'trinfo': trinfo, 'entity': entity}) except: diff -r b5640328ffad -r 9db65b381028 test/data/scripts/script1.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/scripts/script1.py Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,3 @@ +assert 'data/scripts/script1.py' == __file__ +assert '__main__' == __name__ +assert [] == __args__, __args__ diff -r b5640328ffad -r 9db65b381028 test/data/scripts/script2.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/scripts/script2.py Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,3 @@ +assert 'data/scripts/script2.py' == __file__ +assert '__main__' == __name__ +assert ['-v'] == __args__, __args__ diff -r b5640328ffad -r 9db65b381028 test/data/scripts/script3.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/test/data/scripts/script3.py Thu Jul 01 17:06:37 2010 +0200 @@ -0,0 +1,3 @@ +assert 'data/scripts/script3.py' == __file__ +assert '__main__' == __name__ +assert ['-vd', '-f', 'FILE.TXT'] == __args__, __args__ diff -r b5640328ffad -r 9db65b381028 test/unittest_cwctl.py --- a/test/unittest_cwctl.py Thu Jul 01 09:23:39 2010 +0200 +++ b/test/unittest_cwctl.py Thu Jul 01 17:06:37 2010 +0200 @@ -24,8 +24,12 @@ from logilab.common.testlib import TestCase, unittest_main from cubicweb.cwconfig import CubicWebConfiguration +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.server.migractions import ServerMigrationHelper + CubicWebConfiguration.load_cwctl_plugins() # XXX necessary? 
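The three scripts added under test/data/scripts/ above document the execution context that ServerMigrationHelper.cmd_process_script() sets up for a migration script: the script runs as __main__, sees its own path in __file__, and receives the extra command-line arguments in __args__ (this is exercised by the shell test that follows). As a rough, illustrative sketch only (the '-v' flag handling is made up for the example and is not part of the patch), such a script can look like:

    # hypothetical migration script run via cmd_process_script(path, scriptargs=['-v'])
    assert __name__ == '__main__'        # scripts always execute as __main__
    verbose = '-v' in __args__           # __args__ holds the scriptargs list
    if verbose:
        print 'processing', __file__     # __file__ is the path given to cmd_process_script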
+ class CubicWebCtlTC(TestCase): def setUp(self): self.stream = StringIO() @@ -37,5 +41,25 @@ from cubicweb.cwctl import ListCommand ListCommand().run([]) + +class CubicWebShellTC(CubicWebTC): + + def test_process_script_args_context(self): + repo = self.cnx._repo + mih = ServerMigrationHelper(None, repo=repo, cnx=self.cnx, + interactive=False, + # hack so it don't try to load fs schema + schema=1) + scripts = {'script1.py': list(), + 'script2.py': ['-v'], + 'script3.py': ['-vd', '-f', 'FILE.TXT'], + } + mih.cmd_process_script('data/scripts/script1.py', funcname=None) + for script, args in scripts.items(): + scriptname = os.path.join('data/scripts/', script) + self.assert_(os.path.exists(scriptname)) + mih.cmd_process_script(scriptname, None, scriptargs=args) + + if __name__ == '__main__': unittest_main() diff -r b5640328ffad -r 9db65b381028 test/unittest_entity.py --- a/test/unittest_entity.py Thu Jul 01 09:23:39 2010 +0200 +++ b/test/unittest_entity.py Thu Jul 01 17:06:37 2010 +0200 @@ -97,27 +97,27 @@ user = self.execute('INSERT CWUser X: X login "toto", X upassword %(pwd)s, X in_group G WHERE G name "users"', {'pwd': 'toto'}).get_entity(0, 0) self.commit() - user.fire_transition('deactivate') + user.cw_adapt_to('IWorkflowable').fire_transition('deactivate') self.commit() eid2 = self.execute('INSERT CWUser X: X login "tutu", X upassword %(pwd)s', {'pwd': 'toto'})[0][0] e = self.execute('Any X WHERE X eid %(x)s', {'x': eid2}).get_entity(0, 0) e.copy_relations(user.eid) self.commit() - e.clear_related_cache('in_state', 'subject') - self.assertEquals(e.state, 'activated') + e.cw_clear_relation_cache('in_state', 'subject') + self.assertEquals(e.cw_adapt_to('IWorkflowable').state, 'activated') def test_related_cache_both(self): user = self.execute('Any X WHERE X eid %(x)s', {'x':self.user().eid}).get_entity(0, 0) adeleid = self.execute('INSERT EmailAddress X: X address "toto@logilab.org", U use_email X WHERE U login "admin"')[0][0] self.commit() - self.assertEquals(user._related_cache, {}) + self.assertEquals(user._cw_related_cache, {}) email = user.primary_email[0] - self.assertEquals(sorted(user._related_cache), ['primary_email_subject']) - self.assertEquals(email._related_cache.keys(), ['primary_email_object']) + self.assertEquals(sorted(user._cw_related_cache), ['primary_email_subject']) + self.assertEquals(email._cw_related_cache.keys(), ['primary_email_object']) groups = user.in_group - self.assertEquals(sorted(user._related_cache), ['in_group_subject', 'primary_email_subject']) + self.assertEquals(sorted(user._cw_related_cache), ['in_group_subject', 'primary_email_subject']) for group in groups: - self.failIf('in_group_subject' in group._related_cache, group._related_cache.keys()) + self.failIf('in_group_subject' in group._cw_related_cache, group._cw_related_cache.keys()) def test_related_limit(self): req = self.request() @@ -197,20 +197,20 @@ Note.fetch_attrs, Note.fetch_order = fetch_config(('type',)) SubNote.fetch_attrs, SubNote.fetch_order = fetch_config(('type',)) p = self.request().create_entity('Personne', nom=u'pouet') - self.assertEquals(p.related_rql('evaluee'), + self.assertEquals(p.cw_related_rql('evaluee'), 'Any X,AA,AB ORDERBY AA ASC WHERE E eid %(x)s, E evaluee X, ' 'X type AA, X modification_date AB') Personne.fetch_attrs, Personne.fetch_order = fetch_config(('nom', )) # XXX - self.assertEquals(p.related_rql('evaluee'), + self.assertEquals(p.cw_related_rql('evaluee'), 'Any X,AA ORDERBY AA DESC ' 'WHERE E eid %(x)s, E evaluee X, X modification_date AA') tag = 
self.vreg['etypes'].etype_class('Tag')(self.request()) - self.assertEquals(tag.related_rql('tags', 'subject'), + self.assertEquals(tag.cw_related_rql('tags', 'subject'), 'Any X,AA ORDERBY AA DESC ' 'WHERE E eid %(x)s, E tags X, X modification_date AA') - self.assertEquals(tag.related_rql('tags', 'subject', ('Personne',)), + self.assertEquals(tag.cw_related_rql('tags', 'subject', ('Personne',)), 'Any X,AA,AB ORDERBY AA ASC ' 'WHERE E eid %(x)s, E tags X, X is IN (Personne), X nom AA, ' 'X modification_date AB') @@ -219,47 +219,47 @@ tag = self.vreg['etypes'].etype_class('Tag')(self.request()) for ttype in self.schema['tags'].objects(): self.vreg['etypes'].etype_class(ttype).fetch_attrs = ('modification_date',) - self.assertEquals(tag.related_rql('tags', 'subject'), + self.assertEquals(tag.cw_related_rql('tags', 'subject'), 'Any X,AA ORDERBY AA DESC ' 'WHERE E eid %(x)s, E tags X, X modification_date AA') def test_unrelated_rql_security_1(self): user = self.request().user - rql = user.unrelated_rql('use_email', 'EmailAddress', 'subject')[0] + rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] self.assertEquals(rql, 'Any O,AA,AB,AC ORDERBY AC DESC ' 'WHERE NOT S use_email O, S eid %(x)s, O is EmailAddress, O address AA, O alias AB, O modification_date AC') self.create_user('toto') self.login('toto') user = self.request().user - rql = user.unrelated_rql('use_email', 'EmailAddress', 'subject')[0] + rql = user.cw_unrelated_rql('use_email', 'EmailAddress', 'subject')[0] self.assertEquals(rql, 'Any O,AA,AB,AC ORDERBY AC DESC ' 'WHERE NOT S use_email O, S eid %(x)s, O is EmailAddress, O address AA, O alias AB, O modification_date AC') user = self.execute('Any X WHERE X login "admin"').get_entity(0, 0) - self.assertRaises(Unauthorized, user.unrelated_rql, 'use_email', 'EmailAddress', 'subject') + self.assertRaises(Unauthorized, user.cw_unrelated_rql, 'use_email', 'EmailAddress', 'subject') self.login('anon') user = self.request().user - self.assertRaises(Unauthorized, user.unrelated_rql, 'use_email', 'EmailAddress', 'subject') + self.assertRaises(Unauthorized, user.cw_unrelated_rql, 'use_email', 'EmailAddress', 'subject') def test_unrelated_rql_security_2(self): email = self.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0) - rql = email.unrelated_rql('use_email', 'CWUser', 'object')[0] + rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] self.assertEquals(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ASC ' 'WHERE NOT S use_email O, O eid %(x)s, S is CWUser, S login AA, S firstname AB, S surname AC, S modification_date AD') - #rql = email.unrelated_rql('use_email', 'Person', 'object')[0] + #rql = email.cw_unrelated_rql('use_email', 'Person', 'object')[0] #self.assertEquals(rql, '') self.login('anon') email = self.execute('Any X WHERE X eid %(x)s', {'x': email.eid}).get_entity(0, 0) - rql = email.unrelated_rql('use_email', 'CWUser', 'object')[0] + rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] self.assertEquals(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ' 'WHERE NOT EXISTS(S use_email O), O eid %(x)s, S is CWUser, S login AA, S firstname AB, S surname AC, S modification_date AD, ' 'A eid %(B)s, EXISTS(S identity A, NOT A in_group C, C name "guests", C is CWGroup)') - #rql = email.unrelated_rql('use_email', 'Person', 'object')[0] + #rql = email.cw_unrelated_rql('use_email', 'Person', 'object')[0] #self.assertEquals(rql, '') def test_unrelated_rql_security_nonexistant(self): self.login('anon') email = 
self.vreg['etypes'].etype_class('EmailAddress')(self.request()) - rql = email.unrelated_rql('use_email', 'CWUser', 'object')[0] + rql = email.cw_unrelated_rql('use_email', 'CWUser', 'object')[0] self.assertEquals(rql, 'Any S,AA,AB,AC,AD ORDERBY AA ' 'WHERE S is CWUser, S login AA, S firstname AB, S surname AC, S modification_date AD, ' 'A eid %(B)s, EXISTS(S identity A, NOT A in_group C, C name "guests", C is CWGroup)') @@ -442,8 +442,8 @@ e['data_format'] = 'text/html' e['data_encoding'] = 'ascii' e._cw.transaction_data = {} # XXX req should be a session - self.assertEquals(set(e.get_words()), - set(['an', 'html', 'file', 'du', 'html', 'some', 'data'])) + self.assertEquals(e.cw_adapt_to('IFTIndexable').get_words(), + {'C': [u'du', u'html', 'an', 'html', 'file', u'some', u'data']}) def test_nonregr_relation_cache(self): @@ -462,9 +462,9 @@ trinfo = self.execute('Any X WHERE X eid %(x)s', {'x': eid}).get_entity(0, 0) trinfo.complete() self.failUnless(isinstance(trinfo['creation_date'], datetime)) - self.failUnless(trinfo.relation_cached('from_state', 'subject')) - self.failUnless(trinfo.relation_cached('to_state', 'subject')) - self.failUnless(trinfo.relation_cached('wf_info_for', 'subject')) + self.failUnless(trinfo.cw_relation_cached('from_state', 'subject')) + self.failUnless(trinfo.cw_relation_cached('to_state', 'subject')) + self.failUnless(trinfo.cw_relation_cached('wf_info_for', 'subject')) self.assertEquals(trinfo.by_transition, ()) def test_request_cache(self): @@ -508,7 +508,7 @@ def test_metainformation_and_external_absolute_url(self): req = self.request() note = req.create_entity('Note', type=u'z') - metainf = note.metainformation() + metainf = note.cw_metainformation() self.assertEquals(metainf, {'source': {'adapter': 'native', 'uri': 'system'}, 'type': u'Note', 'extid': None}) self.assertEquals(note.absolute_url(), 'http://testing.fr/cubicweb/note/%s' % note.eid) metainf['source'] = metainf['source'].copy() diff -r b5640328ffad -r 9db65b381028 test/unittest_rset.py --- a/test/unittest_rset.py Thu Jul 01 09:23:39 2010 +0200 +++ b/test/unittest_rset.py Thu Jul 01 17:06:37 2010 +0200 @@ -233,10 +233,10 @@ self.assertEquals(e['surname'], 'di mascio') self.assertRaises(KeyError, e.__getitem__, 'firstname') self.assertRaises(KeyError, e.__getitem__, 'creation_date') - self.assertEquals(pprelcachedict(e._related_cache), []) + self.assertEquals(pprelcachedict(e._cw_related_cache), []) e.complete() self.assertEquals(e['firstname'], 'adrien') - self.assertEquals(pprelcachedict(e._related_cache), []) + self.assertEquals(pprelcachedict(e._cw_related_cache), []) def test_get_entity_advanced(self): self.request().create_entity('Bookmark', title=u'zou', path=u'/view') @@ -249,19 +249,19 @@ self.assertEquals(e['title'], 'zou') self.assertRaises(KeyError, e.__getitem__, 'path') self.assertEquals(e.view('text'), 'zou') - self.assertEquals(pprelcachedict(e._related_cache), []) + self.assertEquals(pprelcachedict(e._cw_related_cache), []) e = rset.get_entity(0, 1) self.assertEquals(e.cw_row, 0) self.assertEquals(e.cw_col, 1) self.assertEquals(e['login'], 'anon') self.assertRaises(KeyError, e.__getitem__, 'firstname') - self.assertEquals(pprelcachedict(e._related_cache), + self.assertEquals(pprelcachedict(e._cw_related_cache), []) e.complete() self.assertEquals(e['firstname'], None) self.assertEquals(e.view('text'), 'anon') - self.assertEquals(pprelcachedict(e._related_cache), + self.assertEquals(pprelcachedict(e._cw_related_cache), []) self.assertRaises(NotAnEntity, rset.get_entity, 0, 2) @@ 
-273,7 +273,7 @@ seid = self.execute('State X WHERE X name "activated"')[0][0] # for_user / in_group are prefetched in CWUser __init__, in_state should # be filed from our query rset - self.assertEquals(pprelcachedict(e._related_cache), + self.assertEquals(pprelcachedict(e._cw_related_cache), [('in_state_subject', [seid])]) def test_get_entity_advanced_prefilled_cache(self): @@ -283,7 +283,7 @@ 'X title XT, S name SN, U login UL, X eid %s' % e.eid) e = rset.get_entity(0, 0) self.assertEquals(e['title'], 'zou') - self.assertEquals(pprelcachedict(e._related_cache), + self.assertEquals(pprelcachedict(e._cw_related_cache), [('created_by_subject', [5])]) # first level of recursion u = e.created_by[0] @@ -302,9 +302,9 @@ e = rset.get_entity(0, 0) # if any of the assertion below fails with a KeyError, the relation is not cached # related entities should be an empty list - self.assertEquals(e.related_cache('primary_email', 'subject', True), ()) + self.assertEquals(e._cw_relation_cache('primary_email', 'subject', True), ()) # related rset should be an empty rset - cached = e.related_cache('primary_email', 'subject', False) + cached = e._cw_relation_cache('primary_email', 'subject', False) self.assertIsInstance(cached, ResultSet) self.assertEquals(cached.rowcount, 0) @@ -405,5 +405,19 @@ rset = self.execute('Any D, COUNT(U) GROUPBY D WHERE U is CWUser, U creation_date D') self.assertEquals(rset.related_entity(0,0), (None, None)) + def test_str(self): + rset = self.execute('(Any X,N WHERE X is CWGroup, X name N)') + self.assertIsInstance(str(rset), basestring) + self.assertEquals(len(str(rset).splitlines()), 1) + + def test_repr(self): + rset = self.execute('(Any X,N WHERE X is CWGroup, X name N)') + self.assertIsInstance(repr(rset), basestring) + self.assertTrue(len(repr(rset).splitlines()) > 1) + + rset = self.execute('(Any X WHERE X is CWGroup, X name "managers")') + self.assertIsInstance(str(rset), basestring) + self.assertEquals(len(str(rset).splitlines()), 1) + if __name__ == '__main__': unittest_main() diff -r b5640328ffad -r 9db65b381028 test/unittest_schema.py --- a/test/unittest_schema.py Thu Jul 01 09:23:39 2010 +0200 +++ b/test/unittest_schema.py Thu Jul 01 17:06:37 2010 +0200 @@ -176,7 +176,7 @@ 'CWCache', 'CWConstraint', 'CWConstraintType', 'CWEType', 'CWAttribute', 'CWGroup', 'EmailAddress', 'CWRelation', 'CWPermission', 'CWProperty', 'CWRType', 'CWUser', - 'ExternalUri', 'File', 'Float', 'Image', 'Int', 'Interval', 'Note', + 'ExternalUri', 'File', 'Float', 'Int', 'Interval', 'Note', 'Password', 'Personne', 'RQLExpression', 'Societe', 'State', 'String', 'SubNote', 'SubWorkflowExitPoint', diff -r b5640328ffad -r 9db65b381028 test/unittest_selectors.py --- a/test/unittest_selectors.py Thu Jul 01 09:23:39 2010 +0200 +++ b/test/unittest_selectors.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,15 +15,14 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""unit tests for selectors mechanism - -""" +"""unit tests for selectors mechanism""" from logilab.common.testlib import TestCase, unittest_main +from cubicweb import Binary from cubicweb.devtools.testlib import CubicWebTC from cubicweb.appobject import Selector, AndSelector, OrSelector -from cubicweb.selectors import implements, match_user_groups +from cubicweb.selectors import implements, adaptable, match_user_groups from cubicweb.interfaces import IDownloadable from cubicweb.web import action @@ -140,11 +139,12 @@ class ImplementsSelectorTC(CubicWebTC): def test_etype_priority(self): req = self.request() - cls = self.vreg['etypes'].etype_class('File') - anyscore = implements('Any').score_class(cls, req) - idownscore = implements(IDownloadable).score_class(cls, req) + f = req.create_entity('File', data_name=u'hop.txt', data=Binary('hop')) + rset = f.as_rset() + anyscore = implements('Any')(f.__class__, req, rset=rset) + idownscore = adaptable('IDownloadable')(f.__class__, req, rset=rset) self.failUnless(idownscore > anyscore, (idownscore, anyscore)) - filescore = implements('File').score_class(cls, req) + filescore = implements('File')(f.__class__, req, rset=rset) self.failUnless(filescore > idownscore, (filescore, idownscore)) def test_etype_inheritance_no_yams_inheritance(self): diff -r b5640328ffad -r 9db65b381028 test/unittest_utils.py --- a/test/unittest_utils.py Thu Jul 01 09:23:39 2010 +0200 +++ b/test/unittest_utils.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,16 +15,16 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""unit tests for module cubicweb.utils - -""" +"""unit tests for module cubicweb.utils""" import re import decimal import datetime from logilab.common.testlib import TestCase, unittest_main + from cubicweb.utils import make_uid, UStringIO, SizeConstrainedList, RepeatList +from cubicweb.entity import Entity try: from cubicweb.utils import CubicWebJsonEncoder, json @@ -99,6 +99,7 @@ l.pop(2) self.assertEquals(l, [(1, 3)]*2) + class SizeConstrainedListTC(TestCase): def test_append(self): @@ -117,6 +118,7 @@ l.extend(extension) yield self.assertEquals, l, expected + class JSONEncoderTC(TestCase): def setUp(self): if json is None: @@ -136,6 +138,20 @@ def test_encoding_decimal(self): self.assertEquals(self.encode(decimal.Decimal('1.2')), '1.2') + def test_encoding_bare_entity(self): + e = Entity(None) + e['pouet'] = 'hop' + e.eid = 2 + self.assertEquals(json.loads(self.encode(e)), + {'pouet': 'hop', 'eid': 2}) + + def test_encoding_entity_in_list(self): + e = Entity(None) + e['pouet'] = 'hop' + e.eid = 2 + self.assertEquals(json.loads(self.encode([e])), + [{'pouet': 'hop', 'eid': 2}]) + def test_encoding_unknown_stuff(self): self.assertEquals(self.encode(TestCase), 'null') diff -r b5640328ffad -r 9db65b381028 test/unittest_vregistry.py --- a/test/unittest_vregistry.py Thu Jul 01 09:23:39 2010 +0200 +++ b/test/unittest_vregistry.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-""" -""" from logilab.common.testlib import unittest_main, TestCase from os.path import join @@ -27,7 +25,7 @@ from cubicweb.cwvreg import CubicWebVRegistry, UnknownProperty from cubicweb.devtools import TestServerConfiguration from cubicweb.devtools.testlib import CubicWebTC -from cubicweb.interfaces import IMileStone +from cubicweb.view import EntityAdapter from cubes.card.entities import Card @@ -56,21 +54,26 @@ def test_load_subinterface_based_appobjects(self): - self.vreg.reset() self.vreg.register_objects([join(BASE, 'web', 'views', 'iprogress.py')]) # check progressbar was kicked self.failIf(self.vreg['views'].get('progressbar')) - class MyCard(Card): - __implements__ = (IMileStone,) - self.vreg.reset() + # we've to emulate register_objects to add custom MyCard objects + path = [join(BASE, 'entities', '__init__.py'), + join(BASE, 'entities', 'adapters.py'), + join(BASE, 'web', 'views', 'iprogress.py')] + filemods = self.vreg.init_registration(path, None) + for filepath, modname in filemods: + self.vreg.load_file(filepath, modname) + class CardIProgressAdapter(EntityAdapter): + __regid__ = 'IProgress' self.vreg._loadedmods[__name__] = {} - self.vreg.register(MyCard) - self.vreg.register_objects([join(BASE, 'entities', '__init__.py'), - join(BASE, 'web', 'views', 'iprogress.py')]) + self.vreg.register(CardIProgressAdapter) + self.vreg.initialization_completed() # check progressbar isn't kicked self.assertEquals(len(self.vreg['views']['progressbar']), 1) def test_properties(self): + self.vreg.reset() self.failIf('system.version.cubicweb' in self.vreg['propertydefs']) self.failUnless(self.vreg.property_info('system.version.cubicweb')) self.assertRaises(UnknownProperty, self.vreg.property_info, 'a.non.existent.key') diff -r b5640328ffad -r 9db65b381028 utils.py --- a/utils.py Thu Jul 01 09:23:39 2010 +0200 +++ b/utils.py Thu Jul 01 17:06:37 2010 +0200 @@ -335,21 +335,11 @@ class CubicWebJsonEncoder(json.JSONEncoder): """define a json encoder to be able to encode yams std types""" - # _iterencode is the only entry point I've found to use a custom encode - # hook early enough: .default() is called if nothing else matched before, - # .iterencode() is called once on the main structure to encode and then - # never gets called again. - # For the record, our main use case is in FormValidateController with: - # json.dumps((status, args, entity), cls=CubicWebJsonEncoder) - # where we want all the entity attributes, including eid, to be part - # of the json object dumped. - # This would have once more been easier if Entity didn't extend dict. 
- def _iterencode(self, obj, markers=None): - if hasattr(obj, '__json_encode__'): - obj = obj.__json_encode__() - return json.JSONEncoder._iterencode(self, obj, markers) - def default(self, obj): + if hasattr(obj, 'eid'): + d = obj.cw_attr_cache.copy() + d['eid'] = obj.eid + return d if isinstance(obj, datetime.datetime): return obj.strftime('%Y/%m/%d %H:%M:%S') elif isinstance(obj, datetime.date): diff -r b5640328ffad -r 9db65b381028 view.py --- a/view.py Thu Jul 01 09:23:39 2010 +0200 +++ b/view.py Thu Jul 01 17:06:37 2010 +0200 @@ -34,6 +34,7 @@ from cubicweb.appobject import AppObject from cubicweb.utils import UStringIO, HTMLStream from cubicweb.schema import display_name +from cubicweb.vregistry import classid # robots control NOINDEX = u'' @@ -366,6 +367,17 @@ __select__ = non_final_entity() category = 'entityview' + def call(self, **kwargs): + if self.cw_rset is None: + self.entity_call(self.cw_extra_kwargs.pop('entity')) + else: + super(EntityView, self).call(**kwargs) + + def cell_call(self, row, col, **kwargs): + self.entity_call(self.cw_rset.get_entity(row, col), **kwargs) + + def entity_call(self, entity, **kwargs): + raise NotImplementedError() class StartupView(View): """base class for views which doesn't need a particular result set to be @@ -519,3 +531,37 @@ # XXX a generic '%s%s' % (self.__regid__, self.__registry__.capitalize()) would probably be nicer def div_id(self): return '%sComponent' % self.__regid__ + + +class Adapter(AppObject): + """base class for adapters""" + __registry__ = 'adapters' + + +class EntityAdapter(Adapter): + """base class for entity adapters (eg adapt an entity to an interface)""" + def __init__(self, _cw, **kwargs): + try: + self.entity = kwargs.pop('entity') + except KeyError: + self.entity = kwargs['rset'].get_entity(kwargs.get('row') or 0, + kwargs.get('col') or 0) + Adapter.__init__(self, _cw, **kwargs) + + +def implements_adapter_compat(iface): + def _pre39_compat(func): + def decorated(self, *args, **kwargs): + entity = self.entity + if hasattr(entity, func.__name__): + warn('[3.9] %s method is deprecated, define it on a custom ' + '%s for %s instead' % (func.__name__, iface, + classid(entity.__class__)), + DeprecationWarning) + member = getattr(entity, func.__name__) + if callable(member): + return member(*args, **kwargs) + return member + return func(self, *args, **kwargs) + return decorated + return _pre39_compat diff -r b5640328ffad -r 9db65b381028 vregistry.py --- a/vregistry.py Thu Jul 01 09:23:39 2010 +0200 +++ b/vregistry.py Thu Jul 01 17:06:37 2010 +0200 @@ -44,7 +44,7 @@ from cubicweb import CW_SOFTWARE_ROOT from cubicweb import RegistryNotFound, ObjectNotFound, NoSelectableObject -from cubicweb.appobject import AppObject +from cubicweb.appobject import AppObject, class_regid def _toload_info(path, extrapath, _toload=None): """return a dictionary of : and an ordered list of @@ -83,16 +83,6 @@ """returns a unique identifier for an appobject class""" return '%s.%s' % (cls.__module__, cls.__name__) -def class_regid(cls): - """returns a unique identifier for an appobject class""" - if 'id' in cls.__dict__: - warn('[3.6] %s.%s: id is deprecated, use __regid__' - % (cls.__module__, cls.__name__), DeprecationWarning) - cls.__regid__ = cls.id - if hasattr(cls, 'id') and not isinstance(cls.id, property): - return cls.id - return cls.__regid__ - def class_registries(cls, registryname): if registryname: return (registryname,) @@ -235,8 +225,8 @@ % (args, kwargs.keys(), [repr(v) for v in appobjects])) if len(winners) > 1: - # log in 
production environement, error while debugging - if self.config.debugmode: + # log in production environement / test, error while debugging + if self.config.debugmode or self.config.mode == 'test': raise Exception('select ambiguity, args: %s\nkwargs: %s %s' % (args, kwargs.keys(), [repr(v) for v in winners])) @@ -405,6 +395,7 @@ # initialization methods ################################################### def init_registration(self, path, extrapath=None): + self.reset() # compute list of all modules that have to be loaded self._toloadmods, filemods = _toload_info(path, extrapath) # XXX is _loadedmods still necessary ? It seems like it's useful diff -r b5640328ffad -r 9db65b381028 web/__init__.py --- a/web/__init__.py Thu Jul 01 09:23:39 2010 +0200 +++ b/web/__init__.py Thu Jul 01 17:06:37 2010 +0200 @@ -17,9 +17,8 @@ # with CubicWeb. If not, see . """CubicWeb web client core. You'll need a apache-modpython or twisted publisher to get a full CubicWeb web application - +""" -""" __docformat__ = "restructuredtext en" _ = unicode @@ -77,7 +76,7 @@ return json_dumps(repr(value)) return newfunc -@deprecated('[3.4] use req.build_ajax_replace_url() instead') +@deprecated('[3.4] use req.ajax_replace_url() instead') def ajax_replace_url(nodeid, rql, vid=None, swap=False, **extraparams): """builds a replacePageChunk-like url >>> ajax_replace_url('foo', 'Person P') diff -r b5640328ffad -r 9db65b381028 web/action.py --- a/web/action.py Thu Jul 01 09:23:39 2010 +0200 +++ b/web/action.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""abstract action classes for CubicWeb web client +"""abstract action classes for CubicWeb web client""" -""" __docformat__ = "restructuredtext en" _ = unicode diff -r b5640328ffad -r 9db65b381028 web/application.py --- a/web/application.py Thu Jul 01 09:23:39 2010 +0200 +++ b/web/application.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""CubicWeb web client application object +"""CubicWeb web client application object""" -""" from __future__ import with_statement __docformat__ = "restructuredtext en" @@ -234,7 +233,7 @@ def _update_last_login_time(self, req): # XXX should properly detect missing permission / non writeable source # and avoid "except (RepositoryError, Unauthorized)" below - if req.user.metainformation()['source']['adapter'] == 'ldapuser': + if req.user.cw_metainformation()['source']['adapter'] == 'ldapuser': return try: req.execute('SET X last_login_time NOW WHERE X eid %(x)s', @@ -282,12 +281,12 @@ to publish HTTP request. """ - def __init__(self, config, debug=None, + def __init__(self, config, session_handler_fact=CookieSessionHandler, vreg=None): self.info('starting web instance from %s', config.apphome) if vreg is None: - vreg = cwvreg.CubicWebVRegistry(config, debug=debug) + vreg = cwvreg.CubicWebVRegistry(config) self.vreg = vreg # connect to the repository and get instance's schema self.repo = config.repository(vreg) diff -r b5640328ffad -r 9db65b381028 web/component.py --- a/web/component.py Thu Jul 01 09:23:39 2010 +0200 +++ b/web/component.py Thu Jul 01 17:06:37 2010 +0200 @@ -15,9 +15,10 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-"""abstract component class and base components definition for CubicWeb web client +"""abstract component class and base components definition for CubicWeb web +client +""" -""" __docformat__ = "restructuredtext en" _ = unicode @@ -61,9 +62,15 @@ context = 'navcontentbottom' def call(self, view=None): - return self.cell_call(0, 0, view=view) + if self.cw_rset is None: + self.entity_call(self.cw_extra_kwargs.pop('entity')) + else: + self.cell_call(0, 0, view=view) def cell_call(self, row, col, view=None): + self.entity_call(self.cw_rset.get_entity(row, col), view=view) + + def entity_call(self, entity, view=None): raise NotImplementedError() diff -r b5640328ffad -r 9db65b381028 web/controller.py --- a/web/controller.py Thu Jul 01 09:23:39 2010 +0200 +++ b/web/controller.py Thu Jul 01 17:06:37 2010 +0200 @@ -25,6 +25,7 @@ from cubicweb.selectors import yes from cubicweb.appobject import AppObject +from cubicweb.mail import format_mail from cubicweb.web import LOGGER, Redirect, RequestError @@ -81,18 +82,20 @@ # generic methods useful for concrete implementations ###################### - def process_rql(self, rql): + def process_rql(self): """execute rql if specified""" - # XXX assigning to self really necessary? - self.cw_rset = None + req = self._cw + rql = req.form.get('rql') if rql: - self._cw.ensure_ro_rql(rql) + req.ensure_ro_rql(rql) if not isinstance(rql, unicode): - rql = unicode(rql, self._cw.encoding) - pp = self._cw.vreg['components'].select_or_none('magicsearch', self._cw) + rql = unicode(rql, req.encoding) + pp = req.vreg['components'].select_or_none('magicsearch', req) if pp is not None: - self.cw_rset = pp.process_query(rql) - return self.cw_rset + return pp.process_query(rql) + if 'eid' in req.form: + return req.eid_rset(req.form['eid']) + return None def notify_edited(self, entity): """called by edit_entity() to notify which entity is edited""" @@ -106,6 +109,16 @@ view.set_http_cache_headers() self._cw.validate_cache() + def sendmail(self, recipient, subject, body): + senderemail = self._cw.user.cw_adapt_to('IEmailable').get_email() + msg = format_mail({'email' : senderemail, + 'name' : self._cw.user.dc_title(),}, + [recipient], body, subject) + if not self._cw.vreg.config.sendmails([(msg, [recipient])]): + msg = self._cw._('could not connect to the SMTP server') + url = self._cw.build_url(__message=msg) + raise Redirect(url) + def reset(self): """reset form parameters and redirect to a view determinated by given parameters diff -r b5640328ffad -r 9db65b381028 web/data/button.png Binary file web/data/button.png has changed diff -r b5640328ffad -r 9db65b381028 web/data/cubicweb.acl.css --- a/web/data/cubicweb.acl.css Thu Jul 01 09:23:39 2010 +0200 +++ b/web/data/cubicweb.acl.css Thu Jul 01 17:06:37 2010 +0200 @@ -6,78 +6,35 @@ */ /******************************************************************************/ -/* security edition form (views/management.py) */ +/* security edition form (views/management.py) web/views/schema.py */ /******************************************************************************/ h2.schema{ - background : #ff7700; - color: #fff; - font-weight: bold; - padding : 0.1em 0.3em; + color: %(aColor)s; } - -h3.schema{ +table.schemaInfo td a.users{ + color : #00CC33; font-weight: bold; } -h4 a, -h4 a:link, -h4 a:visited{ - color:#000; - } - -table.schemaInfo { - margin: 1em 0em; - text-align: left; - border: 1px solid black; - border-collapse: collapse; - width:100%; -} - -table.schemaInfo th, -table.schemaInfo td { - padding: .3em .5em; - border: 1px 
solid grey; - width:33%; -} - - -table.schemaInfo tr th { - padding: 0.2em 0px 0.2em 5px; - background-image:none; - background-color:#dfdfdf; -} - -table.schemaInfo thead tr { - border: 1px solid #dfdfdf; -} - -table.schemaInfo td { - padding: 3px 10px 3px 5px; - -} - -a.users{ - color : #00CC33; - font-weight: bold } - -a.guests{ - color : #ff7700; +table.schemaInfo td a.guests{ + color: #ff7700; font-weight: bold; } -a.owners{ - color : #8b0000; +table.schemaInfo td a.owners{ + color: #8b0000; font-weight: bold; } -a.managers{ +table.schemaInfo td a.managers{ color: #000000; + font-weight: bold; } .discret, -a.grey{ +table.schemaInfo td a.grey{ color:#666; } @@ -86,39 +43,9 @@ } .red{ - color : #ff7700; + color: #ff7700; } div#schema_security{ width:100%; - } -/******************************************************************************/ -/* user groups edition form (views/euser.py) */ -/******************************************************************************/ - -table#groupedit { - margin: 1ex 1em; - text-align: left; - border: 1px solid black; - border-collapse: collapse; -} - -table#groupedit th, -table#groupedit td { - padding: 0.5em 1em; -} - -table#groupedit tr { - border-bottom: 1px solid black; -} - -table#groupedit tr.nogroup { - border: 1px solid red; - margin: 1px; -} - -table#groupedit td { - text-align: center; - padding: 0.5em; -} - + } \ No newline at end of file diff -r b5640328ffad -r 9db65b381028 web/data/cubicweb.ajax.js --- a/web/data/cubicweb.ajax.js Thu Jul 01 09:23:39 2010 +0200 +++ b/web/data/cubicweb.ajax.js Thu Jul 01 17:06:37 2010 +0200 @@ -1,14 +1,96 @@ -/* - * :organization: Logilab - * :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. - * :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr +/* copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. + * contact http://www.logilab.fr/ -- mailto:contact@logilab.fr + * + * This file is part of CubicWeb. + * + * CubicWeb is free software: you can redistribute it and/or modify it under the + * terms of the GNU Lesser General Public License as published by the Free + * Software Foundation, either version 2.1 of the License, or (at your option) + * any later version. + * + * CubicWeb is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS + * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more + * details. + * + * You should have received a copy of the GNU Lesser General Public License along + * with CubicWeb. If not, see . */ -CubicWeb.require('python.js'); -CubicWeb.require('htmlhelpers.js'); +/** + * .. 
function:: Deferred + * + * dummy ultra minimalist implementation of deferred for jQuery + */ +function Deferred() { + this.__init__(this); +} + +jQuery.extend(Deferred.prototype, { + __init__: function() { + this._onSuccess = []; + this._onFailure = []; + this._req = null; + this._result = null; + this._error = null; + }, + + addCallback: function(callback) { + if (this._req.readyState == 4) { + if (this._result) { + var args = [this._result, this._req]; + jQuery.merge(args, cw.utils.sliceList(arguments, 1)); + callback.apply(null, args); + } + } + else { + this._onSuccess.push([callback, cw.utils.sliceList(arguments, 1)]); + } + return this; + }, + + addErrback: function(callback) { + if (this._req.readyState == 4) { + if (this._error) { + callback.apply(null, [this._error, this._req]); + } + } + else { + this._onFailure.push([callback, cw.utils.sliceList(arguments, 1)]); + } + return this; + }, + + success: function(result) { + this._result = result; + try { + for (var i = 0; i < this._onSuccess.length; i++) { + var callback = this._onSuccess[i][0]; + var args = [result, this._req]; + jQuery.merge(args, this._onSuccess[i][1]); + callback.apply(null, args); + } + } catch(error) { + this.error(this.xhr, null, error); + } + }, + + error: function(xhr, status, error) { + this._error = error; + for (var i = 0; i < this._onFailure.length; i++) { + var callback = this._onFailure[i][0]; + var args = [error, this._req]; + jQuery.merge(args, this._onFailure[i][1]); + callback.apply(null, args); + } + } + +}); + var JSON_BASE_URL = baseuri() + 'json?'; +//============= utility function handling remote calls responses. ==============// function _loadAjaxHtmlHead(node, head, tag, srcattr) { var loaded = []; var jqtagfilter = tag + '[' + srcattr + ']'; @@ -17,7 +99,7 @@ }); node.find(tag).each(function(i) { if (this.getAttribute(srcattr)) { - if (!loaded.contains(this.getAttribute(srcattr))) { + if (jQuery.inArray(this.getAttribute(srcattr), loaded) == -1) { jQuery(this).appendTo(head); } } else { @@ -27,7 +109,9 @@ node.find(jqtagfilter).remove(); } -/* +/** + * .. function:: function loadAjaxHtmlHead(response) + * * inspect dom response (as returned by getDomFromResponse), search for * a
    node and put its content into the real * document's head. @@ -59,18 +143,13 @@ // we can safely return this node. Otherwise, the view itself // returned several 'root' nodes and we need to keep the wrapper // created by getDomFromResponse() - if (response.childNodes.length == 1 && - response.getAttribute('cubicweb:type') == 'cwResponseWrapper') { + if (response.childNodes.length == 1 && response.getAttribute('cubicweb:type') == 'cwResponseWrapper') { return response.firstChild; } return response; } -function preprocessAjaxLoad(node, newdomnode) { - return loadAjaxHtmlHead(newdomnode); -} - -function postAjaxLoad(node) { +function _postAjaxLoad(node) { // find sortable tables if there are some if (typeof(Sortable) != 'undefined') { Sortable.sortTables(node); @@ -89,44 +168,77 @@ roundedCorners(node); } if (typeof setFormsTarget != 'undefined') { - setFormsTarget(node); + setFormsTarget(node); } - loadDynamicFragments(node); + _loadDynamicFragments(node); // XXX [3.7] jQuery.one is now used instead jQuery.bind, // jquery.treeview.js can be unpatched accordingly. jQuery(CubicWeb).trigger('server-response', [true, node]); + jQuery(node).trigger('server-response', [true, node]); +} + +function remoteCallFailed(err, req) { + cw.log(err); + if (req.status == 500) { + updateMessage(err); + } else { + updateMessage(_("an error occured while processing your request")); + } } -/* cubicweb loadxhtml plugin to make jquery handle xhtml response +//============= base AJAX functions to make remote calls =====================// +/** + * .. function:: ajaxFuncArgs(fname, form, *args) * - * fetches `url` and replaces this's content with the result + * extend `form` parameters to call the js_`fname` function of the json + * controller with `args` arguments. + */ +function ajaxFuncArgs(fname, form /* ... */) { + form = form || {}; + $.extend(form, { + 'fname': fname, + 'pageid': pageid, + 'arg': $.map(cw.utils.sliceList(arguments, 2), jQuery.toJSON) + }); + return form; +} + +/** + * .. function:: loadxhtml(url, form, reqtype='get', mode='replace', cursor=true) * - * @param mode how the replacement should be done (default is 'replace') - * Possible values are : + * build url given by absolute or relative `url` and `form` parameters + * (dictionary), fetch it using `reqtype` method, then evaluate the + * returned XHTML and insert it according to `mode` in the + * document. Possible modes are : + * * - 'replace' to replace the node's content with the generated HTML * - 'swap' to replace the node itself with the generated HTML * - 'append' to append the generated HTML to the node's content + * + * If `cursor`, turn mouse cursor into 'progress' cursor until the remote call + * is back. 
*/ -jQuery.fn.loadxhtml = function(url, data, reqtype, mode) { - var ajax = null; - if (reqtype == 'post') { - ajax = jQuery.post; - } else { - ajax = jQuery.get; +jQuery.fn.loadxhtml = function(url, form, reqtype, mode, cursor) { + if (this.size() > 1) { + cw.log('loadxhtml was called with more than one element'); } - if (this.size() > 1) { - log('loadxhtml was called with more than one element'); + var callback = null; + if (form && form.callback) { + cw.log('[3.9] callback given through form.callback is deprecated, add ' + 'callback on the defered'); + callback = form.callback; + delete form.callback; } var node = this.get(0); // only consider the first element - mode = mode || 'replace'; - var callback = null; - if (data && data.callback) { - callback = data.callback; - delete data.callback; + if (cursor) { + setProgressCursor(); } - ajax(url, data, function(response) { + var d = loadRemote(url, form, reqtype); + d.addCallback(function(response) { var domnode = getDomFromResponse(response); - domnode = preprocessAjaxLoad(node, domnode); + domnode = loadAjaxHtmlHead(domnode); + mode = mode || 'replace'; + // make sure the component is visible + $(node).removeClass("hidden"); if (mode == 'swap') { var origId = node.id; node = swapDOM(node, domnode); @@ -138,19 +250,97 @@ } else if (mode == 'append') { jQuery(node).append(domnode); } - postAjaxLoad(node); + _postAjaxLoad(node); while (jQuery.isFunction(callback)) { callback = callback.apply(this, [domnode]); } }); -}; + if (cursor) { + d.addCallback(resetCursor); + d.addErrback(resetCursor); + d.addErrback(remoteCallFailed); + } + return d; +} +/** + * .. function:: loadRemote(url, form, reqtype='GET', async=true) + * + * Asynchronously (unless `async` argument is set to false) load an url or path + * and return a deferred whose callbacks args are decoded according to the + * Content-Type response header. `form` should be additional form params + * dictionary, `reqtype` the HTTP request type (get 'GET' or 'POST'). + */ +function loadRemote(url, form, reqtype, sync) { + if (!url.startswith(baseuri())) { + url = baseuri() + url; + } + if (!sync) { + var deferred = new Deferred(); + jQuery.ajax({ + url: url, + type: (reqtype || 'GET').toUpperCase(), + data: form, + async: true, + + beforeSend: function(xhr) { + deferred._req = xhr; + }, + + success: function(data, status) { + if (deferred._req.getResponseHeader("content-type") == 'application/json') { + data = cw.evalJSON(data); + } + deferred.success(data); + }, + error: function(xhr, status, error) { + try { + if (xhr.status == 500) { + var reason_dict = cw.evalJSON(xhr.responseText); + deferred.error(xhr, status, reason_dict['reason']); + return; + } + } catch(exc) { + cw.log('error with server side error report:' + exc); + } + deferred.error(xhr, status, null); + } + }); + return deferred; + } else { + var result = jQuery.ajax({ + url: url, + type: (reqtype || 'GET').toUpperCase(), + data: form, + async: false + }); + if (result) { + // XXX no good reason to force json here, + // it should depends on request content-type + result = cw.evalJSON(result.responseText); + } + return result + } +} -/* finds each dynamic fragment in the page and executes the +//============= higher level AJAX functions using remote calls ===============// +/** + * .. function:: _(message) + * + * emulation of gettext's _ shortcut + */ +function _(message) { + return loadRemote('json', ajaxFuncArgs('i18n', null, [message]), 'GET', true)[0]; +} + +/** + * .. 
function:: _loadDynamicFragments(node) + * + * finds each dynamic fragment in the page and executes the * the associated RQL to build them (Async call) */ -function loadDynamicFragments(node) { +function _loadDynamicFragments(node) { if (node) { var fragments = jQuery(node).find('div.dynamicFragment'); } else { @@ -162,247 +352,138 @@ if (typeof LOADING_MSG == 'undefined') { LOADING_MSG = 'loading'; // this is only a safety belt, it should not happen } - for(var i=0; i'; + var $fragment = jQuery(fragment); // if cubicweb:loadurl is set, just pick the url et send it to loadxhtml - var url = getNodeAttribute(fragment, 'cubicweb:loadurl'); + var url = $fragment.attr('cubicweb:loadurl'); if (url) { - jQuery(fragment).loadxhtml(url); + $fragment.loadxhtml(url); continue; } // else: rebuild full url by fetching cubicweb:rql, cubicweb:vid, etc. - var rql = getNodeAttribute(fragment, 'cubicweb:rql'); - var items = getNodeAttribute(fragment, 'cubicweb:vid').split('&'); + var rql = $fragment.attr('cubicweb:rql'); + var items = $fragment.attr('cubicweb:vid').split('&'); var vid = items[0]; var extraparams = {}; // case where vid='myvid¶m1=val1¶m2=val2': this is a deprecated abuse-case if (items.length > 1) { - console.log("[3.5] you're using extraargs in cubicweb:vid attribute, this is deprecated, consider using loadurl instead"); - for (var j=1; j innerText, FF -> textContent */ - var text = node.innerText || node.textContent; - if (text && !text.strip()) { - continue; - } + /* all browsers but FF -> innerText, FF -> textContent */ + var text = node.innerText || node.textContent; + if (text && ! text.strip()) { + continue; + } } else { stripped.push(node); } @@ -441,7 +522,10 @@ return stripped; } -/* convenience function that returns a DOM node based on req's result. +/** + * .. function:: getDomFromResponse(response) + * + * convenience function that returns a DOM node based on req's result. 
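 * When the view returned several top-level nodes, they are wrapped in a
 * single DIV carrying cubicweb:type="cwResponseWrapper" so that one node can
 * be returned; loadAjaxHtmlHead detects this wrapper and unwraps it again
 * when possible (see above).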
* XXX clarify the need to clone * */ function getDomFromResponse(response) { @@ -462,17 +546,116 @@ } // several children => wrap them in a single node and return the wrap return DIV({'cubicweb:type': "cwResponseWrapper"}, - map(function(node) { - return jQuery(node).clone().context; - }, children)); + $.map(children, function(node) { + return jQuery(node).clone().context;}) + ); } -function postJSON(url, data, callback) { - return jQuery.post(url, data, callback, 'json'); -} +/* DEPRECATED *****************************************************************/ + +preprocessAjaxLoad = cw.utils.deprecatedFunction( + '[3.9] preprocessAjaxLoad() is deprecated, use loadAjaxHtmlHead instead', + function(node, newdomnode) { + return loadAjaxHtmlHead(newdomnode); + } +); + +reloadComponent = cw.utils.deprecatedFunction( + '[3.9] reloadComponent() is deprecated, use loadxhtml instead', + function(compid, rql, registry, nodeid, extraargs) { + registry = registry || 'components'; + rql = rql || ''; + nodeid = nodeid || (compid + 'Component'); + extraargs = extraargs || {}; + var node = jqNode(nodeid); + return node.loadxhtml('json', ajaxFuncArgs('component', null, compid, + rql, registry, extraargs)); + } +); + +reloadBox = cw.utils.deprecatedFunction( + '[3.9] reloadBox() is deprecated, use loadxhtml instead', + function(boxid, rql) { + return reloadComponent(boxid, rql, 'boxes', boxid); + } +); -function getJSON(url, data, callback){ - return jQuery.get(url, data, callback, 'json'); -} +replacePageChunk = cw.utils.deprecatedFunction( + '[3.9] replacePageChunk() is deprecated, use loadxhtml instead', + function(nodeId, rql, vid, extraparams, /* ... */ swap, callback) { + var params = null; + if (callback) { + params = { + callback: callback + }; + } + var node = jQuery('#' + nodeId)[0]; + var props = {}; + if (node) { + props['rql'] = rql; + props['fname'] = 'view'; + props['pageid'] = pageid; + if (vid) { + props['vid'] = vid; + } + if (extraparams) { + jQuery.extend(props, extraparams); + } + // FIXME we need to do asURL(props) manually instead of + // passing `props` directly to loadxml because replacePageChunk + // is sometimes called (abusively) with some extra parameters in `vid` + var mode = swap ? 'swap': 'replace'; + var url = JSON_BASE_URL + asURL(props); + jQuery(node).loadxhtml(url, params, 'get', mode); + } else { + cw.log('Node', nodeId, 'not found'); + } + } +); + +loadxhtml = cw.utils.deprecatedFunction( + '[3.9] loadxhtml() function is deprecated, use loadxhtml method instead', + function(nodeid, url, /* ... */ replacemode) { + jQuery('#' + nodeid).loadxhtml(url, null, 'post', replacemode); + } +); -CubicWeb.provide('ajax.js'); +remoteExec = cw.utils.deprecatedFunction( + '[3.9] remoteExec() is deprecated, use loadRemote instead', + function(fname /* ... */) { + setProgressCursor(); + var props = { + 'fname': fname, + 'pageid': pageid, + 'arg': $.map(cw.utils.sliceList(arguments, 1), jQuery.toJSON) + }; + var result = jQuery.ajax({ + url: JSON_BASE_URL, + data: props, + async: false + }).responseText; + if (result) { + result = cw.evalJSON(result); + } + resetCursor(); + return result; + } +); + +asyncRemoteExec = cw.utils.deprecatedFunction( + '[3.9] asyncRemoteExec() is deprecated, use loadRemote instead', + function(fname /* ... 
*/) { + setProgressCursor(); + var props = { + 'fname': fname, + 'pageid': pageid, + 'arg': $.map(cw.utils.sliceList(arguments, 1), jQuery.toJSON) + }; + // XXX we should inline the content of loadRemote here + var deferred = loadRemote(JSON_BASE_URL, props, 'POST'); + deferred = deferred.addErrback(remoteCallFailed); + deferred = deferred.addErrback(resetCursor); + deferred = deferred.addCallback(resetCursor); + return deferred; + } +); diff -r b5640328ffad -r 9db65b381028 web/data/cubicweb.bookmarks.js --- a/web/data/cubicweb.bookmarks.js Thu Jul 01 09:23:39 2010 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,10 +0,0 @@ -CubicWeb.require('ajax.js'); - -function removeBookmark(beid) { - d = asyncRemoteExec('delete_bookmark', beid); - d.addCallback(function(boxcontent) { - reloadComponent('bookmarks_box', '', 'boxes', 'bookmarks_box'); - document.location.hash = '#header'; - updateMessage(_("bookmark has been removed")); - }); -} diff -r b5640328ffad -r 9db65b381028 web/data/cubicweb.calendar.css --- a/web/data/cubicweb.calendar.css Thu Jul 01 09:23:39 2010 +0200 +++ b/web/data/cubicweb.calendar.css Thu Jul 01 17:06:37 2010 +0200 @@ -230,7 +230,7 @@ .calendar th.month { font-weight:bold; padding-bottom:0.2em; - background: #cfceb7; + background: %(actionBoxTitleBgColor)s; } .calendar th.month a{ diff -r b5640328ffad -r 9db65b381028 web/data/cubicweb.calendar.js --- a/web/data/cubicweb.calendar.js Thu Jul 01 09:23:39 2010 +0200 +++ b/web/data/cubicweb.calendar.js Thu Jul 01 17:06:37 2010 +0200 @@ -1,23 +1,20 @@ -/* +/** * This file contains Calendar utilities * :organization: Logilab * :copyright: 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. * :contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr */ -CubicWeb.require('python.js'); -CubicWeb.require('ajax.js'); - // IMPORTANT NOTE: the variables DAYNAMES AND MONTHNAMES will be added // by cubicweb automatically - // dynamically computed (and cached) var _CAL_HEADER = null; TODAY = new Date(); - -/* +/** + * .. class:: Calendar + * * Calendar (graphical) widget * public methods are : * __init__ : @@ -31,7 +28,7 @@ * * toggle(): * show (resp. hide) the calendar if it's hidden (resp. displayed) - * + * * displayNextMonth(): (resp. displayPreviousMonth()) * update the calendar to display next (resp. 
previous) month */ @@ -39,177 +36,219 @@ this.containerId = containerId; this.inputId = inputId; this.year = year; - this.month = month-1; // Javascript's counter starts at 0 for january + this.month = month - 1; // Javascript's counter starts at 0 for january this.cssclass = cssclass || "popupCalendar"; this.visible = false; this.domtable = null; - this.cellprops = { 'onclick' : function() {dateSelected(this, containerId); }, - 'onmouseover' : function() {this.style.fontWeight = 'bold'; }, - 'onmouseout' : function() {this.style.fontWeight = 'normal';} - } + this.cellprops = { + 'onclick': function() { + dateSelected(this, containerId); + }, + 'onmouseover': function() { + this.style.fontWeight = 'bold'; + }, + 'onmouseout': function() { + this.style.fontWeight = 'normal'; + } + }; - this.todayprops = jQuery.extend({}, this.cellprops, {'class' : 'today'}); + this.todayprops = jQuery.extend({}, + this.cellprops, { + 'class': 'today' + }); this._rowdisplay = function(row) { - return TR(null, map(partial(TD, this.cellprops), row)); - } + var _td = function(elt) { + return TD(this.cellprops, elt); + }; + return TR(null, $.map(row, _td)); + }; this._makecell = function(cellinfo) { - return TD(cellinfo[0], cellinfo[1]); - } + return TD(cellinfo[0], cellinfo[1]); + }; - /* utility function (the only use for now is inside the calendar) */ - this._uppercaseFirst = function(s) { return s.charAt(0).toUpperCase(); } - - /* accepts the cells data and builds the corresponding TR nodes - * @param rows a list of list of couples (daynum, cssprops) + /** + * .. function:: Calendar._uppercaseFirst(s) + * + * utility function (the only use for now is inside the calendar) + */ + this._uppercaseFirst = function(s) { + return s.charAt(0).toUpperCase(); + }; + + /** + * .. 
function:: Calendar._domForRows(rows) + * + * accepts the cells data and builds the corresponding TR nodes + * + * * `rows`, a list of list of couples (daynum, cssprops) */ this._domForRows = function(rows) { - var lines = [] - for (i=0; i>")))), - TBODY(null, - this._headdisplay(), - this._domForRows(rows)) - ); - return this.domtable; - } + var rows = this._getrows(); + var monthname = MONTHNAMES[this.month] + " " + this.year; + var prevlink = "javascript: togglePreviousMonth('" + this.containerId + "');"; + var nextlink = "javascript: toggleNextMonth('" + this.containerId + "');"; + this.domtable = TABLE({ + 'class': this.cssclass + }, + THEAD(null, TR(null, TH(null, A({ + 'href': prevlink + }, + "<<")), + // IE 6/7 requires colSpan instead of colspan + TH({ + 'colSpan': 5, + 'colspan': 5, + 'style': "text-align: center;" + }, + monthname), TH(null, A({ + 'href': nextlink + }, + ">>")))), TBODY(null, this._headdisplay(), this._domForRows(rows))); + return this.domtable; + }; this._updateDiv = function() { - if (!this.domtable) { - this._makecal(); - } - jqNode(this.containerId).empty().append(this.domtable); - // replaceChildNodes($(this.containerId), this.domtable); - } + if (!this.domtable) { + this._makecal(); + } + cw.jqNode(this.containerId).empty().append(this.domtable); + // replaceChildNodes($(this.containerId), this.domtable); + }; this.displayNextMonth = function() { - this.domtable = null; - if (this.month == 11) { - this.year++; - } - this.month = (this.month+1) % 12; - this._updateDiv(); - } + this.domtable = null; + if (this.month == 11) { + this.year++; + } + this.month = (this.month + 1) % 12; + this._updateDiv(); + }; this.displayPreviousMonth = function() { - this.domtable = null; - if (this.month == 0) { - this.year--; - } - this.month = (this.month+11) % 12; - this._updateDiv(); - } - + this.domtable = null; + if (this.month == 0) { + this.year--; + } + this.month = (this.month + 11) % 12; + this._updateDiv(); + }; + this.show = function() { - if (!this.visible) { - container = jqNode(this.containerId); - if (!this.domtable) { - this._makecal(); - } - container.empty().append(this.domtable); - toggleVisibility(container); - this.visible = true; - } - } + if (!this.visible) { + var container = cw.jqNode(this.containerId); + if (!this.domtable) { + this._makecal(); + } + container.empty().append(this.domtable); + toggleVisibility(container); + this.visible = true; + } + }; this.hide = function(event) { - var self; - if (event) { - self = event.data.self; - } else { - self = this; - } - if (self.visible) { - toggleVisibility(self.containerId); - self.visible = false; - } - } + var self; + if (event) { + self = event.data.self; + } else { + self = this; + } + if (self.visible) { + toggleVisibility(self.containerId); + self.visible = false; + } + }; this.toggle = function() { - if (this.visible) { - this.hide(); - } - else { - this.show(); - } - } + if (this.visible) { + this.hide(); + } + else { + this.show(); + } + }; // call hide() when the user explicitly sets the focus on the matching input - jqNode(inputId).bind('focus', {'self': this}, this.hide); // connect(inputId, 'onfocus', this, 'hide'); + cw.jqNode(inputId).bind('focus', { + 'self': this + }, + this.hide); // connect(inputId, 'onfocus', this, 'hide'); }; // keep track of each calendar created Calendar.REGISTRY = {}; -/* +/** + * .. 
function:: toggleCalendar(containerId, inputId, year, month) + * * popup / hide calendar associated to `containerId` - */ + */ function toggleCalendar(containerId, inputId, year, month) { var cal = Calendar.REGISTRY[containerId]; if (!cal) { - cal = new Calendar(containerId, inputId, year, month); - Calendar.REGISTRY[containerId] = cal; + cal = new Calendar(containerId, inputId, year, month); + Calendar.REGISTRY[containerId] = cal; } /* hide other calendars */ for (containerId in Calendar.REGISTRY) { - var othercal = Calendar.REGISTRY[containerId]; - if (othercal !== cal) { - othercal.hide(); - } + var othercal = Calendar.REGISTRY[containerId]; + if (othercal !== cal) { + othercal.hide(); + } } cal.toggle(); } - -/* +/** + * .. function:: toggleNextMonth(containerId) + * * ask for next month to calendar displayed in `containerId` */ function toggleNextMonth(containerId) { @@ -217,7 +256,9 @@ cal.displayNextMonth(); } -/* +/** + * .. function:: togglePreviousMonth(containerId) + * * ask for previous month to calendar displayed in `containerId` */ function togglePreviousMonth(containerId) { @@ -225,97 +266,90 @@ cal.displayPreviousMonth(); } - -/* +/** + * .. function:: dateSelected(cell, containerId) + * * Callback called when the user clicked on a cell in the popup calendar */ function dateSelected(cell, containerId) { var cal = Calendar.REGISTRY[containerId]; - var input = getNode(cal.inputId); + var input = cw.getNode(cal.inputId); // XXX: the use of innerHTML might cause problems, but it seems to be // the only way understood by both IE and Mozilla. Otherwise, // IE accepts innerText and mozilla accepts textContent var selectedDate = new Date(cal.year, cal.month, cell.innerHTML, 12); - var xxx = remoteExec("format_date", toISOTimestamp(selectedDate)); - input.value = xxx; + input.value = remoteExec("format_date", cw.utils.toISOTimestamp(selectedDate)); cal.hide(); } -function whichElement(e) -{ -var targ; -if (!e) - { - var e=window.event; - } -if (e.target) - { - targ=e.target; - } -else if (e.srcElement) - { - targ=e.srcElement; - } -if (targ.nodeType==3) // defeat Safari bug - { - targ = targ.parentNode; - } - return targ; +function whichElement(e) { + var targ; + if (!e) { + e = window.event; + } + if (e.target) { + targ = e.target; + } + else if (e.srcElement) { + targ = e.srcElement; + } + if (targ.nodeType == 3) // defeat Safari bug + { + targ = targ.parentNode; + } + return targ; } function getPosition(element) { - var left; - var top; - var offset; - // TODO: deal scrollbar positions also! - left = element.offsetLeft; - top = element.offsetTop; + var left; + var top; + var offset; + // TODO: deal scrollbar positions also! 
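    // descriptive note: getPosition walks up the offsetParent chain,
    // accumulating offsetLeft/offsetTop recursively to compute the element's
    // absolute page position (scroll offsets are still ignored, cf. TODO above)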
+ left = element.offsetLeft; + top = element.offsetTop; - if (element.offsetParent != null) - { - offset = getPosition(element.offsetParent); - left = left + offset[0]; - top = top + offset[1]; - + if (element.offsetParent != null) { + offset = getPosition(element.offsetParent); + left = left + offset[0]; + top = top + offset[1]; + } - return [left, top]; + return [left, top]; } function getMouseInBlock(event) { - var elt = event.target; - var x = event.clientX; - var y = event.clientY; - var w = elt.clientWidth; - var h = elt.clientHeight; - var offset = getPosition(elt); + var elt = event.target; + var x = event.clientX; + var y = event.clientY; + var w = elt.clientWidth; + var h = elt.clientHeight; + var offset = getPosition(elt); - x = 1.0*(x-offset[0])/w; - y = 1.0*(y-offset[1])/h; - return [x, y]; + x = 1.0 * (x - offset[0]) / w; + y = 1.0 * (y - offset[1]) / h; + return [x, y]; } function getHourFromMouse(event, hmin, hmax) { - var pos = getMouseInBlock(event); - var y = pos[1]; - return Math.floor((hmax-hmin)*y + hmin); + var pos = getMouseInBlock(event); + var y = pos[1]; + return Math.floor((hmax - hmin) * y + hmin); } function addCalendarItem(event, hmin, hmax, year, month, day, duration, baseurl) { - var hour = getHourFromMouse(event, hmin, hmax); + var hour = getHourFromMouse(event, hmin, hmax); + + if (0 <= hour && hour < 24) { + baseurl += "&start=" + year + "%2F" + month + "%2F" + day + "%20" + hour + ":00"; + baseurl += "&stop=" + year + "%2F" + month + "%2F" + day + "%20" + (hour + duration) + ":00"; - if (0<=hour && hour < 24) { - baseurl += "&start="+year+"%2F"+month+"%2F"+day+"%20"+hour+":00"; - baseurl += "&stop="+year+"%2F"+month+"%2F"+day+"%20"+(hour+duration)+":00"; - - stopPropagation(event); - window.location.assign(baseurl); - return false; - } - return true; + stopPropagation(event); + window.location.assign(baseurl); + return false; + } + return true; } function stopPropagation(event) { - event.cancelBubble = true; - if (event.stopPropagation) event.stopPropagation(); + event.cancelBubble = true; + if (event.stopPropagation) event.stopPropagation(); } - -CubicWeb.provide('calendar.js'); diff -r b5640328ffad -r 9db65b381028 web/data/cubicweb.compat.js --- a/web/data/cubicweb.compat.js Thu Jul 01 09:23:39 2010 +0200 +++ b/web/data/cubicweb.compat.js Thu Jul 01 17:06:37 2010 +0200 @@ -1,546 +1,103 @@ -/* MochiKit -> jQuery compatibility module */ - -function forEach(array, func) { - for (var i=0, length=array.length; i>> y = ['a:b:c', 'd:e'] - >>> jQuery.map(y, function(y) { return y.split(':');}) - ["a", "b", "c", "d", "e"] - // where one would expect: - [ ["a", "b", "c"], ["d", "e"] ] - XXX why not the same argument order as $.map and forEach ? 
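 * (a possible workaround, not part of the original code: wrap each returned
 *  value in an extra array so jQuery.map's one-level flattening keeps it)
 *
 *    jQuery.map(y, function(s) { return [s.split(':')]; });
 *    // -> [ ["a", "b", "c"], ["d", "e"] ]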
-*/ -function map(func, array) { - var result = []; - for (var i=0, length=array.length; - i'); - } catch (ex) { - var node = document.createElement('iframe'); - node.id = node.name = params.name; +// ========== END OF ARRAY EXTENSIONS ========== /// +forEach = cw.utils.deprecatedFunction( + '[3.9] forEach() is deprecated, use $.each() instead', + function(array, func) { + return $.each(array, func); } - } - else{ - var node = document.createElement('iframe'); - } - for (key in params) { - if (key != 'name'){ - var value = params[key]; - if (key.substring(0, 2) == 'on') { - // this is an event handler definition - if (typeof value == 'string') { - // litteral definition - value = new Function(value); - } - node[key] = value; - } else { // normal node attribute - node.setAttribute(key, params[key]); - } - } - } - return node; -} - +); -// dummy ultra minimalist implementation on deferred for jQuery -function Deferred() { - this.__init__(this); -} - -jQuery.extend(Deferred.prototype, { - __init__: function() { - this._onSuccess = []; - this._onFailure = []; - this._req = null; - this._result = null; - this._error = null; - }, - - addCallback: function(callback) { - if (this._req.readyState == 4) { - if (this._result) { callback.apply(null, this._result, this._req); } - } - else { this._onSuccess.push([callback, sliceList(arguments, 1)]); } - return this; - }, - - addErrback: function(callback) { - if (this._req.readyState == 4) { - if (this._error) { callback.apply(null, this._error, this._req); } +/** + * .. function:: cw.utils.deprecatedFunction(msg, function) + * + * jQUery flattens arrays returned by the mapping function: + * >>> y = ['a:b:c', 'd:e'] + * >>> jQuery.map(y, function(y) { return y.split(':');}) + * ["a", "b", "c", "d", "e"] + * // where one would expect: + * [ ["a", "b", "c"], ["d", "e"] ] + * XXX why not the same argument order as $.map and forEach ? + */ +map = cw.utils.deprecatedFunction( + '[3.9] map() is deprecated, use $.map instead', + function(func, array) { + var result = []; + for (var i = 0, length = array.length; i < length; i++) { + result.push(func(array[i])); } - else { this._onFailure.push([callback, sliceList(arguments, 1)]); } - return this; - }, - - success: function(result) { - this._result = result; - try { - for (var i=0; i 9) ? n : "0" + n; -}; - -/** @id MochiKit.DateTime.toISODate */ -toISODate = function (date) { - if (typeof(date) == "undefined" || date === null) { - return null; +findValue = cw.utils.deprecatedFunction( + '[3.9] findValue(array, elt) is deprecated, use $.inArray(elt, array) instead', + function(array, element) { + return jQuery.inArray(element, array); } - return [ - date.getFullYear(), - _padTwo(date.getMonth() + 1), - _padTwo(date.getDate()) - ].join("-"); -}; - - -/** @id MochiKit.DateTime.toISOTimeStamp */ -toISOTimestamp = function (date, realISO/* = false*/) { - if (typeof(date) == "undefined" || date === null) { - return null; - } - var sep = realISO ? "T" : " "; - var foot = realISO ? "Z" : ""; - if (realISO) { - date = new Date(date.getTime() + (date.getTimezoneOffset() * 60000)); - } - return toISODate(date) + sep + toISOTime(date, realISO) + foot; -}; - - +); -/* depth-first implementation of the nodeWalk function found - * in MochiKit.Base - * cf. 
http://mochikit.com/doc/html/MochiKit/Base.html#fn-nodewalk - */ -function nodeWalkDepthFirst(node, visitor) { - var children = visitor(node); - if (children) { - for(var i=0; i 0) */ -function isNotEmpty(obj) { - for (var i = 0; i < arguments.length; i++) { - var o = arguments[i]; - if (!(o && o.length)) { - return false; - } +addElementClass = cw.utils.deprecatedFunction( + '[3.9] addElementClass(node, cls) is depcreated, use $(node).addClass(cls) instead', + function(node, klass) { + $(node).addClass(klass); } - return true; -} +); -/** this implementation comes from MochiKit */ -function formContents(elem/* = document.body */) { - var names = []; - var values = []; - if (typeof(elem) == "undefined" || elem === null) { - elem = document.body; - } else { - elem = getNode(elem); +removeElementClass = cw.utils.deprecatedFunction( + '[3.9] removeElementClass(node, cls) is depcreated, use $(node).removeClass(cls) instead', + function(node, klass) { + $(node).removeClass(klass); + } +); + +hasElementClass = cw.utils.deprecatedFunction( + '[3.9] hasElementClass(node, cls) is depcreated, use $.className.has(node, cls)', + function(node, klass) { + return $.className.has(node, klass); } - nodeWalkDepthFirst(elem, function (elem) { - var name = elem.name; - if (isNotEmpty(name)) { - var tagName = elem.tagName.toUpperCase(); - if (tagName === "INPUT" - && (elem.type == "radio" || elem.type == "checkbox") - && !elem.checked - ) { - return null; - } - if (tagName === "SELECT") { - if (elem.type == "select-one") { - if (elem.selectedIndex >= 0) { - var opt = elem.options[elem.selectedIndex]; - var v = opt.value; - if (!v) { - var h = opt.outerHTML; - // internet explorer sure does suck. - if (h && !h.match(/^[^>]+\svalue\s*=/i)) { - v = opt.text; - } - } - names.push(name); - values.push(v); - return null; - } - // no form elements? - names.push(name); - values.push(""); - return null; - } else { - var opts = elem.options; - if (!opts.length) { - names.push(name); - values.push(""); - return null; - } - for (var i = 0; i < opts.length; i++) { - var opt = opts[i]; - if (!opt.selected) { - continue; - } - var v = opt.value; - if (!v) { - var h = opt.outerHTML; - // internet explorer sure does suck. - if (h && !h.match(/^[^>]+\svalue\s*=/i)) { - v = opt.text; - } - } - names.push(name); - values.push(v); - } - return null; - } - } - if (tagName === "FORM" || tagName === "P" || tagName === "SPAN" - || tagName === "DIV" - ) { - return elem.childNodes; - } - names.push(name); - values.push(elem.value || ''); - return null; - } - return elem.childNodes; - }); - return [names, values]; -} +); -function merge(array1, array2) { - var result = []; - for (var i=0,length=arguments.length; i
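
A possible migration sketch from the deprecated compat helpers to the
replacements named in the warnings above; `beid` is a placeholder and error
handling is omitted:

    // simple helpers, as stated by the deprecation messages
    // forEach(nodes, func)            ->  $.each(nodes, func)
    // findValue(names, 'title')       ->  $.inArray('title', names)
    // addElementClass(node, 'hidden') ->  $(node).addClass('hidden')

    // ajax calls: the removed cubicweb.bookmarks.js used asyncRemoteExec()
    // and reloadComponent(); an equivalent with the new helpers could be:
    var d = loadRemote('json', ajaxFuncArgs('delete_bookmark', null, beid), 'POST');
    d.addCallback(function() {
        jqNode('bookmarks_box').loadxhtml(
            'json', ajaxFuncArgs('component', null, 'bookmarks_box', '', 'boxes'));
    });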