--- a/common/mixins.py Wed Sep 30 18:57:42 2009 +0200
+++ b/common/mixins.py Wed Oct 07 12:31:08 2009 +0200
@@ -144,9 +144,6 @@
def children_rql(self):
return self.related_rql(self.tree_attribute, self.children_target)
- def __iter__(self):
- return self.iterchildren()
-
def is_leaf(self):
return len(self.children()) == 0
--- a/cwconfig.py Wed Sep 30 18:57:42 2009 +0200
+++ b/cwconfig.py Wed Oct 07 12:31:08 2009 +0200
@@ -168,20 +168,26 @@
'help': 'server\'s log level',
'group': 'main', 'inputlevel': 1,
}),
- # pyro name server
+ # pyro options
+ ('pyro-instance-id',
+ {'type' : 'string',
+ 'default': Method('default_instance_id'),
+ 'help': 'identifier of the CubicWeb instance in the Pyro name server',
+ 'group': 'pyro', 'inputlevel': 1,
+ }),
('pyro-ns-host',
{'type' : 'string',
'default': '',
'help': 'Pyro name server\'s host. If not set, will be detected by a \
broadcast query. It may contains port information using <host>:<port> notation.',
- 'group': 'pyro-name-server', 'inputlevel': 1,
+ 'group': 'pyro', 'inputlevel': 1,
}),
('pyro-ns-group',
{'type' : 'string',
'default': 'cubicweb',
'help': 'Pyro name server\'s group where the repository will be \
registered.',
- 'group': 'pyro-name-server', 'inputlevel': 1,
+ 'group': 'pyro', 'inputlevel': 1,
}),
# common configuration options which are potentially required as soon as
# you're using "base" application objects (ie to really server/web
@@ -523,6 +529,11 @@
vocab = getattr(self, vocab.method, ())
return CFGTYPE2ETYPE_MAP[optdict['type']], vocab
+ def default_instance_id(self):
+ """return the instance identifier, useful for option which need this
+ as default value
+ """
+ return None
class CubicWebConfiguration(CubicWebNoAppConfiguration):
"""base class for cubicweb server and web configurations"""
--- a/cwctl.py Wed Sep 30 18:57:42 2009 +0200
+++ b/cwctl.py Wed Oct 07 12:31:08 2009 +0200
@@ -415,9 +415,6 @@
the --force option."
raise ExecutionError(msg % (appid, pidf))
helper.start_server(config, debug)
- if not debug:
- # in debug mode, we reach this point once the instance is stopped...
- print 'instance %s %s' % (appid, self.actionverb)
class StopInstanceCommand(InstanceCommand):
--- a/devtools/fill.py Wed Sep 30 18:57:42 2009 +0200
+++ b/devtools/fill.py Wed Oct 07 12:31:08 2009 +0200
@@ -173,7 +173,7 @@
def generate_bytes(self, attrname, index, format=None):
# modpython way
fakefile = Binary("%s%s" % (attrname, index))
- fakefile.filename = "file_%s" % attrname
+ fakefile.filename = u"file_%s" % attrname
fakefile.value = fakefile.getvalue()
return fakefile
--- a/devtools/testlib.py Wed Sep 30 18:57:42 2009 +0200
+++ b/devtools/testlib.py Wed Oct 07 12:31:08 2009 +0200
@@ -390,7 +390,7 @@
for a in self.vreg['views'].possible_views(req, rset=rset))
def pactions(self, req, rset,
- skipcategories=('addrelated', 'siteactions', 'useractions')):
+ skipcategories=('addrelated', 'siteactions', 'useractions', 'footer')):
return [(a.__regid__, a.__class__)
for a in self.vreg['actions'].poss_visible_objects(req, rset=rset)
if a.category not in skipcategories]
@@ -401,7 +401,7 @@
if a.category in categories]
def pactionsdict(self, req, rset,
- skipcategories=('addrelated', 'siteactions', 'useractions')):
+ skipcategories=('addrelated', 'siteactions', 'useractions', 'footer')):
res = {}
for a in self.vreg['actions'].poss_visible_objects(req, rset=rset):
if a.category not in skipcategories:
--- a/doc/book/en/B0015-define-permissions.en.txt Wed Sep 30 18:57:42 2009 +0200
+++ b/doc/book/en/B0015-define-permissions.en.txt Wed Oct 07 12:31:08 2009 +0200
@@ -3,7 +3,7 @@
The security model
------------------
-The security model of `cubicWeb` is based on `Access Control List`.
+The security model of `cubicWeb` is based on `Access Control List`.
The main principles are:
* users and groups of users
@@ -29,7 +29,7 @@
actions if all the other groups the user belongs does not provide
those permissions
-
+
Permissions definition
``````````````````````
@@ -59,14 +59,14 @@
This can only be used for the actions `update` and `delete` of an entity
type.
-It is also possible to use specific groups if they are defined in the precreate
+It is also possible to use specific groups if they are defined in the precreate
of the cube (``migration/precreate.py``).
Use of RQL expression for writing rights
````````````````````````````````````````
-It is possible to define RQL expression to provide update permission
+It is possible to define RQL expression to provide update permission
(`add`, `delete` and `update`) on relation and entity types.
RQL expression for entity type permission :
@@ -79,47 +79,66 @@
respectively on the current entity (on which the action is verified) and
on the user who send the request
-* it is possible to use, in this expression, a special relation
- "has_<ACTION>_permission" where the subject is the user and the
+* it is possible to use, in this expression, a special relation
+ "has_<ACTION>_permission" where the subject is the user and the
object is a any variable, meaning that the user needs to have
permission to execute the action <ACTION> on the entities related
- to this variable
+ to this variable
-For RQL expressions on a relation type, the principles are the same except
+For RQL expressions on a relation type, the principles are the same except
for the following :
-* you have to use the class `RQLExpression` in the case of a non-final relation
+* you have to use the class `RRQLExpression` in the case of a non-final relation
* in the expression, the variables S, O and U are pre-defined references
to respectively the subject and the object of the current relation (on
which the action is being verified) and the user who executed the query
-* we can also defined rights on attributes of an entity (non-final relation),
- knowing that :
+* we can also define rights on attributes of an entity (non-final
+ relation), knowing that :
- - to defines RQL expression, we have to use the class `ERQLExpression`
- in which X represents the entity the attribute belongs to
+ - to define RQL expression, we have to use the class
+ `ERQLExpression` in which X represents the entity the attribute
+ belongs to
- the permissions `add` and `delete` are equivalent. Only `add`/`read`
are actually taken in consideration.
In addition to that the entity type `EPermission` from the standard library
-allow to build very complex and dynamic security architecture. The schema of
+allows to build very complex and dynamic security architecture. The schema of
this entity type is as follow : ::
- class EPermission(MetaEntityType):
+ class CWPermission(EntityType):
"""entity type that may be used to construct some advanced security configuration
"""
- name = String(required=True, indexed=True, internationalizable=True, maxsize=100)
- require_group = SubjectRelation('EGroup', cardinality='+*',
- description=_('groups to which the permission is granted'))
- require_state = SubjectRelation('State',
- description=_("entity'state in which the permission is applicable"))
- # can be used on any entity
- require_permission = ObjectRelation('**', cardinality='*1', composite='subject',
- description=_("link a permission to the entity. This "
- "permission should be used in the security "
- "definition of the entity's type to be useful."))
+ permissions = META_ETYPE_PERMS
+
+ name = String(required=True, indexed=True, internationalizable=True, maxsize=100,
+ description=_('name or identifier of the permission'))
+ label = String(required=True, internationalizable=True, maxsize=100,
+ description=_('distinct label to distinguate between other permission entity of the same name'))
+ require_group = SubjectRelation('CWGroup',
+ description=_('groups to which the permission is granted'))
+
+ # explicitly add X require_permission CWPermission for each entity that should have
+ # configurable security
+ class require_permission(RelationType):
+ """link a permission to the entity. This permission should be used in the
+ security definition of the entity's type to be useful.
+ """
+ permissions = {
+ 'read': ('managers', 'users', 'guests'),
+ 'add': ('managers',),
+ 'delete': ('managers',),
+ }
+
+ class require_group(RelationType):
+ """used to grant a permission to a group"""
+ permissions = {
+ 'read': ('managers', 'users', 'guests'),
+ 'add': ('managers',),
+ 'delete': ('managers',),
+ }
Example of configuration ::
@@ -151,14 +170,14 @@
}
inlined = True
-This configuration indicates that an entity `EPermission` named
+This configuration indicates that an entity `CWPermission` named
"add_version" can be associated to a project and provides rights to create
new versions on this project to specific groups. It is important to notice that :
* in such case, we have to protect both the entity type "Version" and the relation
associating a version to a project ("version_of")
-* because of the genricity of the entity type `EPermission`, we have to execute
+* because of the genericity of the entity type `CWPermission`, we have to execute
a unification with the groups and/or the states if necessary in the expression
("U in_group G, P require_group G" in the above example)
@@ -176,12 +195,13 @@
``````````````````````````````````````````````````````
Potentially, the use of an RQL expression to add an entity or a relation
can cause problems for the user interface, because if the expression uses
-the entity or the relation to create, then we are not able to verify the
+the entity or the relation to create, then we are not able to verify the
permissions before we actually add the entity (please note that this is
not a problem for the RQL server at all, because the permissions checks are
-done after the creation). In such case, the permission check methods
-(check_perm, has_perm) can indicate that the user is not allowed to create
-this entity but can obtain the permission.
+done after the creation). In such case, the permission check methods
+(check_perm, has_perm) can indicate that the user is not allowed to create
+this entity but can obtain the permission.
+
To compensate this problem, it is usually necessary, for such case,
to use an action that reflects the schema permissions but which enables
to check properly the permissions so that it would show up if necessary.
--- a/doc/book/en/conf.py Wed Sep 30 18:57:42 2009 +0200
+++ b/doc/book/en/conf.py Wed Oct 07 12:31:08 2009 +0200
@@ -49,9 +49,9 @@
# other places throughout the built documents.
#
# The short X.Y version.
-version = '0.54'
+version = '3.5'
# The full version, including alpha/beta/rc tags.
-release = '3.4'
+release = '3.5'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
--- a/doc/book/en/development/datamodel/define-workflows.rst Wed Sep 30 18:57:42 2009 +0200
+++ b/doc/book/en/development/datamodel/define-workflows.rst Wed Oct 07 12:31:08 2009 +0200
@@ -8,17 +8,17 @@
A workflow describes how certain entities have to evolve between
different states. Hence we have a set of states, and a "transition graph",
-i.e. a list of possible transitions from one state to another state.
+i.e. a set of possible transitions from one state to another state.
We will define a simple workflow for a blog, with only the following
two states: `submitted` and `published`. So first, we create a simple
-*CubicWeb* in ten minutes (see :ref:`BlogFiveMinutes`).
+*CubicWeb* instance in ten minutes (see :ref:`BlogFiveMinutes`).
Set-up a workflow
-----------------
We want to create a workflow to control the quality of the BlogEntry
-submitted on your instance. When a BlogEntry is created by a user
+submitted on the instance. When a BlogEntry is created by a user
its state should be `submitted`. To be visible to all, it has to
be in the state `published`. To move it from `submitted` to `published`,
we need a transition that we can call `approve_blogentry`.
@@ -27,16 +27,16 @@
So we have to define a group of users, `moderators`, and
this group will have appropriate permissions to publish a BlogEntry.
-There are two ways to create a workflow: from the user interface,
-or by defining it in ``migration/postcreate.py``.
-This script is executed each time a new ``cubicweb-ctl db-init`` is done.
-We strongly recommend to create the workflow in ``migration/postcreate.py``
-and we will now show you how. Read `Under the hood`_ to understand why.
+There are two ways to create a workflow: from the user interface, or
+by defining it in ``migration/postcreate.py``. This script is executed
+each time a new ``cubicweb-ctl db-init`` is done. We strongly
+recommend to create the workflow in ``migration/postcreate.py`` and we
+will now show you how. Read `Two bits of warning`_ to understand why.
-The state of a entity is managed by the `in_state` attribute which can be added to you entity schema by two ways:
+The state of an entity is managed by the `in_state` attribute which
+can be added to your entity schema by inheriting from
+`cubicweb.schema.WorkflowableEntityType`.
-* direct inheritance by subclassing your class from `cubicweb.schema.WorkflowableEntityType`
-* by delegation using `cubicweb.schema.make_worflowable` (usable as a decorator)
About our example of BlogEntry, we must have:
@@ -44,7 +44,7 @@
from cubicweb.schema import WorkflowableEntityType
- class BlogEntry(EntityType, WorkflowableEntityType):
+ class BlogEntry(WorkflowableEntityType):
...
@@ -53,10 +53,14 @@
The ``postcreate.py`` script is executed in a special environment, adding
several *CubicWeb* primitives that can be used.
+
They are all defined in the ``class ServerMigrationHelper``.
We will only discuss the methods we use to create a workflow in this example.
-To define our workflow for BlogDemo, please add the following lines
+A workflow is a collection of entities of type ``State`` and of type
+``Transition`` which are standard *CubicWeb* entity types.
+
+To define a workflow for BlogDemo, please add the following lines
to ``migration/postcreate.py``:
.. sourcecode:: python
@@ -69,25 +73,35 @@
.. sourcecode:: python
- wf = add_workflow(u'your workflow description', 'BlogEntry')
+ wf = add_workflow(u'blog publication workflow', 'BlogEntry')
-At first, instanciate a new workflow object with a gentle description and the concerned entity types (this one can be a tuple for multiple value).
+At first, instanciate a new workflow object with a gentle description
+and the concerned entity types (this one can be a tuple for multiple
+value).
.. sourcecode:: python
submitted = wf.add_state(_('submitted'), initial=True)
published = wf.add_state(_('published'))
-``add_state`` expects as first argument the name of the state you want to create and an optional argument to say if it is supposed to be the initial state of the entity type.
+This will create two entities of type ``State``, one with name
+'submitted', and the other with name 'published'.
+
+``add_state`` expects as first argument the name of the state you want
+to create and an optional argument to say if it is supposed to be the
+initial state of the entity type.
.. sourcecode:: python
wf.add_transition(_('approve_blogentry'), (submitted,), published, ('moderators', 'managers'),)
+This will create an entity of type ``Transition`` with name
+`approve_blogentry` which will be linked to the ``State`` entities
+created before.
``add_transition`` expects
- * as the first argument the name of the transition
+ * as the first argument: the name of the transition
* then the list of states on which the transition can be triggered,
* the target state of the transition,
* and the permissions
@@ -102,10 +116,11 @@
Do not forget to add the `_()` in front of all states and transitions names while creating
a workflow so that they will be identified by the i18n catalog scripts.
-In addition to the user group conditions which the user needs to belong to one of those, we could have added a RQL condition.
-In this case, the user can only perform the action if the two conditions are satisfied.
+In addition to the user groups (one of which the user needs to belong
+to), we could have added a RQL condition. In this case, the user can
+only perform the action if the two conditions are satisfied.
-If we use a RQL condition on a transition, we can use the following variables:
+If we use an RQL condition on a transition, we can use the following variables:
* `%(eid)s`, object's eid
* `%(ueid)s`, user executing the query eid
@@ -114,38 +129,30 @@
.. image:: ../../images/03-transitions-view.en.png
-You can notice that in the action box of a BlogEntry, the state
-is now listed as well as the possible transitions for the current state defined by the workflow.
+You can notice that in the action box of a BlogEntry, the state is now
+listed as well as the possible transitions for the current state
+defined by the workflow.
+
The transitions will only be displayed for users having the right permissions.
In our example, the transition `approve_blogentry` will only be displayed
for the users belonging to the group `moderators` or `managers`.
-Under the hood
-~~~~~~~~~~~~~~
-
-A workflow is a collection of entities of type ``State`` and of type ``Transition``
-which are standard *CubicWeb* entity types.
-
-For instance, the preceding lines:
-
-.. sourcecode:: python
-
- submitted = wf.add_state(_('submitted'), initial=True)
- published = wf.add_state(_('published'))
+Two bits of warning
+~~~~~~~~~~~~~~~~~~~
-will create two entities of type ``State``, one with name 'submitted', and the other
-with name 'published'. Whereas:
-
-.. sourcecode:: python
-
- wf.add_transition(_('approve_blogentry'), (submitted,), published, ('moderators', 'managers'),)
+We could perfectly use the administration interface to do these
+operations. It is a convenient thing to do at times (when doing
+development, to quick-check things). But it is not recommended beyond
+that because it is a bit complicated to do it right and it will be
+only local to your instance (or, said a bit differently, such a
+workflow only exists in an instance database). Furthermore, you cannot
+write unit tests against deployed instances, and experience shows it
+is mandatory to have tests for any mildly complicated workflow
+setup.
-will create an entity of type ``Transition`` with name `approve_blogentry` which will
-be linked to the ``State`` entities created before.
-As a consequence, we could use the administration interface to do these operations. But it is not recommended because it will be uselessly complicated and will be only local to your instance.
-
-Indeed, if you create the states and transitions through the user interface, next time you initialize the database you will have to re-create all the entities.
-The user interface should only be a reference for you to view the states and transitions, but is not the appropriate interface to define your application workflow.
-
-
+Indeed, if you create the states and transitions through the user
+interface, next time you initialize the database you will have to
+re-create all the workflow entities. The user interface should only be
+a reference for you to view the states and transitions, but is not the
+appropriate interface to define your application workflow.
--- a/doc/book/en/development/datamodel/definition.rst Wed Sep 30 18:57:42 2009 +0200
+++ b/doc/book/en/development/datamodel/definition.rst Wed Oct 07 12:31:08 2009 +0200
@@ -74,7 +74,7 @@
function `_` to be properly internationalized.
- `constraints` : a list of conditions/constraints that the relation has to
- satisfy (c.f. `Contraints`_)
+ satisfy (c.f. `Constraints`_)
- `cardinality` : a two character string which specify the cardinality of the
relation. The first character defines the cardinality of the relation on
--- a/doc/book/en/development/devcore/vreg.rst Wed Sep 30 18:57:42 2009 +0200
+++ b/doc/book/en/development/devcore/vreg.rst Wed Oct 07 12:31:08 2009 +0200
@@ -138,7 +138,7 @@
selectable. For an rset with one entity, the EntityRSSIconBox class
will have a higher score then RSSIconBox, which is what we wanted.
-Of course, once this is done, you have to ::
+Of course, once this is done, you have to:
* fill in the call method of EntityRSSIconBox
@@ -161,7 +161,7 @@
(or did not) is the way. There exists a traced_selection context
manager to help with that.
-Here is an example ::
+Here is an example:
.. sourcecode:: python
--- a/doc/book/en/development/devrepo/operations.rst Wed Sep 30 18:57:42 2009 +0200
+++ b/doc/book/en/development/devrepo/operations.rst Wed Oct 07 12:31:08 2009 +0200
@@ -3,7 +3,36 @@
Repository operations
======================
-[WRITE ME]
+When one needs to perform operations (real world operations like mail
+notifications, file operations, real-world side-effects) at
+transaction commit time, Operations are the way to go.
+
+Possible events are:
+
+* precommit: the pool is preparing to commit. You shouldn't do
+ anything things which has to be reverted if the commit fail at this
+ point, but you can freely do any heavy computation or raise an
+ exception if the commit can't go. You can add some new operation
+ during this phase but their precommit event won't be triggered
+
+* commit: the pool is preparing to commit. You should avoid to do to
+ expensive stuff or something that may cause an exception in this
+ event
-* repository operations
+* revertcommit: if an operation failed while commited, this event is
+ triggered for all operations which had their commit event already to
+ let them revert things (including the operation which made fail the
+ commit)
+
+* rollback: the transaction has been either rollbacked either
+ - intentionaly
+ - a precommit event failed, all operations are rollbacked
+ - a commit event failed, all operations which are not been triggered
+ for commit are rollbacked
+
+Exceptions signaled from within a rollback are logged and swallowed.
+
+The order of operations may be important, and is controlled according
+to operation's class (see : Operation, LateOperation, SingleOperation,
+SingleLastOperation).
--- a/doc/book/en/development/devweb/internationalization.rst Wed Sep 30 18:57:42 2009 +0200
+++ b/doc/book/en/development/devweb/internationalization.rst Wed Oct 07 12:31:08 2009 +0200
@@ -2,7 +2,6 @@
.. _internationalization:
-
Internationalization
---------------------
--- a/doc/book/en/development/devweb/js.rst Wed Sep 30 18:57:42 2009 +0200
+++ b/doc/book/en/development/devweb/js.rst Wed Oct 07 12:31:08 2009 +0200
@@ -24,7 +24,7 @@
XXX external_resources variable (which needs love)
CubicWeb javascript api
-~~~~~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~~~~~~~~
Javascript resources are typically loaded on demand, from views. The
request object (available as self.req from most application objects,
--- a/doc/book/en/development/devweb/rtags.rst Wed Sep 30 18:57:42 2009 +0200
+++ b/doc/book/en/development/devweb/rtags.rst Wed Oct 07 12:31:08 2009 +0200
@@ -4,11 +4,11 @@
The "Relation tags" structure
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-.. automodule:: `cubicweb.rtags`
+.. automodule:: cubicweb.rtags
:members:
The `uicfg` module (:mod:`cubicweb.web.uicfg`)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-.. automodule:: `cubicweb.web.uicfg`
+.. automodule:: cubicweb.web.uicfg
:members:
Binary file doc/book/en/images/03-transitions-view.en.png has changed
--- a/doc/book/en/makefile Wed Sep 30 18:57:42 2009 +0200
+++ b/doc/book/en/makefile Wed Oct 07 12:31:08 2009 +0200
@@ -10,7 +10,7 @@
SPHINXBUILD = sphinx-build
PAPER =
#BUILDDIR = build
-BUILDDIR = /tmp/cwdoc
+BUILDDIR = ~/tmp/cwdoc
# Internal variables for sphinx
PAPEROPT_a4 = -D latex_paper_size=a4
--- a/entities/__init__.py Wed Sep 30 18:57:42 2009 +0200
+++ b/entities/__init__.py Wed Oct 07 12:31:08 2009 +0200
@@ -183,6 +183,8 @@
"""return (path, parameters) which should be used as redirect
information when this entity is being deleted
"""
+ if hasattr(self, 'parent') and self.parent():
+ return self.parent().rest_path(), {}
return str(self.e_schema).lower(), {}
def pre_web_edit(self):
--- a/entities/test/unittest_wfobjs.py Wed Sep 30 18:57:42 2009 +0200
+++ b/entities/test/unittest_wfobjs.py Wed Oct 07 12:31:08 2009 +0200
@@ -275,7 +275,7 @@
self.member.clear_all_caches()
self.assertEquals(self.member.current_workflow.eid, wf.eid)
self.assertEquals(self.member.state, 'asleep')
- self.assertEquals(self.member.workflow_history, [])
+ self.assertEquals(self.member.workflow_history, ())
def test_custom_wf_replace_state_keep_history(self):
"""member in inital state with some history, state is redirected and
--- a/entities/wfobjs.py Wed Sep 30 18:57:42 2009 +0200
+++ b/entities/wfobjs.py Wed Oct 07 12:31:08 2009 +0200
@@ -137,7 +137,7 @@
return tr
def add_wftransition(self, name, subworkflow, fromstates, exitpoints,
- requiredgroups=(), conditions=(), **kwargs):
+ requiredgroups=(), conditions=(), **kwargs):
"""add a workflow transition to this workflow"""
tr = self._add_transition('WorkflowTransition', name, fromstates,
requiredgroups, conditions, **kwargs)
@@ -222,9 +222,10 @@
if isinstance(conditions, basestring):
conditions = (conditions,)
for expr in conditions:
- if isinstance(expr, str):
+ if isinstance(expr, basestring):
kwargs = {'expr': unicode(expr)}
- elif isinstance(expr, dict):
+ else:
+ assert isinstance(expr, dict)
kwargs = expr
kwargs['x'] = self.eid
kwargs.setdefault('mainvars', u'X')
@@ -425,18 +426,11 @@
"""
if self.current_state is None or self.current_workflow is None:
return
-<<<<<<< /home/syt/src/fcubicweb/cubicweb/entities/wfobjs.py
rset = self._cw.execute(
- 'Any T,N WHERE S allowed_transition T, S eid %(x)s, '
- 'T name N, T transition_of WF, WF eid %(wfeid)s',
- {'x': self.current_state.eid,
-=======
- rset = self.req.execute(
'Any T,TT, TN WHERE S allowed_transition T, S eid %(x)s, '
'T type TT, T type %(type)s, '
'T name TN, T transition_of WF, WF eid %(wfeid)s',
{'x': self.current_state.eid, 'type': type,
->>>>>>> /tmp/wfobjs.py~other.TyHPqT
'wfeid': self.current_workflow.eid}, 'x')
for tr in rset.entities():
if tr.may_be_fired(self.eid):
@@ -463,16 +457,12 @@
entity's workflow
"""
assert self.current_workflow
-<<<<<<< /home/syt/src/fcubicweb/cubicweb/entities/wfobjs.py
+ if isinstance(tr, basestring):
+ tr = self.current_workflow.transition_by_name(tr)
tr = self.current_workflow.transition_by_name(trname)
if tr is None:
raise WorkflowException('not a %s transition: %s' % (self.__regid__,
trname))
-=======
- if isinstance(tr, basestring):
- tr = self.current_workflow.transition_by_name(tr)
- assert tr is not None, 'not a %s transition: %s' % (self.id, tr)
->>>>>>> /tmp/wfobjs.py~other.TyHPqT
return self._add_trinfo(comment, commentformat, tr.eid)
def change_state(self, statename, comment=None, commentformat=None, tr=None):
--- a/entity.py Wed Sep 30 18:57:42 2009 +0200
+++ b/entity.py Wed Oct 07 12:31:08 2009 +0200
@@ -700,7 +700,7 @@
else raise `KeyError`
"""
res = self._related_cache['%s_%s' % (rtype, role)][entities]
- if limit:
+ if limit is not None:
if entities:
res = res[:limit]
else:
@@ -722,9 +722,10 @@
target = 'subject'
if rcard in '?1':
for rentity in related:
- rentity._related_cache['%s_%s' % (rtype, target)] = (self.as_rset(), [self])
+ rentity._related_cache['%s_%s' % (rtype, target)] = (
+ self.as_rset(), (self,))
else:
- related = []
+ related = ()
self._related_cache['%s_%s' % (rtype, role)] = (rset, related)
def clear_related_cache(self, rtype=None, role=None):
@@ -820,8 +821,12 @@
:return: the list of indexable word of this entity
"""
from indexer.query_objects import tokenize
+ # take care to cases where we're modyfying the schema
+ pending = self.req.transaction_data.setdefault('pendingrdefs', set())
words = []
for rschema in self.e_schema.indexable_attributes():
+ if (self.e_schema, rschema) in pending:
+ continue
try:
value = self.printable_value(rschema, format='text/plain')
except TransformError:
--- a/etwist/server.py Wed Sep 30 18:57:42 2009 +0200
+++ b/etwist/server.py Wed Oct 07 12:31:08 2009 +0200
@@ -35,7 +35,7 @@
# (start-repository command)
# See http://www.erlenstar.demon.co.uk/unix/faq_toc.html#TOC16
if os.fork(): # launch child and...
- return -1
+ os._exit(0)
os.setsid()
if os.fork(): # launch child and...
os._exit(0) # kill off parent again.
@@ -379,12 +379,11 @@
logger = getLogger('cubicweb.twisted')
logger.info('instance started on %s', baseurl)
if not debug:
- if daemonize():
- # child process
- return
+ print 'instance starting in the background'
+ daemonize()
if config['pid-file']:
# ensure the directory where the pid-file should be set exists (for
- # instance /var/run/cubicweb may be deleted on computer restart)
+ # instance /var/run/cubicweb may be deleted on computer restart)
piddir = os.path.dirname(config['pid-file'])
if not os.path.exists(piddir):
os.makedirs(piddir)
--- a/goa/goaconfig.py Wed Sep 30 18:57:42 2009 +0200
+++ b/goa/goaconfig.py Wed Oct 07 12:31:08 2009 +0200
@@ -17,7 +17,7 @@
from cubicweb.goa.dbmyams import load_schema
UNSUPPORTED_OPTIONS = set(('connections-pool-size',
- 'pyro-host', 'pyro-id', 'pyro-instance-id',
+ 'pyro-host', 'pyro-instance-id',
'pyro-ns-host', 'pyro-ns-group',
'https-url', 'host', 'pid-file', 'uid', 'base-url', 'log-file',
'smtp-host', 'smtp-port',
--- a/hooks/integrity.py Wed Sep 30 18:57:42 2009 +0200
+++ b/hooks/integrity.py Wed Oct 07 12:31:08 2009 +0200
@@ -198,6 +198,11 @@
events = ('before_delete_relation',)
def __call__(self):
+ # if the relation is being delete, don't delete composite's components
+ # automatically
+ pendingrdefs = self._cw.transaction_data.get('pendingrdefs', ())
+ if (self._cw.describe(eidfrom)[0], rtype, self._cw.describe(eidto)[0]) in pendingrdefs:
+ return
composite = self._cw.schema_rproperty(self.rtype, self.eidfrom, self.eidto,
'composite')
if composite == 'subject':
--- a/hooks/syncschema.py Wed Sep 30 18:57:42 2009 +0200
+++ b/hooks/syncschema.py Wed Oct 07 12:31:08 2009 +0200
@@ -359,7 +359,12 @@
eschema = self.schema.eschema(rdef.subject)
except KeyError:
return # entity type currently being added
+ # propagate attribute to children classes
rschema = self.schema.rschema(rdef.name)
+ # if relation type has been inserted in the same transaction, its final
+ # attribute is still set to False, so we've to ensure it's False
+ rschema.final = True
+ # XXX 'infered': True/False, not clear actually
props.update({'constraints': rdef.constraints,
'description': rdef.description,
'cardinality': rdef.cardinality,
@@ -371,6 +376,11 @@
for rql, args in ss.frdef2rql(rschema, str(specialization),
rdef.object, props):
session.execute(rql, args)
+ # set default value, using sql for performance and to avoid
+ # modification_date update
+ if default:
+ session.system_sql('UPDATE %s SET %s=%%(default)s' % (table, column),
+ {'default': default})
class SourceDbCWRelationAdd(SourceDbCWAttributeAdd):
@@ -965,14 +975,15 @@
session = self._cw
subjschema, rschema, objschema = session.vreg.schema.schema_by_eid(self.eidfrom)
pendings = session.transaction_data.get('pendingeids', ())
+ pendingrdefs = session.transaction_data.setdefault('pendingrdefs', set())
# first delete existing relation if necessary
if rschema.is_final():
rdeftype = 'CWAttribute'
+ pendingrdefs.add((subjschema, rschema))
else:
rdeftype = 'CWRelation'
+ pendingrdefs.add((subjschema, rschema, objschema))
if not (subjschema.eid in pendings or objschema.eid in pendings):
- pending = session.transaction_data.setdefault('pendingrdefs', set())
- pending.add((subjschema, rschema, objschema))
session.execute('DELETE X %s Y WHERE X is %s, Y is %s'
% (rschema, subjschema, objschema))
execute = session.unsafe_execute
--- a/hooks/workflow.py Wed Sep 30 18:57:42 2009 +0200
+++ b/hooks/workflow.py Wed Oct 07 12:31:08 2009 +0200
@@ -48,7 +48,7 @@
state.eid)
-class _FireAutotransitionOp(PreCommitOperation):
+class _FireAutotransitionOp(hook.Operation):
"""try to fire auto transition after state changes"""
def precommit_event(self):
--- a/rset.py Wed Sep 30 18:57:42 2009 +0200
+++ b/rset.py Wed Oct 07 12:31:08 2009 +0200
@@ -117,6 +117,10 @@
self.description +rset.description)
return self.req.decorate_rset(rset)
+ def copy(self):
+ rset = ResultSet(self.rows[:], self.rql, self.args, self.description[:])
+ return self.req.decorate_rset(rset)
+
def _prepare_copy(self, rows, descr):
rset = ResultSet(rows, self.rql, self.args, descr)
return self.req.decorate_rset(rset)
--- a/schema.py Wed Sep 30 18:57:42 2009 +0200
+++ b/schema.py Wed Oct 07 12:31:08 2009 +0200
@@ -557,11 +557,6 @@
def __init__(self, restriction):
self.restriction = restriction
- def check_consistency(self, subjschema, objschema, rdef):
- if objschema.is_final():
- raise BadSchemaDefinition("unique constraint doesn't apply to "
- "final entity type")
-
def serialize(self):
return self.restriction
@@ -577,7 +572,7 @@
def repo_check(self, session, eidfrom, rtype, eidto):
"""raise ValidationError if the relation doesn't satisfy the constraint
"""
- pass # this is a vocabulary constraint, not enforce
+ pass # this is a vocabulary constraint, not enforce XXX why?
def __str__(self):
return self.restriction
@@ -591,13 +586,17 @@
are also enforced at the repository level
"""
def exec_query(self, session, eidfrom, eidto):
+ if eidto is None:
+ rql = 'Any S WHERE S eid %(s)s, ' + self.restriction
+ return session.unsafe_execute(rql, {'s': eidfrom}, 's',
+ build_descr=False)
rql = 'Any S,O WHERE S eid %(s)s, O eid %(o)s, ' + self.restriction
return session.unsafe_execute(rql, {'s': eidfrom, 'o': eidto},
('s', 'o'), build_descr=False)
def error(self, eid, rtype, msg):
raise ValidationError(eid, {rtype: msg})
- def repo_check(self, session, eidfrom, rtype, eidto):
+ def repo_check(self, session, eidfrom, rtype, eidto=None):
"""raise ValidationError if the relation doesn't satisfy the constraint
"""
if not self.exec_query(session, eidfrom, eidto):
@@ -610,7 +609,7 @@
"""the unique rql constraint check that the result of the query isn't
greater than one
"""
- def repo_check(self, session, eidfrom, rtype, eidto):
+ def repo_check(self, session, eidfrom, rtype, eidto=None):
"""raise ValidationError if the relation doesn't satisfy the constraint
"""
if len(self.exec_query(session, eidfrom, eidto)) > 1:
--- a/server/migractions.py Wed Sep 30 18:57:42 2009 +0200
+++ b/server/migractions.py Wed Oct 07 12:31:08 2009 +0200
@@ -626,6 +626,10 @@
if card == '1':
rql += ', NOT X %s NULL' % oldname
self.rqlexec(rql, ask_confirm=self.verbosity>=2)
+ # XXX if both attributes fulltext indexed, should skip fti rebuild
+ # XXX if old attribute was fti indexed but not the new one old value
+ # won't be removed from the index (this occurs on other kind of
+ # fulltextindexed change...)
self.cmd_drop_attribute(etype, oldname, commit=commit)
def cmd_add_entity_type(self, etype, auto=True, commit=True):
@@ -954,7 +958,7 @@
if commit:
self.commit()
- @deprecated('[3.4] use sync_schema_props_perms(ertype, syncprops=False)')
+ @deprecated('[3.2] use sync_schema_props_perms(ertype, syncprops=False)')
def cmd_synchronize_permissions(self, ertype, commit=True):
self.cmd_sync_schema_props_perms(ertype, syncprops=False, commit=commit)
@@ -962,6 +966,21 @@
def cmd_add_workflow(self, name, wfof, default=True, commit=False,
**kwargs):
+ """
+ create a new workflow and links it to entity types
+ :type name: unicode
+ :param name: name of the workflow
+
+ :type wfof: string or list/tuple of strings
+ :param wfof: entity type(s) having this workflow
+
+ :type default: bool
+ :param default: tells wether this is the default workflow
+ for the specified entity type(s); set it to false in
+ the case of a subworkflow
+
+ :rtype: `Workflow`
+ """
self.session.set_pool() # ensure pool is set
wf = self.cmd_create_entity('Workflow', name=unicode(name),
**kwargs)
@@ -991,7 +1010,7 @@
return rset.get_entity(0, 0)
return self.cmd_add_workflow('%s workflow' % ';'.join(etypes), etypes)
- @deprecated('use add_workflow and Workflow.add_state method')
+ @deprecated('[3.5] use add_workflow and Workflow.add_state method')
def cmd_add_state(self, name, stateof, initial=False, commit=False, **kwargs):
"""method to ease workflow definition: add a state for one or more
entity type(s)
@@ -1002,7 +1021,7 @@
self.commit()
return state.eid
- @deprecated('use add_workflow and Workflow.add_transition method')
+ @deprecated('[3.5] use add_workflow and Workflow.add_transition method')
def cmd_add_transition(self, name, transitionof, fromstates, tostate,
requiredgroups=(), conditions=(), commit=False, **kwargs):
"""method to ease workflow definition: add a transition for one or more
@@ -1015,7 +1034,7 @@
self.commit()
return tr.eid
- @deprecated('use Transition.set_transition_permissions method')
+ @deprecated('[3.5] use Transition.set_transition_permissions method')
def cmd_set_transition_permissions(self, treid,
requiredgroups=(), conditions=(),
reset=True, commit=False):
@@ -1028,7 +1047,7 @@
if commit:
self.commit()
- @deprecated('use entity.fire_transition("transition") or entity.change_state("state")')
+ @deprecated('[3.5] use entity.fire_transition("transition") or entity.change_state("state")')
def cmd_set_state(self, eid, statename, commit=False):
self.session.set_pool() # ensure pool is set
self.session.entity_from_eid(eid).change_state(statename)
--- a/server/repository.py Wed Sep 30 18:57:42 2009 +0200
+++ b/server/repository.py Wed Oct 07 12:31:08 2009 +0200
@@ -843,6 +843,8 @@
self._extid_cache[cachekey] = eid
self._type_source_cache[eid] = (etype, source.uri, extid)
entity = source.before_entity_insertion(session, extid, etype, eid)
+ if not hasattr(entity, 'edited_attributes'):
+ entity.edited_attributes = set()
if source.should_call_hooks:
entity.edited_attributes = set(entity)
self.hm.call_hooks('before_add_entity', session, entity=entity)
@@ -965,6 +967,9 @@
the entity eid should originaly be None and a unique eid is assigned to
the entity instance
"""
+ # init edited_attributes before calling before_add_entity hooks
+ entity._is_saved = False # entity has an eid but is not yet saved
+ entity.edited_attributes = set(entity)
entity = entity.pre_add_hook()
eschema = entity.e_schema
etype = str(eschema)
@@ -973,10 +978,7 @@
entity.set_eid(self.system_source.create_eid(session))
if server.DEBUG & server.DBG_REPO:
print 'ADD entity', etype, entity.eid, dict(entity)
- entity._is_saved = False # entity has an eid but is not yet saved
relations = []
- # init edited_attributes before calling before_add_entity hooks
- entity.edited_attributes = set(entity)
if source.should_call_hooks:
self.hm.call_hooks('before_add_entity', session, entity=entity)
# XXX use entity.keys here since edited_attributes is not updated for
@@ -1152,7 +1154,7 @@
def pyro_register(self, host=''):
"""register the repository as a pyro object"""
from logilab.common.pyro_ext import register_object
- appid = self.config['pyro-id'] or self.config.appid
+ appid = self.config['pyro-instance-id'] or self.config.appid
daemon = register_object(self, appid, self.config['pyro-ns-group'],
self.config['pyro-host'],
self.config['pyro-ns-host'])
@@ -1185,7 +1187,7 @@
def pyro_unregister(config):
"""unregister the repository from the pyro name server"""
from logilab.common.pyro_ext import ns_unregister
- appid = config['pyro-id'] or config.appid
+ appid = config['pyro-instance-id'] or config.appid
ns_unregister(appid, config['pyro-ns-group'], config['pyro-ns-host'])
--- a/server/serverconfig.py Wed Sep 30 18:57:42 2009 +0200
+++ b/server/serverconfig.py Wed Oct 07 12:31:08 2009 +0200
@@ -163,18 +163,12 @@
}),
# pyro server.serverconfig
('pyro-host',
- {'type' : 'int',
+ {'type' : 'string',
'default': None,
'help': 'Pyro server host, if not detectable correctly through \
gethostname(). It may contains port information using <host>:<port> notation, \
and if not set, it will be choosen randomly',
- 'group': 'pyro-server', 'inputlevel': 2,
- }),
- ('pyro-id', # XXX reuse pyro-instance-id
- {'type' : 'string',
- 'default': None,
- 'help': 'identifier of the repository in the pyro name server',
- 'group': 'pyro-server', 'inputlevel': 2,
+ 'group': 'pyro', 'inputlevel': 2,
}),
) + CubicWebConfiguration.options)
--- a/server/serverctl.py Wed Sep 30 18:57:42 2009 +0200
+++ b/server/serverctl.py Wed Oct 07 12:31:08 2009 +0200
@@ -126,8 +126,11 @@
config = self.config
print underline_title('Configuring the repository')
config.input_config('email', inputlevel)
- if config.pyro_enabled():
- config.input_config('pyro-server', inputlevel)
+ # ask for pyro configuration if pyro is activated and we're not using a
+ # all-in-one config, in which case this is done by the web side command
+ # handler
+ if config.pyro_enabled() and config.name != 'all-in-one':
+ config.input_config('pyro', inputlevel)
print '\n'+underline_title('Configuring the sources')
sourcesfile = config.sources_file()
sconfig = Configuration(options=SOURCE_TYPES['native'].options)
--- a/server/session.py Wed Sep 30 18:57:42 2009 +0200
+++ b/server/session.py Wed Oct 07 12:31:08 2009 +0200
@@ -147,17 +147,16 @@
else:
self._update_entity_rel_cache_del(object, rtype, 'object', subject)
- def _rel_cache(self, eid, rtype, role):
+ def _update_entity_rel_cache_add(self, eid, rtype, role, targeteid):
try:
entity = self.entity_cache(eid)
except KeyError:
return
- return entity.relation_cached(rtype, role)
-
- def _update_entity_rel_cache_add(self, eid, rtype, role, targeteid):
- rcache = self._rel_cache(eid, rtype, role)
+ rcache = entity.relation_cached(rtype, role)
if rcache is not None:
rset, entities = rcache
+ rset = rset.copy()
+ entities = list(entities)
rset.rows.append([targeteid])
if not isinstance(rset.description, list): # else description not set
rset.description = list(rset.description)
@@ -169,9 +168,14 @@
targetentity.cw_col = 0
rset.rowcount += 1
entities.append(targetentity)
+ entity._related_cache['%s_%s' % (rtype, role)] = (rset, tuple(entities))
def _update_entity_rel_cache_del(self, eid, rtype, role, targeteid):
- rcache = self._rel_cache(eid, rtype, role)
+ try:
+ entity = self.entity_cache(eid)
+ except KeyError:
+ return
+ rcache = entity.relation_cached(rtype, role)
if rcache is not None:
rset, entities = rcache
for idx, row in enumerate(rset.rows):
@@ -183,11 +187,14 @@
self.debug('cache inconsistency for %s %s %s %s', eid, rtype,
role, targeteid)
return
+ rset = rset.copy()
+ entities = list(entities)
del rset.rows[idx]
if isinstance(rset.description, list): # else description not set
del rset.description[idx]
del entities[idx]
rset.rowcount -= 1
+ entity._related_cache['%s_%s' % (rtype, role)] = (rset, tuple(entities))
# resource accessors ######################################################
--- a/server/test/data/migratedapp/schema.py Wed Sep 30 18:57:42 2009 +0200
+++ b/server/test/data/migratedapp/schema.py Wed Oct 07 12:31:08 2009 +0200
@@ -50,7 +50,7 @@
'PE require_permission P, P name "add_note", '
'P require_group G'),)}
- whatever = Int() # keep it before `date` for unittest_migraction.test_add_attribute_int
+ whatever = Int(default=2) # keep it before `date` for unittest_migraction.test_add_attribute_int
date = Datetime()
type = String(maxsize=1)
mydate = Date(default='TODAY')
@@ -88,7 +88,7 @@
class Personne(EntityType):
nom = String(fulltextindexed=True, required=True, maxsize=64)
prenom = String(fulltextindexed=True, maxsize=64)
- civility = String(maxsize=1, default='M')
+ civility = String(maxsize=1, default='M', fulltextindexed=True)
promo = String(vocabulary=('bon','pasbon'))
titre = String(fulltextindexed=True, maxsize=128)
adel = String(maxsize=128)
--- a/server/test/data/schema.py Wed Sep 30 18:57:42 2009 +0200
+++ b/server/test/data/schema.py Wed Oct 07 12:31:08 2009 +0200
@@ -82,7 +82,7 @@
class Personne(EntityType):
nom = String(fulltextindexed=True, required=True, maxsize=64)
prenom = String(fulltextindexed=True, maxsize=64)
- sexe = String(maxsize=1, default='M')
+ sexe = String(maxsize=1, default='M', fulltextindexed=True)
promo = String(vocabulary=('bon','pasbon'))
titre = String(fulltextindexed=True, maxsize=128)
adel = String(maxsize=128)
--- a/server/test/unittest_ldapuser.py Wed Sep 30 18:57:42 2009 +0200
+++ b/server/test/unittest_ldapuser.py Wed Oct 07 12:31:08 2009 +0200
@@ -77,7 +77,7 @@
self.assertEquals(e.surname, None)
self.assertEquals(e.in_group[0].name, 'users')
self.assertEquals(e.owned_by[0].login, 'syt')
- self.assertEquals(e.created_by, [])
+ self.assertEquals(e.created_by, ())
self.assertEquals(e.primary_email[0].address, 'Sylvain Thenault')
# email content should be indexed on the user
rset = self.sexecute('CWUser X WHERE X has_text "thenault"')
--- a/server/test/unittest_migractions.py Wed Sep 30 18:57:42 2009 +0200
+++ b/server/test/unittest_migractions.py Wed Oct 07 12:31:08 2009 +0200
@@ -55,12 +55,17 @@
def test_add_attribute_int(self):
self.failIf('whatever' in self.schema)
+ self.add_entity('Note')
+ self.commit()
orderdict = dict(self.mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, '
'RDEF relation_type RT, RDEF ordernum O, RT name RTN'))
self.mh.cmd_add_attribute('Note', 'whatever')
self.failUnless('whatever' in self.schema)
self.assertEquals(self.schema['whatever'].subjects(), ('Note',))
self.assertEquals(self.schema['whatever'].objects(), ('Int',))
+ self.assertEquals(self.schema['Note'].default('whatever'), 2)
+ note = self.execute('Note X').get_entity(0, 0)
+ self.assertEquals(note.whatever, 2)
orderdict2 = dict(self.mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, '
'RDEF relation_type RT, RDEF ordernum O, RT name RTN'))
whateverorder = migrschema['whatever'].rproperty('Note', 'Int', 'order')
@@ -308,9 +313,9 @@
'X ecrit_part PE, U in_group G, '
'PE require_permission P, P name "add_note", P require_group G')
self.assertEquals([et.name for et in eexpr.reverse_add_permission], ['Note'])
- self.assertEquals(eexpr.reverse_read_permission, [])
- self.assertEquals(eexpr.reverse_delete_permission, [])
- self.assertEquals(eexpr.reverse_update_permission, [])
+ self.assertEquals(eexpr.reverse_read_permission, ())
+ self.assertEquals(eexpr.reverse_delete_permission, ())
+ self.assertEquals(eexpr.reverse_update_permission, ())
# no more rqlexpr to delete and add para attribute
self.failIf(self._rrqlexpr_rset('add', 'para'))
self.failIf(self._rrqlexpr_rset('delete', 'para'))
@@ -320,8 +325,8 @@
'O require_permission P, P name "add_note", '
'U in_group G, P require_group G')
self.assertEquals([rt.name for rt in rexpr.reverse_add_permission], ['ecrit_par'])
- self.assertEquals(rexpr.reverse_read_permission, [])
- self.assertEquals(rexpr.reverse_delete_permission, [])
+ self.assertEquals(rexpr.reverse_read_permission, ())
+ self.assertEquals(rexpr.reverse_delete_permission, ())
# no more rqlexpr to delete and add travaille relation
self.failIf(self._rrqlexpr_rset('add', 'travaille'))
self.failIf(self._rrqlexpr_rset('delete', 'travaille'))
--- a/server/test/unittest_msplanner.py Wed Sep 30 18:57:42 2009 +0200
+++ b/server/test/unittest_msplanner.py Wed Oct 07 12:31:08 2009 +0200
@@ -682,7 +682,7 @@
def test_3sources_ambigous(self):
- self._test('Any X,T WHERE X owned_by U, U login "syt", X title T',
+ self._test('Any X,T WHERE X owned_by U, U login "syt", X title T, X is IN(Bookmark, Card, EmailThread)',
[('FetchStep', [('Any X,T WHERE X title T, X is Card', [{'X': 'Card', 'T': 'String'}])],
[self.cards, self.system], None,
{'T': 'table0.C1', 'X': 'table0.C0', 'X.title': 'table0.C1'}, []),
@@ -1275,7 +1275,7 @@
None, None, [self.system], {}, [])])
def test_has_text_3(self):
- self._test('Any X WHERE X has_text "toto", X title "zoubidou"',
+ self._test('Any X WHERE X has_text "toto", X title "zoubidou", X is IN (Card, EmailThread)',
[('FetchStep', [(u'Any X WHERE X title "zoubidou", X is Card',
[{'X': 'Card'}])],
[self.cards, self.system], None, {'X': 'table0.C0'}, []),
@@ -1299,7 +1299,7 @@
])
def test_ambigous_sort_func(self):
- self._test('Any X ORDERBY DUMB_SORT(RF) WHERE X title RF',
+ self._test('Any X ORDERBY DUMB_SORT(RF) WHERE X title RF, X is IN (Bookmark, Card, EmailThread)',
[('AggrStep', 'Any X ORDERBY DUMB_SORT(RF)',
None, None, 'table0', None,
[('FetchStep', [('Any X,RF WHERE X title RF, X is Card',
--- a/server/test/unittest_querier.py Wed Sep 30 18:57:42 2009 +0200
+++ b/server/test/unittest_querier.py Wed Oct 07 12:31:08 2009 +0200
@@ -219,7 +219,7 @@
self.assertIsInstance(rset[0][0], (int, long))
def test_bytes_storage(self):
- feid = self.execute('INSERT File X: X name "foo.pdf", X data_format "text/plain", X data %(data)s',
+ feid = self.execute('INSERT File X: X data_name "foo.pdf", X data_format "text/plain", X data %(data)s',
{'data': Binary("xxx")})[0][0]
fdata = self.execute('Any D WHERE X data D, X eid %(x)s', {'x': feid}, 'x')[0][0]
self.assertIsInstance(fdata, Binary)
@@ -460,7 +460,7 @@
self.assertListEquals(rset.rows,
[[u'description_format', 13],
[u'description', 14],
- [u'name', 16],
+ [u'name', 14],
[u'created_by', 38],
[u'creation_date', 38],
[u'cwuri', 38],
--- a/server/test/unittest_rql2sql.py Wed Sep 30 18:57:42 2009 +0200
+++ b/server/test/unittest_rql2sql.py Wed Oct 07 12:31:08 2009 +0200
@@ -337,54 +337,15 @@
FROM cw_CWUser AS X LEFT OUTER JOIN cw_CWUser AS Y ON (X.cw_eid=Y.cw_eid)
WHERE X.cw_login=admin'''),
- ('Any XN ORDERBY XN WHERE X name XN',
+ ('Any XN ORDERBY XN WHERE X name XN, X is IN (Basket,Folder,Tag)',
'''SELECT X.cw_name
-FROM cw_BaseTransition AS X
-UNION ALL
-SELECT X.cw_name
FROM cw_Basket AS X
UNION ALL
SELECT X.cw_name
-FROM cw_CWCache AS X
-UNION ALL
-SELECT X.cw_name
-FROM cw_CWConstraintType AS X
-UNION ALL
-SELECT X.cw_name
-FROM cw_CWEType AS X
-UNION ALL
-SELECT X.cw_name
-FROM cw_CWGroup AS X
-UNION ALL
-SELECT X.cw_name
-FROM cw_CWPermission AS X
-UNION ALL
-SELECT X.cw_name
-FROM cw_CWRType AS X
-UNION ALL
-SELECT X.cw_name
-FROM cw_File AS X
-UNION ALL
-SELECT X.cw_name
FROM cw_Folder AS X
UNION ALL
SELECT X.cw_name
-FROM cw_Image AS X
-UNION ALL
-SELECT X.cw_name
-FROM cw_State AS X
-UNION ALL
-SELECT X.cw_name
FROM cw_Tag AS X
-UNION ALL
-SELECT X.cw_name
-FROM cw_Transition AS X
-UNION ALL
-SELECT X.cw_name
-FROM cw_Workflow AS X
-UNION ALL
-SELECT X.cw_name
-FROM cw_WorkflowTransition AS X
ORDER BY 1'''),
# DISTINCT, can use relation under exists scope as principal
@@ -469,61 +430,22 @@
FROM cw_CWRType AS X, cw_RQLExpression AS Y
WHERE X.cw_name=CWGroup AND Y.cw_eid IN(1, 2, 3) AND NOT EXISTS(SELECT 1 FROM read_permission_relation AS rel_read_permission0 WHERE rel_read_permission0.eid_from=X.cw_eid AND rel_read_permission0.eid_to=Y.cw_eid)'''),
- ('Any MAX(X)+MIN(X), N GROUPBY N WHERE X name N;',
+ ('Any MAX(X)+MIN(X), N GROUPBY N WHERE X name N, X is IN (Basket, Folder, Tag);',
'''SELECT (MAX(T1.C0) + MIN(T1.C0)), T1.C1 FROM (SELECT X.cw_eid AS C0, X.cw_name AS C1
-FROM cw_BaseTransition AS X
-UNION ALL
-SELECT X.cw_eid AS C0, X.cw_name AS C1
FROM cw_Basket AS X
UNION ALL
SELECT X.cw_eid AS C0, X.cw_name AS C1
-FROM cw_CWCache AS X
-UNION ALL
-SELECT X.cw_eid AS C0, X.cw_name AS C1
-FROM cw_CWConstraintType AS X
-UNION ALL
-SELECT X.cw_eid AS C0, X.cw_name AS C1
-FROM cw_CWEType AS X
-UNION ALL
-SELECT X.cw_eid AS C0, X.cw_name AS C1
-FROM cw_CWGroup AS X
-UNION ALL
-SELECT X.cw_eid AS C0, X.cw_name AS C1
-FROM cw_CWPermission AS X
-UNION ALL
-SELECT X.cw_eid AS C0, X.cw_name AS C1
-FROM cw_CWRType AS X
-UNION ALL
-SELECT X.cw_eid AS C0, X.cw_name AS C1
-FROM cw_File AS X
-UNION ALL
-SELECT X.cw_eid AS C0, X.cw_name AS C1
FROM cw_Folder AS X
UNION ALL
SELECT X.cw_eid AS C0, X.cw_name AS C1
-FROM cw_Image AS X
-UNION ALL
-SELECT X.cw_eid AS C0, X.cw_name AS C1
-FROM cw_State AS X
-UNION ALL
-SELECT X.cw_eid AS C0, X.cw_name AS C1
-FROM cw_Tag AS X
-UNION ALL
-SELECT X.cw_eid AS C0, X.cw_name AS C1
-FROM cw_Transition AS X
-UNION ALL
-SELECT X.cw_eid AS C0, X.cw_name AS C1
-FROM cw_Workflow AS X
-UNION ALL
-SELECT X.cw_eid AS C0, X.cw_name AS C1
-FROM cw_WorkflowTransition AS X) AS T1
+FROM cw_Tag AS X) AS T1
GROUP BY T1.C1'''),
- ('Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 1, N, DF WHERE X name N, X data D, X data_format DF;',
- '''SELECT (MAX(T1.C1) + MIN(LENGTH(T1.C0))), T1.C2 FROM (SELECT X.cw_data AS C0, X.cw_eid AS C1, X.cw_name AS C2, X.cw_data_format AS C3
+ ('Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 1, N, DF WHERE X data_name N, X data D, X data_format DF;',
+ '''SELECT (MAX(T1.C1) + MIN(LENGTH(T1.C0))), T1.C2 FROM (SELECT X.cw_data AS C0, X.cw_eid AS C1, X.cw_data_name AS C2, X.cw_data_format AS C3
FROM cw_File AS X
UNION ALL
-SELECT X.cw_data AS C0, X.cw_eid AS C1, X.cw_name AS C2, X.cw_data_format AS C3
+SELECT X.cw_data AS C0, X.cw_eid AS C1, X.cw_data_name AS C2, X.cw_data_format AS C3
FROM cw_Image AS X) AS T1
GROUP BY T1.C2
ORDER BY 1,2,T1.C3'''),
@@ -533,11 +455,11 @@
FROM cw_Affaire AS A
ORDER BY 2) AS T1'''),
- ('DISTINCT Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 2, DF WHERE X name N, X data D, X data_format DF;',
- '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(T1.C1) + MIN(LENGTH(T1.C0))) AS C0, T1.C2 AS C1, T1.C3 AS C2 FROM (SELECT DISTINCT X.cw_data AS C0, X.cw_eid AS C1, X.cw_name AS C2, X.cw_data_format AS C3
+ ('DISTINCT Any MAX(X)+MIN(LENGTH(D)), N GROUPBY N ORDERBY 2, DF WHERE X data_name N, X data D, X data_format DF;',
+ '''SELECT T1.C0,T1.C1 FROM (SELECT DISTINCT (MAX(T1.C1) + MIN(LENGTH(T1.C0))) AS C0, T1.C2 AS C1, T1.C3 AS C2 FROM (SELECT DISTINCT X.cw_data AS C0, X.cw_eid AS C1, X.cw_data_name AS C2, X.cw_data_format AS C3
FROM cw_File AS X
UNION
-SELECT DISTINCT X.cw_data AS C0, X.cw_eid AS C1, X.cw_name AS C2, X.cw_data_format AS C3
+SELECT DISTINCT X.cw_data AS C0, X.cw_eid AS C1, X.cw_data_name AS C2, X.cw_data_format AS C3
FROM cw_Image AS X) AS T1
GROUP BY T1.C2,T1.C3
ORDER BY 2,3) AS T1
@@ -1409,16 +1331,12 @@
FROM appears AS appears0, entities AS X
WHERE appears0.words @@ to_tsquery('default', 'hip&hop&momo') AND appears0.uid=X.eid AND X.type='Personne'"""),
- ('Any X WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,File,Folder)',
+ ('Any X WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)',
"""SELECT X.cw_eid
FROM appears AS appears0, cw_Basket AS X
WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=X.cw_eid AND X.cw_name=tutu
UNION ALL
SELECT X.cw_eid
-FROM appears AS appears0, cw_File AS X
-WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=X.cw_eid AND X.cw_name=tutu
-UNION ALL
-SELECT X.cw_eid
FROM appears AS appears0, cw_Folder AS X
WHERE appears0.words @@ to_tsquery('default', 'toto&tata') AND appears0.uid=X.cw_eid AND X.cw_name=tutu
"""),
@@ -1558,16 +1476,12 @@
FROM appears AS appears0, entities AS X
WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=X.eid AND X.type='Personne'"""),
- ('Any X WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,File,Folder)',
+ ('Any X WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)',
"""SELECT X.cw_eid
FROM appears AS appears0, cw_Basket AS X
WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=X.cw_eid AND X.cw_name=tutu
UNION ALL
SELECT X.cw_eid
-FROM appears AS appears0, cw_File AS X
-WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=X.cw_eid AND X.cw_name=tutu
-UNION ALL
-SELECT X.cw_eid
FROM appears AS appears0, cw_Folder AS X
WHERE appears0.word_id IN (SELECT word_id FROM word WHERE word in ('toto', 'tata')) AND appears0.uid=X.cw_eid AND X.cw_name=tutu
"""),
@@ -1619,16 +1533,12 @@
"""SELECT X.eid
FROM appears AS appears0, entities AS X
WHERE MATCH (appears0.words) AGAINST ('hip hop momo' IN BOOLEAN MODE) AND appears0.uid=X.eid AND X.type='Personne'"""),
- ('Any X WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,File,Folder)',
+ ('Any X WHERE X has_text "toto tata", X name "tutu", X is IN (Basket,Folder)',
"""SELECT X.cw_eid
FROM appears AS appears0, cw_Basket AS X
WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE) AND appears0.uid=X.cw_eid AND X.cw_name=tutu
UNION ALL
SELECT X.cw_eid
-FROM appears AS appears0, cw_File AS X
-WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE) AND appears0.uid=X.cw_eid AND X.cw_name=tutu
-UNION ALL
-SELECT X.cw_eid
FROM appears AS appears0, cw_Folder AS X
WHERE MATCH (appears0.words) AGAINST ('toto tata' IN BOOLEAN MODE) AND appears0.uid=X.cw_eid AND X.cw_name=tutu
""")
--- a/server/test/unittest_ssplanner.py Wed Sep 30 18:57:42 2009 +0200
+++ b/server/test/unittest_ssplanner.py Wed Oct 07 12:31:08 2009 +0200
@@ -25,19 +25,19 @@
BasePlannerTC.tearDown(self)
def test_ordered_ambigous_sol(self):
- self._test('Any XN ORDERBY XN WHERE X name XN, X is IN (Basket, File, Folder)',
- [('OneFetchStep', [('Any XN ORDERBY XN WHERE X name XN, X is IN(Basket, File, Folder)',
+ self._test('Any XN ORDERBY XN WHERE X name XN, X is IN (Basket, State, Folder)',
+ [('OneFetchStep', [('Any XN ORDERBY XN WHERE X name XN, X is IN(Basket, State, Folder)',
[{'X': 'Basket', 'XN': 'String'},
- {'X': 'File', 'XN': 'String'},
+ {'X': 'State', 'XN': 'String'},
{'X': 'Folder', 'XN': 'String'}])],
None, None,
[self.system], None, [])])
def test_groupeded_ambigous_sol(self):
- self._test('Any XN,COUNT(X) GROUPBY XN WHERE X name XN, X is IN (Basket, File, Folder)',
- [('OneFetchStep', [('Any XN,COUNT(X) GROUPBY XN WHERE X name XN, X is IN(Basket, File, Folder)',
+ self._test('Any XN,COUNT(X) GROUPBY XN WHERE X name XN, X is IN (Basket, State, Folder)',
+ [('OneFetchStep', [('Any XN,COUNT(X) GROUPBY XN WHERE X name XN, X is IN(Basket, State, Folder)',
[{'X': 'Basket', 'XN': 'String'},
- {'X': 'File', 'XN': 'String'},
+ {'X': 'State', 'XN': 'String'},
{'X': 'Folder', 'XN': 'String'}])],
None, None,
[self.system], None, [])])
--- a/server/utils.py Wed Sep 30 18:57:42 2009 +0200
+++ b/server/utils.py Wed Oct 07 12:31:08 2009 +0200
@@ -133,6 +133,7 @@
def start(self):
self.running_threads.append(self)
+ self.daemon = True
Thread.start(self)
@property
--- a/sobjects/notification.py Wed Sep 30 18:57:42 2009 +0200
+++ b/sobjects/notification.py Wed Oct 07 12:31:08 2009 +0200
@@ -15,12 +15,9 @@
from cubicweb.selectors import yes
from cubicweb.view import Component
-from cubicweb.common.mail import format_mail
-from cubicweb.common.mail import NotificationView
+from cubicweb.common.mail import NotificationView, SkipEmail
from cubicweb.server.hook import SendMailOp
-parse_message_id = deprecated('parse_message_id is now defined in cubicweb.common.mail')(parse_message_id)
-
class RecipientsFinder(Component):
"""this component is responsible to find recipients of a notification
--- a/test/unittest_entity.py Wed Sep 30 18:57:42 2009 +0200
+++ b/test/unittest_entity.py Wed Oct 07 12:31:08 2009 +0200
@@ -306,11 +306,11 @@
def test_printable_value_bytes(self):
e = self.add_entity('File', data=Binary('lambda x: 1'), data_format=u'text/x-python',
- data_encoding=u'ascii', name=u'toto.py')
+ data_encoding=u'ascii', data_name=u'toto.py')
from cubicweb.common import mttransforms
if mttransforms.HAS_PYGMENTS_TRANSFORMS:
self.assertEquals(e.printable_value('data'),
- '''<div class="highlight"><pre><span class="k">lambda</span> <span class="n">x</span><span class="p">:</span> <span class="mf">1</span>
+ '''<div class="highlight"><pre><span class="k">lambda</span> <span class="n">x</span><span class="p">:</span> <span class="mi">1</span>
</pre></div>
''')
else:
@@ -321,7 +321,7 @@
''')
e = self.add_entity('File', data=Binary('*héhéhé*'), data_format=u'text/rest',
- data_encoding=u'utf-8', name=u'toto.txt')
+ data_encoding=u'utf-8', data_name=u'toto.txt')
self.assertEquals(e.printable_value('data'),
u'<p><em>héhéhé</em></p>\n')
@@ -348,6 +348,21 @@
e['content'] = u'C'est un exemple sérieux'
self.assertEquals(tidy(e.printable_value('content')),
u"C'est un exemple sérieux")
+ # make sure valid xhtml is left untouched
+ e['content'] = u'<div>R&D<br/></div>'
+ self.assertEquals(e.printable_value('content'), e['content'])
+ e['content'] = u'<div>été</div>'
+ self.assertEquals(e.printable_value('content'), e['content'])
+ e['content'] = u'été'
+ self.assertEquals(e.printable_value('content'), e['content'])
+ e['content'] = u'hop\r\nhop\nhip\rmomo'
+ self.assertEquals(e.printable_value('content'), u'hop\nhop\nhip\nmomo')
+
+ def test_printable_value_bad_html_ms(self):
+ self.skip('fix soup2xhtml to handle this test')
+ e = self.add_entity('Card', title=u'bad html', content=u'<div>R&D<br>',
+ content_format=u'text/html')
+ tidy = lambda x: x.replace('\n', '')
e['content'] = u'<div x:foo="bar">ms orifice produces weird html</div>'
self.assertEquals(tidy(e.printable_value('content')),
u'<div>ms orifice produces weird html</div>')
@@ -360,25 +375,17 @@
'char_encoding' : 'utf8'})).decode('utf-8').strip()
self.assertEquals(tidy(e.printable_value('content')),
u'<div>ms orifice produces weird html</div>')
- # make sure valid xhtml is left untouched
- e['content'] = u'<div>R&D<br/></div>'
- self.assertEquals(e.printable_value('content'), e['content'])
- e['content'] = u'<div>été</div>'
- self.assertEquals(e.printable_value('content'), e['content'])
- e['content'] = u'été'
- self.assertEquals(e.printable_value('content'), e['content'])
- e['content'] = u'hop\r\nhop\nhip\rmomo'
- self.assertEquals(e.printable_value('content'), u'hop\nhop\nhip\nmomo')
def test_fulltextindex(self):
e = self.vreg['etypes'].etype_class('File')(self.request())
- e['name'] = 'an html file'
e['description'] = 'du <em>html</em>'
e['description_format'] = 'text/html'
e['data'] = Binary('some <em>data</em>')
+ e['data_name'] = 'an html file'
e['data_format'] = 'text/html'
e['data_encoding'] = 'ascii'
+ e.req.transaction_data = {} # XXX req should be a session
self.assertEquals(set(e.get_words()),
set(['an', 'html', 'file', 'du', 'html', 'some', 'data']))
@@ -401,7 +408,7 @@
self.failUnless(trinfo.relation_cached('from_state', 'subject'))
self.failUnless(trinfo.relation_cached('to_state', 'subject'))
self.failUnless(trinfo.relation_cached('wf_info_for', 'subject'))
- self.assertEquals(trinfo.by_transition, [])
+ self.assertEquals(trinfo.by_transition, ())
def test_request_cache(self):
req = self.request()
--- a/test/unittest_rset.py Wed Sep 30 18:57:42 2009 +0200
+++ b/test/unittest_rset.py Wed Oct 07 12:31:08 2009 +0200
@@ -287,7 +287,7 @@
e = rset.get_entity(0, 0)
# if any of the assertion below fails with a KeyError, the relation is not cached
# related entities should be an empty list
- self.assertEquals(e.related_cache('primary_email', 'subject', True), [])
+ self.assertEquals(e.related_cache('primary_email', 'subject', True), ())
# related rset should be an empty rset
cached = e.related_cache('primary_email', 'subject', False)
self.assertIsInstance(cached, ResultSet)
--- a/test/unittest_schema.py Wed Sep 30 18:57:42 2009 +0200
+++ b/test/unittest_schema.py Wed Oct 07 12:31:08 2009 +0200
@@ -166,7 +166,7 @@
'composite', 'condition', 'connait', 'constrained_by', 'content',
'content_format', 'created_by', 'creation_date', 'cstrtype', 'custom_workflow', 'cwuri',
- 'data', 'data_encoding', 'data_format', 'default_workflow', 'defaultval', 'delete_permission',
+ 'data', 'data_encoding', 'data_format', 'data_name', 'default_workflow', 'defaultval', 'delete_permission',
'description', 'description_format', 'destination_state',
'ecrit_par', 'eid', 'evaluee', 'expression', 'exprtype',
--- a/vregistry.py Wed Sep 30 18:57:42 2009 +0200
+++ b/vregistry.py Wed Oct 07 12:31:08 2009 +0200
@@ -380,10 +380,13 @@
# if it was modified, raise RegistryOutOfDate to reload everything
self.info('File %s changed since last visit', filepath)
raise RegistryOutOfDate()
+ # set update time before module loading, else we get some reloading
+ # weirdness in case of syntax error or other error while importing the
+ # module
+ self._lastmodifs[filepath] = modified_on
# load the module
module = load_module_from_name(modname, use_sys=not force_reload)
self.load_module(module)
- self._lastmodifs[filepath] = modified_on
return True
def load_module(self, module):
--- a/web/data/cubicweb.css Wed Sep 30 18:57:42 2009 +0200
+++ b/web/data/cubicweb.css Wed Oct 07 12:31:08 2009 +0200
@@ -747,6 +747,21 @@
top: -1px;
}
+table.htableForm {
+ vertical-align: middle;
+}
+table.htableForm td{
+ padding-left: 1em;
+ padding-top: 0.5em;
+}
+table.htableForm th{
+ padding-left: 1em;
+}
+table.htableForm .validateButton {
+ margin-right: 0.2em;
+ vertical-align: top;
+ margin-bottom: 0.2em; /* because vertical-align doesn't seems to have any effect */
+}
/***************************************/
/* error view (views/management.py) */
--- a/web/data/cubicweb.edition.js Wed Sep 30 18:57:42 2009 +0200
+++ b/web/data/cubicweb.edition.js Wed Oct 07 12:31:08 2009 +0200
@@ -285,10 +285,12 @@
* @param eid : the inlined entity eid
*/
function removeInlinedEntity(peid, rtype, eid) {
- var nodeid = ['rel', peid, rtype, eid].join('-');
+ // XXX work around the eid_param thing (eid + ':' + eid) for #471746
+ var nodeid = ['rel', peid, rtype, eid + ':' + eid].join('-');
var node = jqNode(nodeid);
- if (node && node.length) {
- node.remove();
+ if (! node.attr('cubicweb:type')) {
+ node.attr('cubicweb:type', node.val());
+ node.val('');
var divid = ['div', peid, rtype, eid].join('-');
jqNode(divid).fadeTo('fast', 0.5);
var noticeid = ['notice', peid, rtype, eid].join('-');
@@ -297,15 +299,16 @@
}
function restoreInlinedEntity(peid, rtype, eid) {
- var nodeid = ['rel', peid, rtype, eid].join('-');
- var divid = ['div', peid, rtype, eid].join('-');
- var noticeid = ['notice', peid, rtype, eid].join('-');
+ // XXX work around the eid_param thing (eid + ':' + eid) for #471746
+ var nodeid = ['rel', peid, rtype, eid + ':' + eid].join('-');
var node = jqNode(nodeid);
- if (!(node && node.length)) {
- node = INPUT({type: 'hidden', id: nodeid,
- name: rtype+':'+peid, value: eid});
+ if (node.attr('cubicweb:type')) {
+ node.val(node.attr('cubicweb:type'));
+ node.attr('cubicweb:type', '');
jqNode(['fs', peid, rtype, eid].join('-')).append(node);
+ var divid = ['div', peid, rtype, eid].join('-');
jqNode(divid).fadeTo('fast', 1);
+ var noticeid = ['notice', peid, rtype, eid].join('-');
jqNode(noticeid).hide();
}
}
--- a/web/data/cubicweb.html_tree.css Wed Sep 30 18:57:42 2009 +0200
+++ b/web/data/cubicweb.html_tree.css Wed Oct 07 12:31:08 2009 +0200
@@ -18,8 +18,9 @@
padding-right: 5px;
}
-#selected {
+div.selected {
border: 2px solid black;
+ background-color: #ccc;
}
table.tree td.tree_cell {
--- a/web/formfields.py Wed Sep 30 18:57:42 2009 +0200
+++ b/web/formfields.py Wed Oct 07 12:31:08 2009 +0200
@@ -598,9 +598,9 @@
kwargs['required'] = card in '1+'
kwargs['name'] = rschema.type
if role == 'object':
- kwargs['label'] = (eschema.type + '_object', rschema.type)
+ kwargs.setdefault('label', (eschema.type, rschema.type + '_object'))
else:
- kwargs['label'] = (eschema.type, rschema.type)
+ kwargs.setdefault('label', (eschema.type, rschema.type))
kwargs['eidparam'] = True
kwargs.setdefault('help', help)
if rschema.is_final():
--- a/web/test/unittest_views_basecontrollers.py Wed Sep 30 18:57:42 2009 +0200
+++ b/web/test/unittest_views_basecontrollers.py Wed Oct 07 12:31:08 2009 +0200
@@ -206,7 +206,7 @@
def test_interval_bound_constraint_success(self):
- feid = self.execute('INSERT File X: X name "toto.txt", X data %(data)s',
+ feid = self.execute('INSERT File X: X data_name "toto.txt", X data %(data)s',
{'data': Binary('yo')})[0][0]
req = self.request()
req.form = {'eid': ['X'],
--- a/web/test/unittest_views_editforms.py Wed Sep 30 18:57:42 2009 +0200
+++ b/web/test/unittest_views_editforms.py Wed Oct 07 12:31:08 2009 +0200
@@ -34,7 +34,6 @@
[('login', 'subject'),
('upassword', 'subject'),
('in_group', 'subject'),
- ('in_state', 'subject'),
('eid', 'subject'),
])
self.assertListEquals(rbc(e, 'secondary'),
@@ -59,6 +58,7 @@
# owned_by is defined both as subject and object relations on CWUser
self.assertListEquals(rbc(e, 'generated'),
[('use_email', 'subject'),
+ ('in_state', 'subject'),
('has_text', 'subject'),
('identity', 'subject'),
('is', 'subject'),
@@ -152,12 +152,12 @@
geid = self.execute('CWGroup X LIMIT 1')[0][0]
rset = self.execute('CWUser X LIMIT 1')
self.view('inline-edition', rset, row=0, col=0, rtype='in_group',
- peid=geid, role='subject', template=None, i18nctx='').source
+ peid=geid, role='object', template=None, i18nctx='').source
def test_automatic_inline_creation_formview(self):
geid = self.execute('CWGroup X LIMIT 1')[0][0]
self.view('inline-creation', None, etype='CWUser', rtype='in_group',
- peid=geid, template=None, i18nctx='', role='subject').source
+ peid=geid, template=None, i18nctx='', role='object').source
if __name__ == '__main__':
--- a/web/test/unittest_viewselector.py Wed Sep 30 18:57:42 2009 +0200
+++ b/web/test/unittest_viewselector.py Wed Oct 07 12:31:08 2009 +0200
@@ -27,6 +27,9 @@
schema.ViewSchemaAction,
actions.SiteInfoAction,
]
+FOOTERACTIONS = [wdoc.ChangeLogAction,
+ wdoc.AboutAction,
+ actions.PoweredByAction]
class ViewSelectorTC(CubicWebTC):
@@ -225,6 +228,7 @@
self.assertDictEqual(self.pactionsdict(req, None, skipcategories=()),
{'useractions': USERACTIONS,
'siteactions': SITEACTIONS,
+ 'footer': FOOTERACTIONS,
})
def test_possible_actions_no_entity(self):
@@ -233,6 +237,7 @@
self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
{'useractions': USERACTIONS,
'siteactions': SITEACTIONS,
+ 'footer': FOOTERACTIONS,
})
def test_possible_actions_same_type_entities(self):
@@ -241,6 +246,7 @@
self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
{'useractions': USERACTIONS,
'siteactions': SITEACTIONS,
+ 'footer': FOOTERACTIONS,
'mainactions': [actions.MultipleEditAction],
'moreactions': [actions.DeleteAction,
actions.AddNewAction],
@@ -252,6 +258,7 @@
self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
{'useractions': USERACTIONS,
'siteactions': SITEACTIONS,
+ 'footer': FOOTERACTIONS,
'moreactions': [actions.DeleteAction],
})
@@ -260,7 +267,9 @@
rset = req.execute('Any N, X WHERE X in_group Y, Y name N')
self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
{'useractions': USERACTIONS,
- 'siteactions': SITEACTIONS})
+ 'siteactions': SITEACTIONS,
+ 'footer': FOOTERACTIONS,
+ })
def test_possible_actions_eetype_cwuser_entity(self):
req = self.request()
@@ -268,6 +277,7 @@
self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
{'useractions': USERACTIONS,
'siteactions': SITEACTIONS,
+ 'footer': FOOTERACTIONS,
'mainactions': [actions.ModifyAction],
'moreactions': [actions.ManagePermissionsAction,
actions.AddRelatedActions,
@@ -381,25 +391,25 @@
tableview.TableView)
def test_interface_selector(self):
- image = self.add_entity('Image', name=u'bim.png', data=Binary('bim'))
+ image = self.add_entity('Image', data_name=u'bim.png', data=Binary('bim'))
# image primary view priority
req = self.request()
- rset = req.execute('Image X WHERE X name "bim.png"')
+ rset = req.execute('Image X WHERE X data_name "bim.png"')
self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset),
idownloadable.IDownloadablePrimaryView)
def test_score_entity_selector(self):
- image = self.add_entity('Image', name=u'bim.png', data=Binary('bim'))
+ image = self.add_entity('Image', data_name=u'bim.png', data=Binary('bim'))
# image primary view priority
req = self.request()
- rset = req.execute('Image X WHERE X name "bim.png"')
+ rset = req.execute('Image X WHERE X data_name "bim.png"')
self.assertIsInstance(self.vreg['views'].select('image', req, rset=rset),
idownloadable.ImageView)
- fileobj = self.add_entity('File', name=u'bim.txt', data=Binary('bim'))
+ fileobj = self.add_entity('File', data_name=u'bim.txt', data=Binary('bim'))
# image primary view priority
req = self.request()
- rset = req.execute('File X WHERE X name "bim.txt"')
+ rset = req.execute('File X WHERE X data_name "bim.txt"')
self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'image', req, rset=rset)
@@ -468,6 +478,7 @@
self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
{'useractions': USERACTIONS,
'siteactions': SITEACTIONS,
+ 'footer': FOOTERACTIONS,
'mainactions': [actions.ModifyAction],
'moreactions': [actions.ManagePermissionsAction,
actions.AddRelatedActions,
@@ -481,12 +492,12 @@
self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
{'useractions': USERACTIONS,
'siteactions': SITEACTIONS,
+ 'footer': FOOTERACTIONS,
'mainactions': [actions.ModifyAction],
'moreactions': [actions.ManagePermissionsAction,
actions.AddRelatedActions,
actions.DeleteAction,
actions.CopyAction,
- ],
})
--- a/web/views/autoform.py Wed Sep 30 18:57:42 2009 +0200
+++ b/web/views/autoform.py Wed Oct 07 12:31:08 2009 +0200
@@ -349,7 +349,7 @@
# display inline-edition view for all existing related entities
for i, relentity in enumerate(related.entities()):
if relentity.has_perm('update'):
- yield vvreg.select('inline-edition', self._cw, related,
+ yield vvreg.select('inline-edition', self._cw, rset=related,
row=i, col=0, rtype=rschema, role=role,
peid=entity.eid, pform=self)
--- a/web/views/basecontrollers.py Wed Sep 30 18:57:42 2009 +0200
+++ b/web/views/basecontrollers.py Wed Oct 07 12:31:08 2009 +0200
@@ -198,6 +198,10 @@
req.cnx.commit() # ValidationError may be raise on commit
except ValidationError, ex:
return (False, _validation_error(req, ex), ctrl._edited_entity)
+ except Exception, ex:
+ req.cnx.rollback()
+ req.exception('unexpected error while validating form')
+ return (False, req._(str(ex).decode('utf-8')), ctrl._edited_entity)
else:
return (True, ex.location, ctrl._edited_entity)
except Exception, ex:
--- a/web/views/editforms.py Wed Sep 30 18:57:42 2009 +0200
+++ b/web/views/editforms.py Wed Oct 07 12:31:08 2009 +0200
@@ -501,7 +501,7 @@
def add_hiddens(self, form, entity):
"""to ease overriding (see cubes.vcsfile.views.forms for instance)"""
iid = 'rel-%s-%s-%s' % (self.peid, self.rtype, entity.eid)
- # * str(self.rtype) in case it's a schema object
+ # * str(self.rtype) in case it's a schema object
# * neged_role() since role is the for parent entity, we want the role
# of the inlined entity
form.form_add_hidden(name=str(self.rtype), value=self.peid,
--- a/web/views/formrenderers.py Wed Sep 30 18:57:42 2009 +0200
+++ b/web/views/formrenderers.py Wed Oct 07 12:31:08 2009 +0200
@@ -253,7 +253,7 @@
display_help = False
def _render_fields(self, fields, w, form):
- w(u'<table border="0">')
+ w(u'<table border="0" class="htableForm">')
w(u'<tr>')
for field in fields:
if self.display_label:
--- a/web/views/schema.py Wed Sep 30 18:57:42 2009 +0200
+++ b/web/views/schema.py Wed Oct 07 12:31:08 2009 +0200
@@ -24,7 +24,9 @@
ALWAYS_SKIP_TYPES = BASE_TYPES | SCHEMA_TYPES
SKIP_TYPES = ALWAYS_SKIP_TYPES | META_RTYPES | SYSTEM_RTYPES
-SKIP_TYPES.update(set(('Transition', 'State', 'TrInfo',
+SKIP_TYPES.update(set(('Transition', 'State', 'TrInfo', 'Workflow',
+ 'WorkflowTransition', 'BaseTransition',
+ 'SubWorkflowExitPoint',
'CWUser', 'CWGroup',
'CWCache', 'CWProperty', 'CWPermission',
'ExternalUri')))
--- a/web/views/tabs.py Wed Sep 30 18:57:42 2009 +0200
+++ b/web/views/tabs.py Wed Oct 07 12:31:08 2009 +0200
@@ -15,6 +15,7 @@
from cubicweb.view import EntityView
from cubicweb.common import tags, uilib
from cubicweb.utils import make_uid
+from cubicweb.web.views import primary
class LazyViewMixin(object):
"""provides two convenience methods for the tab machinery
@@ -30,13 +31,11 @@
});""" % {'event': 'load_%s' % vid, 'vid': vid,
'reloadable' : str(reloadable).lower()})
- def lazyview(self, vid, rql=None, eid=None, rset=None, static=False,
+ def lazyview(self, vid, rql=None, eid=None, rset=None, tabid=None,
reloadable=False, show_spinbox=True, w=None):
"""a lazy version of wview
first version only support lazy viewing for an entity at a time
"""
- assert rql or eid or rset or static, \
- 'lazyview wants at least : rql, or an eid, or an rset -- or call it with static=True'
w = w or self.w
self._cw.add_js('cubicweb.lazy.js')
urlparams = {'vid' : vid, 'fname' : 'view'}
@@ -47,12 +46,12 @@
elif rset:
urlparams['rql'] = rset.printable_rql()
w(u'<div id="lazy-%s" cubicweb:loadurl="%s">' % (
- vid, xml_escape(self._cw.build_url('json', **urlparams))))
+ tabid or vid, xml_escape(self._cw.build_url('json', **urlparams))))
if show_spinbox:
w(u'<img src="data/loading.gif" id="%s-hole" alt="%s"/>'
- % (vid, self._cw._('loading')))
+ % (tabid or vid, self._cw._('loading')))
w(u'</div>')
- self._prepare_bindings(vid, reloadable)
+ self._prepare_bindings(tabid or vid, reloadable)
def forceview(self, vid):
"""trigger an event that will force immediate loading of the view
@@ -70,30 +69,39 @@
def cookie_name(self):
return str('%s_active_tab' % self._cw.config.appid)
- def active_tab(self, tabs, default):
- formtab = self._cw.form.get('tab')
- if formtab in tabs:
- return formtab
+ def active_tab(self, default):
+ if 'tab' in self._cw.form:
+ return self._cw.form['tab']
cookies = self._cw.get_cookie()
cookiename = self.cookie_name
activetab = cookies.get(cookiename)
if activetab is None:
cookies[cookiename] = default
self._cw.set_cookie(cookies, cookiename)
- tab = default
- else:
- tab = activetab.value
- return tab in tabs and tab or default
+ return default
+ return activetab.value
- def prune_tabs(self, tabs):
+ def prune_tabs(self, tabs, default_tab):
selected_tabs = []
+ may_be_active_tab = self.active_tab(default_tab)
+ active_tab = default_tab
+ viewsvreg = self._cw.vreg['views']
for tab in tabs:
try:
- self._cw.vreg['views'].select(tab, self._cw, rset=self.cw_rset)
- selected_tabs.append(tab)
+ tabid, tabkwargs = tab
+ tabkwargs = tabkwargs.copy()
+ except ValueError:
+ tabid, tabkwargs = tab, {}
+ tabkwargs.setdefault('rset', self.rset)
+ vid = tabkwargs.get('vid', tabid)
+ try:
+ viewsvreg.select(vid, self._cw, **tabkwargs)
+ selected_tabs.append((tabid, tabkwargs))
except NoSelectableObject:
continue
- return selected_tabs
+ if tabid == may_be_active_tab:
+ active_tab = tabid
+ return selected_tabs, active_tab
def render_tabs(self, tabs, default, entity=None):
# delegate to the default tab if there is more than one entity
@@ -105,30 +113,31 @@
self._cw.add_js(('ui.core.js', 'ui.tabs.js',
'cubicweb.ajax.js', 'cubicweb.tabs.js', 'cubicweb.lazy.js'))
# prune tabs : not all are to be shown
- tabs = self.prune_tabs(tabs)
- # select a tab
- active_tab = self.active_tab(tabs, default)
+ tabs, active_tab = self.prune_tabs(tabs, default)
# build the html structure
w = self.w
uid = entity and entity.eid or make_uid('tab')
w(u'<div id="entity-tabs-%s">' % uid)
w(u'<ul>')
- for tab in tabs:
+ active_tab_idx = None
+ for i, (tabid, tabkwargs) in enumerate(tabs):
w(u'<li>')
- w(u'<a href="#%s">' % tab)
- w(u'<span onclick="set_tab(\'%s\', \'%s\')">' % (tab, self.cookie_name))
- w(self._cw._(tab))
+ w(u'<a href="#%s">' % tabid)
+ w(u'<span onclick="set_tab(\'%s\', \'%s\')">' % (tabid, self.cookie_name))
+ w(tabkwargs.pop('label', self._cw._(tabid)))
w(u'</span>')
w(u'</a>')
w(u'</li>')
+ if tabid == active_tab:
+ active_tab_idx = i
w(u'</ul>')
w(u'</div>')
- for tab in tabs:
- w(u'<div id="%s">' % tab)
- if entity:
- self.lazyview(tab, eid=entity.eid)
- else:
- self.lazyview(tab, static=True)
+ for tabid, tabkwargs in tabs:
+ w(u'<div id="%s">' % tabid)
+ tabkwargs.setdefault('tabid', tabid)
+ tabkwargs.setdefault('vid', tabid)
+ tabkwargs.setdefault('rset', self.rset)
+ self.lazyview(**tabkwargs)
w(u'</div>')
# call the set_tab() JS function *after* each tab is generated
# because the callback binding needs to be done before
@@ -136,7 +145,7 @@
self._cw.add_onload(u"""
jQuery('#entity-tabs-%(eeid)s > ul').tabs( { selected: %(tabindex)s });
set_tab('%(vid)s', '%(cookiename)s');
-""" % {'tabindex' : tabs.index(active_tab),
+""" % {'tabindex' : active_tab_idx,
'vid' : active_tab,
'eeid' : (entity and entity.eid or uid),
'cookiename' : self.cookie_name})
@@ -170,3 +179,31 @@
self.w(tags.h1(self._cw._(self.title)))
self.wview(self.vid, rset, 'noresult')
self.w(u'</div>')
+
+
+class TabedPrimaryView(TabsMixin, primary.PrimaryView):
+ __abstract__ = True # don't register
+
+ tabs = ['main_tab']
+ default_tab = 'main_tab'
+
+ def cell_call(self, row, col):
+ entity = self.complete_entity(row, col)
+ self.render_entity_title(entity)
+ self.render_entity_metadata(entity)
+ self.render_tabs(self.tabs, self.default_tab, entity)
+
+
+class PrimaryTab(primary.PrimaryView):
+ id = 'main_tab'
+ title = None
+
+ def is_primary(self):
+ return True
+
+ def render_entity_title(self, entity):
+ pass
+
+ def render_entity_metadata(self, entity):
+ pass
+
--- a/web/views/urlrewrite.py Wed Sep 30 18:57:42 2009 +0200
+++ b/web/views/urlrewrite.py Wed Oct 07 12:31:08 2009 +0200
@@ -169,6 +169,8 @@
match = inputurl.match(uri)
for key in formgroups:
form2[key] = match.group(key)
+ if "vtitle" in form2:
+ form2['vtitle'] = req._(form2['vtitle'])
if form2:
req.form.update(form2)
return controller, rset
--- a/web/webconfig.py Wed Sep 30 18:57:42 2009 +0200
+++ b/web/webconfig.py Wed Oct 07 12:31:08 2009 +0200
@@ -86,12 +86,6 @@
'help': 'web instance query log file',
'group': 'main', 'inputlevel': 2,
}),
- ('pyro-instance-id',
- {'type' : 'string',
- 'default': Method('default_instance_id'),
- 'help': 'CubicWeb instance identifier in the Pyro name server',
- 'group': 'pyro-client', 'inputlevel': 1,
- }),
# web configuration
('https-url',
{'type' : 'string',
--- a/web/webctl.py Wed Sep 30 18:57:42 2009 +0200
+++ b/web/webctl.py Wed Oct 07 12:31:08 2009 +0200
@@ -16,11 +16,11 @@
def bootstrap(self, cubes, inputlevel=0):
"""bootstrap this configuration"""
- print '\n'+underline_title('Generic web configuration')
+ print '\n' + underline_title('Generic web configuration')
config = self.config
if config.repo_method == 'pyro':
- print '\n'+underline_title('Repository server configuration')
- config.input_config('pyro-client', inputlevel)
+ print '\n' + underline_title('Pyro configuration')
+ config.input_config('pyro', inputlevel)
if ASK.confirm('Allow anonymous access ?', False):
config.global_set_option('anonymous-user', 'anon')
config.global_set_option('anonymous-password', 'anon')