--- a/.hgtags Fri May 23 18:35:13 2014 +0200
+++ b/.hgtags Fri Jun 27 11:48:26 2014 +0200
@@ -329,6 +329,9 @@
fa00fc251d57f61e619d9c905502745fae21c58c cubicweb-centos-version-3.17.14-1
fa00fc251d57f61e619d9c905502745fae21c58c cubicweb-version-3.17.14
fa00fc251d57f61e619d9c905502745fae21c58c cubicweb-debian-version-3.17.14-1
+ee413076752b3e606801ef55e48f7e7ccd1f7238 cubicweb-version-3.17.15
+ee413076752b3e606801ef55e48f7e7ccd1f7238 cubicweb-debian-version-3.17.15-1
+ee413076752b3e606801ef55e48f7e7ccd1f7238 cubicweb-centos-version-3.17.15-1
db37bf35a1474843ded0a537f9cb4838f4a78cda cubicweb-version-3.18.0
db37bf35a1474843ded0a537f9cb4838f4a78cda cubicweb-debian-version-3.18.0-1
db37bf35a1474843ded0a537f9cb4838f4a78cda cubicweb-centos-version-3.18.0-1
@@ -344,6 +347,12 @@
0176da9bc75293e200de4f7b934c5d4c7c805199 cubicweb-version-3.18.4
0176da9bc75293e200de4f7b934c5d4c7c805199 cubicweb-debian-version-3.18.4-1
0176da9bc75293e200de4f7b934c5d4c7c805199 cubicweb-centos-version-3.18.4-1
+5071b69b6b0b0de937bb231404cbf652a103dbe0 cubicweb-version-3.18.5
+5071b69b6b0b0de937bb231404cbf652a103dbe0 cubicweb-debian-version-3.18.5-1
+5071b69b6b0b0de937bb231404cbf652a103dbe0 cubicweb-centos-version-3.18.5-1
1141927b8494aabd16e31b0d0d9a50fe1fed5f2f cubicweb-version-3.19.0
1141927b8494aabd16e31b0d0d9a50fe1fed5f2f cubicweb-debian-version-3.19.0-1
1141927b8494aabd16e31b0d0d9a50fe1fed5f2f cubicweb-centos-version-3.19.0-1
+1fe4bc4a8ac8831a379e9ebea08d75fbb6fc5c2a cubicweb-version-3.19.1
+1fe4bc4a8ac8831a379e9ebea08d75fbb6fc5c2a cubicweb-debian-version-3.19.1-1
+1fe4bc4a8ac8831a379e9ebea08d75fbb6fc5c2a cubicweb-centos-version-3.19.1-1
--- a/MANIFEST.in Fri May 23 18:35:13 2014 +0200
+++ b/MANIFEST.in Fri Jun 27 11:48:26 2014 +0200
@@ -10,7 +10,7 @@
recursive-include misc *.py *.png *.display
include web/views/*.pt
-recursive-include web/data external_resources *.js *.css *.py *.png *.gif *.ico *.ttf
+recursive-include web/data external_resources *.js *.css *.py *.png *.gif *.ico *.ttf *.svg *.woff *.eot
recursive-include web/wdoc *.rst *.png *.xml ChangeLog*
recursive-include devtools/data *.js *.css *.sh
--- a/__pkginfo__.py Fri May 23 18:35:13 2014 +0200
+++ b/__pkginfo__.py Fri Jun 27 11:48:26 2014 +0200
@@ -22,7 +22,7 @@
modname = distname = "cubicweb"
-numversion = (3, 19, 0)
+numversion = (3, 19, 1)
version = '.'.join(str(num) for num in numversion)
description = "a repository of entities / relations for knowledge management"
--- a/cubicweb.spec Fri May 23 18:35:13 2014 +0200
+++ b/cubicweb.spec Fri Jun 27 11:48:26 2014 +0200
@@ -7,7 +7,7 @@
%endif
Name: cubicweb
-Version: 3.19.0
+Version: 3.19.1
Release: logilab.1%{?dist}
Summary: CubicWeb is a semantic web application framework
Source0: http://download.logilab.org/pub/cubicweb/cubicweb-%{version}.tar.gz
--- a/cwconfig.py Fri May 23 18:35:13 2014 +0200
+++ b/cwconfig.py Fri Jun 27 11:48:26 2014 +0200
@@ -787,7 +787,6 @@
_cubes = None
def init_cubes(self, cubes):
- assert self._cubes is None, repr(self._cubes)
self._cubes = self.reorder_cubes(cubes)
# load cubes'__init__.py file first
for cube in cubes:
--- a/cwctl.py Fri May 23 18:35:13 2014 +0200
+++ b/cwctl.py Fri Jun 27 11:48:26 2014 +0200
@@ -317,7 +317,7 @@
'used together')
class CreateInstanceCommand(Command):
- """Create an instance from a cube. This is an unified
+ """Create an instance from a cube. This is a unified
command which can handle web / server / all-in-one installation
according to available parts of the software library and of the
desired cube.
@@ -849,7 +849,7 @@
in batch mode.
By default it will connect to a local instance using an in memory
- connection, unless an URL to a running instance is specified.
+ connection, unless a URL to a running instance is specified.
Arguments after bare "--" string will not be processed by the shell command
You can use it to pass extra arguments to your script and expect for
--- a/cwvreg.py Fri May 23 18:35:13 2014 +0200
+++ b/cwvreg.py Fri Jun 27 11:48:26 2014 +0200
@@ -753,7 +753,7 @@
return self.property_info(key)['default']
def typed_value(self, key, value):
- """value is an unicode string, return it correctly typed. Let potential
+ """value is a unicode string, return it correctly typed. Let potential
type errors propagate.
"""
pdef = self.property_info(key)
--- a/dataimport.py Fri May 23 18:35:13 2014 +0200
+++ b/dataimport.py Fri Jun 27 11:48:26 2014 +0200
@@ -141,13 +141,13 @@
for row in it:
decoded = [item.decode(encoding) for item in row]
if not skip_empty or any(decoded):
- yield [item.decode(encoding) for item in row]
+ yield decoded
else:
- # Skip first line
- try:
- row = it.next()
- except csv.Error:
- pass
+ if skipfirst:
+ try:
+ row = it.next()
+ except csv.Error:
+ pass
# Safe version, that can cope with error in CSV file
while True:
try:
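Besides yielding the already-decoded row, the error-tolerant branch now honours ``skipfirst`` instead of always dropping the first line. A minimal usage sketch, assuming this hunk belongs to ``cubicweb.dataimport.ucsvreader`` and that the ``encoding``, ``skipfirst`` and ``skip_empty`` keyword arguments seen in the hunk are part of its signature:

.. sourcecode:: python

    from cubicweb.dataimport import ucsvreader

    # hypothetical CSV file with a header line and possibly blank lines
    with open('people.csv', 'rb') as stream:
        rows = list(ucsvreader(stream, encoding='utf-8',
                               skipfirst=True, skip_empty=True))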
@@ -472,11 +472,13 @@
if isinstance(value, unicode):
value = value.encode(encoding)
elif isinstance(value, (date, datetime)):
-                # Do not use strftime, as it yields issue
-                # with date < 1900
-                value = '%04d-%02d-%02d' % (value.year,
-                                            value.month,
-                                            value.day)
+                # build the string by hand rather than with strftime, which
+                # fails for dates before 1900; test datetime before the value
+                # is replaced by a string
+                if isinstance(value, datetime):
+                    value = '%04d-%02d-%02d %02d:%02d:%02d' % (
+                        value.year, value.month, value.day,
+                        value.hour, value.minute, value.second)
+                else:
+                    value = '%04d-%02d-%02d' % (value.year,
+                                                value.month,
+                                                value.day)
else:
return None
# We push the value to the new formatted row
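For reference, a standalone sketch of the same conversion with the ``datetime`` check done before the value is turned into a string (the order matters, since a ``str`` no longer carries hour/minute/second); the helper name is illustrative only:

.. sourcecode:: python

    from datetime import date, datetime

    def format_temporal(value):
        # strftime raises ValueError for years < 1900 on Python 2, so format
        # by hand; test datetime first since datetime is a subclass of date
        if isinstance(value, datetime):
            return '%04d-%02d-%02d %02d:%02d:%02d' % (
                value.year, value.month, value.day,
                value.hour, value.minute, value.second)
        elif isinstance(value, date):
            return '%04d-%02d-%02d' % (value.year, value.month, value.day)
        return None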
@@ -620,11 +622,13 @@
self.rql('SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype,
{'x': int(eid_from), 'y': int(eid_to)})
+ @deprecated("[3.19] use session.find(*args, **kwargs).entities() instead")
def find_entities(self, *args, **kwargs):
- return self.session.find_entities(*args, **kwargs)
+ return self.session.find(*args, **kwargs).entities()
+ @deprecated("[3.19] use session.find(*args, **kwargs).one() instead")
def find_one_entity(self, *args, **kwargs):
- return self.session.find_one_entity(*args, **kwargs)
+ return self.session.find(*args, **kwargs).one()
# the import controller ########################################################
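A migration sketch for callers of the deprecated helpers, assuming a store object exposing its ``session`` as in the class above:

.. sourcecode:: python

    # before (deprecated in 3.19)
    user = store.find_one_entity('CWUser', login=u'admin')
    groups = store.find_entities('CWGroup')

    # after: go through the session's find() / ResultSet API
    user = store.session.find('CWUser', login=u'admin').one()
    groups = store.session.find('CWGroup').entities()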
@@ -858,30 +862,38 @@
del entity.cw_extra_kwargs
entity.cw_edited = EditedEntity(entity)
for attr in self.etype_attrs:
- entity.cw_edited.edited_attribute(attr, self.generate(entity, attr))
+ genfunc = self.generate(attr)
+ if genfunc:
+ entity.cw_edited.edited_attribute(attr, genfunc(entity))
rels = {}
for rel in self.etype_rels:
- rels[rel] = self.generate(entity, rel)
+ genfunc = self.generate(rel)
+ if genfunc:
+ rels[rel] = genfunc(entity)
return entity, rels
def init_entity(self, entity):
entity.eid = self.source.create_eid(self.session)
for attr in self.entity_attrs:
- entity.cw_edited.edited_attribute(attr, self.generate(entity, attr))
+ genfunc = self.generate(attr)
+ if genfunc:
+ entity.cw_edited.edited_attribute(attr, genfunc(entity))
- def generate(self, entity, rtype):
- return getattr(self, 'gen_%s' % rtype)(entity)
+ def generate(self, rtype):
+ return getattr(self, 'gen_%s' % rtype, None)
def gen_cwuri(self, entity):
return u'%s%s' % (self.baseurl, entity.eid)
def gen_creation_date(self, entity):
return self.time
+
def gen_modification_date(self, entity):
return self.time
def gen_created_by(self, entity):
return self.session.user.eid
+
def gen_owned_by(self, entity):
return self.session.user.eid
--- a/dbapi.py Fri May 23 18:35:13 2014 +0200
+++ b/dbapi.py Fri Jun 27 11:48:26 2014 +0200
@@ -118,7 +118,7 @@
* a simple instance id for in-memory connection
- * an uri like scheme://host:port/instanceid where scheme may be one of
+    * a URI like scheme://host:port/instanceid where scheme may be one of
'pyro', 'inmemory' or 'zmqpickle'
* if scheme is 'pyro', <host:port> determine the name server address. If
@@ -343,10 +343,12 @@
# low level session data management #######################################
+ @deprecated('[3.19] use session or transaction data')
def get_shared_data(self, key, default=None, pop=False, txdata=False):
"""see :meth:`Connection.get_shared_data`"""
return self.cnx.get_shared_data(key, default, pop, txdata)
+ @deprecated('[3.19] use session or transaction data')
def set_shared_data(self, key, value, txdata=False, querydata=None):
"""see :meth:`Connection.set_shared_data`"""
if querydata is not None:
@@ -409,7 +411,7 @@
"""execute a rql query, return resulting rows and their description in
a :class:`~cubicweb.rset.ResultSet` object
- * `rql` should be an Unicode string or a plain ASCII string, containing
+ * `rql` should be a Unicode string or a plain ASCII string, containing
the rql query
* `args` the optional args dictionary associated to the query, with key
--- a/debian/changelog Fri May 23 18:35:13 2014 +0200
+++ b/debian/changelog Fri Jun 27 11:48:26 2014 +0200
@@ -1,9 +1,21 @@
+cubicweb (3.19.1-1) unstable; urgency=low
+
+ * new upstream release
+
+ -- Julien Cristau <julien.cristau@logilab.fr> Tue, 03 Jun 2014 12:16:00 +0200
+
cubicweb (3.19.0-1) unstable; urgency=low
* new upstream release
-- Julien Cristau <julien.cristau@logilab.fr> Mon, 28 Apr 2014 18:35:27 +0200
+cubicweb (3.18.5-1) unstable; urgency=low
+
+ * new upstream release
+
+ -- Aurelien Campeas <aurelien.campeas@logilab.fr> Thu, 05 Jun 2014 16:13:03 +0200
+
cubicweb (3.18.4-1) unstable; urgency=low
* new upstream release
@@ -34,6 +46,12 @@
-- Julien Cristau <julien.cristau@logilab.fr> Fri, 10 Jan 2014 17:14:18 +0100
+cubicweb (3.17.15-1) unstable; urgency=low
+
+ * new upstream release
+
+ -- Aurelien Campeas <aurelien.campeas@logilab.fr> Wed, 13 May 2014 17:47:00 +0200
+
cubicweb (3.17.14-1) unstable; urgency=low
* new upstream release
--- a/debian/control Fri May 23 18:35:13 2014 +0200
+++ b/debian/control Fri Jun 27 11:48:26 2014 +0200
@@ -170,7 +170,7 @@
cubicweb-forgotpwd (<< 0.4.3),
cubicweb-registration (<< 0.4.3),
cubicweb-vcsfile (<< 1.15.0),
- cubicweb-bootstrap,
+ cubicweb-bootstrap (<< 0.6),
Description: common library for the CubicWeb framework
CubicWeb is a semantic web application framework.
.
--- a/devtools/__init__.py Fri May 23 18:35:13 2014 +0200
+++ b/devtools/__init__.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -293,8 +293,9 @@
db_cache = {}
explored_glob = set()
- def __init__(self, config):
+ def __init__(self, config, init_config=None):
self.config = config
+ self.init_config = init_config
self._repo = None
# pure consistency check
assert self.system_source['db-driver'] == self.DRIVER
@@ -382,6 +383,9 @@
"""
if self._repo is None:
self._repo = self._new_repo(self.config)
+ # config has now been bootstrapped, call init_config if specified
+ if self.init_config is not None:
+ self.init_config(self.config)
repo = self._repo
repo.turn_repo_on()
if startup and not repo._has_started:
@@ -490,15 +494,9 @@
self.restore_database(DEFAULT_EMPTY_DB_ID)
repo = self.get_repo(startup=True)
cnx = self.get_cnx()
- session = repo._sessions[cnx.sessionid]
- session.set_cnxset()
- _commit = session.commit
- def keep_cnxset_commit(free_cnxset=False):
- _commit(free_cnxset=free_cnxset)
- session.commit = keep_cnxset_commit
- pre_setup_func(session, self.config)
- session.commit()
- cnx.close()
+ with cnx:
+ pre_setup_func(cnx._cnx, self.config)
+ cnx.commit()
self.backup_database(test_db_id)
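The pre-setup hook now runs against a plain repoapi connection instead of a hand-patched session. A sketch of a test relying on it, assuming ``pre_setup_database`` keeps the ``(cnx, config)`` signature used above:

.. sourcecode:: python

    from cubicweb.devtools.testlib import CubicWebTC

    class GroupSetupTC(CubicWebTC):          # hypothetical test case
        test_db_id = 'group-setup'           # cache the prepared database under this id

        @classmethod
        def pre_setup_database(cls, cnx, config):
            # executed once, before the database snapshot is taken
            cnx.create_entity('CWGroup', name=u'reviewers')
            cnx.commit()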
@@ -542,8 +540,8 @@
for datadir in cls.__CTL:
subprocess.call(['pg_ctl', 'stop', '-D', datadir, '-m', 'fast'])
- def __init__(self, config):
- super(PostgresTestDataBaseHandler, self).__init__(config)
+ def __init__(self, *args, **kwargs):
+ super(PostgresTestDataBaseHandler, self).__init__(*args, **kwargs)
datadir = join(self.config.apphome, 'pgdb')
if not exists(datadir):
subprocess.check_call(['initdb', '-D', datadir, '-E', 'utf-8', '--locale=C'])
@@ -611,7 +609,8 @@
finally:
templcursor.close()
cnx.close()
- init_repository(self.config, interactive=False)
+ init_repository(self.config, interactive=False,
+ init_config=self.init_config)
except BaseException:
if self.dbcnx is not None:
self.dbcnx.rollback()
@@ -687,7 +686,8 @@
"""initialize a fresh sqlserver databse used for testing purpose"""
if self.config.init_repository:
from cubicweb.server import init_repository
- init_repository(self.config, interactive=False, drop=True)
+ init_repository(self.config, interactive=False, drop=True,
+ init_config=self.init_config)
### sqlite test database handling ##############################################
@@ -764,7 +764,8 @@
# initialize the database
from cubicweb.server import init_repository
self._cleanup_database(self.absolute_dbfile())
- init_repository(self.config, interactive=False)
+ init_repository(self.config, interactive=False,
+ init_config=self.init_config)
import atexit
atexit.register(SQLiteTestDataBaseHandler._cleanup_all_tmpdb)
@@ -858,7 +859,7 @@
# XXX a class method on Test ?
_CONFIG = None
-def get_test_db_handler(config):
+def get_test_db_handler(config, init_config=None):
global _CONFIG
if _CONFIG is not None and config is not _CONFIG:
from logilab.common.modutils import cleanup_sys_modules
@@ -879,7 +880,7 @@
key = (driver, config)
handlerkls = HANDLERS.get(driver, None)
if handlerkls is not None:
- handler = handlerkls(config)
+ handler = handlerkls(config, init_config)
if config.skip_db_create_and_restore:
handler = NoCreateDropDatabaseHandler(handler)
HCACHE.set(config, handler)
--- a/devtools/testlib.py Fri May 23 18:35:13 2014 +0200
+++ b/devtools/testlib.py Fri Jun 27 11:48:26 2014 +0200
@@ -203,17 +203,26 @@
self._repo = repo
self._login = login
self.requestcls = requestcls
- # opening session
- #
- # XXX this very hackish code should be cleaned and move on repo.
- with repo.internal_cnx() as cnx:
- rset = cnx.execute('CWUser U WHERE U login %(u)s', {'u': login})
- user = rset.get_entity(0, 0)
+ self._session = self._unsafe_connect(login)
+
+ def _unsafe_connect(self, login, **kwargs):
+ """ a completely unsafe connect method for the tests """
+ # use an internal connection
+ with self._repo.internal_cnx() as cnx:
+ # try to get a user object
+ user = cnx.find('CWUser', login=login).one()
user.groups
user.properties
- self._session = Session(user, repo)
- repo._sessions[self._session.sessionid] = self._session
- self._session.user._cw = self._session
+ user.login
+ session = Session(user, self._repo)
+ self._repo._sessions[session.sessionid] = session
+ user._cw = user.cw_rset.req = session
+ with session.new_cnx() as cnx:
+ self._repo.hm.call_hooks('session_open', cnx)
+            # commit the connection at this point in case write operations
+            # have been performed during `session_open` hooks
+ cnx.commit()
+ return session
@contextmanager
def repo_cnx(self):
@@ -404,12 +413,9 @@
def _init_repo(self):
"""init the repository and connection to it.
"""
- # setup configuration for test
- self.init_config(self.config)
# get or restore a working db.
- db_handler = devtools.get_test_db_handler(self.config)
+ db_handler = devtools.get_test_db_handler(self.config, self.init_config)
db_handler.build_db_cache(self.test_db_id, self.pre_setup_database)
-
db_handler.restore_database(self.test_db_id)
self.repo = db_handler.get_repo(startup=True)
# get an admin session (without actual login)
@@ -492,14 +498,18 @@
config.mode = 'test'
return config
- @classmethod
+ @classmethod # XXX could be turned into a regular method
def init_config(cls, config):
"""configuration initialization hooks.
You may only want to override the configuration logic here.
Otherwise, consider using a different :class:`ApptestConfiguration`
- defined in the `configcls` class attribute"""
+ defined in the `configcls` class attribute.
+
+ This method will be called by the database handler once the config has
+ been properly bootstrapped.
+ """
source = config.system_source_config
cls.admlogin = unicode(source['db-user'])
cls.admpassword = source['db-password']
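A sketch of a test case overriding this hook, assuming the usual ``CubicWebTC`` subclassing; the option tweaked here is only an illustration:

.. sourcecode:: python

    from cubicweb.devtools.testlib import CubicWebTC

    class AnonAccessTC(CubicWebTC):          # hypothetical test case
        @classmethod
        def init_config(cls, config):
            super(AnonAccessTC, cls).init_config(config)
            # the config is already bootstrapped when this runs
            config.global_set_option('anonymous-user', 'anon')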
--- a/doc/3.19.rst Fri May 23 18:35:13 2014 +0200
+++ b/doc/3.19.rst Fri Jun 27 11:48:26 2014 +0200
@@ -85,6 +85,9 @@
The authentication stack has been altered to use the ``repoapi`` instead of
the ``dbapi``. Cubes adding new element to this stack are likely to break.
+Session data can be accessed using the ``cnx.data`` dictionary, while
+transaction data is available through ``cnx.transaction_data``. These
+replace the ``[gs]et_shared_data`` methods and their optional ``txdata``
+argument.
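A minimal sketch of the replacement, assuming a repoapi connection ``cnx``:

.. sourcecode:: python

    # previously: cnx.set_shared_data('lang', u'fr') / cnx.get_shared_data('lang')
    cnx.data['lang'] = u'fr'                  # session-scoped data
    lang = cnx.data.get('lang')

    cnx.transaction_data['seen'] = set()      # transaction-scoped data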
New API in tests
~~~~~~~~~~~~~~~~
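The tests further down this changeset rely on per-user access objects (``admin_access``, ``new_access``); a minimal sketch, assuming a ``CubicWebTC`` subclass:

.. sourcecode:: python

    class MyTC(CubicWebTC):                   # hypothetical test case
        def test_something(self):
            with self.admin_access.repo_cnx() as cnx:
                self.create_user(cnx, 'member')
                cnx.commit()
            with self.new_access('member').web_request() as req:
                rset = req.execute('Any U WHERE U is CWUser')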
--- a/doc/book/en/devrepo/profiling.rst Fri May 23 18:35:13 2014 +0200
+++ b/doc/book/en/devrepo/profiling.rst Fri Jun 27 11:48:26 2014 +0200
@@ -10,7 +10,7 @@
queries. In your ``all-in-one.conf`` file, set the **query-log-file** option::
# web application query log file
- query-log-file=~/myapp-rql.log
+ query-log-file=/home/user/myapp-rql.log
Then restart your application, reload your page and stop your application.
The file ``myapp-rql.log`` now contains the list of RQL queries that were
@@ -28,7 +28,7 @@
.. sourcecode:: sh
- $ cubicweb-ctl exlog ~/myapp-rql.log
+ $ cubicweb-ctl exlog /home/user/myapp-rql.log
0.07 50 Any A WHERE X eid %(x)s, X firstname A {}
0.05 50 Any A WHERE X eid %(x)s, X lastname A {}
0.01 1 Any X,AA ORDERBY AA DESC WHERE E eid %(x)s, E employees X, X modification_date AA {}
--- a/doc/book/en/devweb/rtags.rst Fri May 23 18:35:13 2014 +0200
+++ b/doc/book/en/devweb/rtags.rst Fri Jun 27 11:48:26 2014 +0200
@@ -17,7 +17,7 @@
The part of uicfg that deals with primary views is in the
:ref:`primary_view_configuration` chapter.
-.. automodule:: cubicweb.web.uicfg
+.. automodule:: cubicweb.web.views.uicfg
The uihelper module
--- a/doc/tutorials/dataimport/diseasome_parser.py Fri May 23 18:35:13 2014 +0200
+++ b/doc/tutorials/dataimport/diseasome_parser.py Fri Jun 27 11:48:26 2014 +0200
@@ -31,9 +31,9 @@
def _retrieve_reltype(uri):
"""
- Retrieve a relation type from an URI.
+ Retrieve a relation type from a URI.
- Internal function which takes an URI containing a relation type as input
+ Internal function which takes a URI containing a relation type as input
and returns the name of the relation.
If no URI string is given, then the function returns None.
"""
--- a/doc/tutorials/dataimport/schema.py Fri May 23 18:35:13 2014 +0200
+++ b/doc/tutorials/dataimport/schema.py Fri Jun 27 11:48:26 2014 +0200
@@ -47,7 +47,7 @@
only has unique omim and omim_page identifiers, when it has them,
these attributes have been defined through relations such that
for each disease there is at most one omim and one omim_page.
- Each such identifier is defined through an URI, that is, through
+ Each such identifier is defined through a URI, that is, through
an ``ExternalUri`` entity.
That is, these relations are of cardinality "?*". For optimization
purposes, one might be tempted to define them as inlined, by setting
@@ -55,7 +55,7 @@
- chromosomal_location is also defined through a relation of
cardinality "?*", since any disease has at most one chromosomal
location associated to it.
- - same_as is also defined through an URI, and hence through a
+ - same_as is also defined through a URI, and hence through a
relation having ``ExternalUri`` entities as objects.
For more information on this data set and the data set itself,
@@ -109,12 +109,12 @@
- label, defined through a Yams String.
- bio2rdf_symbol, also defined as a Yams String, since it is
just an identifier.
- - gene_id is an URI identifying a gene, hence it is defined
+ - gene_id is a URI identifying a gene, hence it is defined
as a relation with an ``ExternalUri`` object.
- a pair of unique identifiers in the HUGO Gene Nomenclature
Committee (http://www.genenames.org/). They are defined
as ``ExternalUri`` entities as well.
- - same_as is also defined through an URI, and hence through a
+ - same_as is also defined through a URI, and hence through a
relation having ``ExternalUri`` entities as objects.
"""
# Corresponds to http://www.w3.org/2000/01/rdf-schema#label
--- a/entities/adapters.py Fri May 23 18:35:13 2014 +0200
+++ b/entities/adapters.py Fri Jun 27 11:48:26 2014 +0200
@@ -166,7 +166,7 @@
__abstract__ = True
def download_url(self, **kwargs): # XXX not really part of this interface
- """return an url to download entity's content"""
+ """return a URL to download entity's content"""
raise NotImplementedError
def download_content_type(self):
@@ -187,13 +187,11 @@
# XXX should propose to use two different relations for children/parent
class ITreeAdapter(view.EntityAdapter):
- """This adapter has to be overriden to be configured using the
- tree_relation, child_role and parent_role class attributes to benefit from
- this default implementation.
+ """This adapter provides a tree interface.
- This adapter provides a tree interface. It has to be overriden to be
- configured using the tree_relation, child_role and parent_role class
- attributes to benefit from this default implementation.
+    It has to be overridden to be configured using the tree_relation,
+ child_role and parent_role class attributes to benefit from this default
+ implementation.
This class provides the following methods:
--- a/entities/test/data/schema.py Fri May 23 18:35:13 2014 +0200
+++ b/entities/test/data/schema.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -15,11 +15,9 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""entities tests schema
+"""entities tests schema"""
-"""
-
-from yams.buildobjs import EntityType, String
+from yams.buildobjs import EntityType, String, RichString
from cubicweb.schema import make_workflowable
class Company(EntityType):
--- a/entities/test/unittest_wfobjs.py Fri May 23 18:35:13 2014 +0200
+++ b/entities/test/unittest_wfobjs.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -19,12 +19,11 @@
from cubicweb import ValidationError
from cubicweb.devtools.testlib import CubicWebTC
-
-def add_wf(self, etype, name=None, default=False):
+def add_wf(shell, etype, name=None, default=False):
if name is None:
name = etype
- return self.shell().add_workflow(name, etype, default=default,
- ensure_workflowable=False)
+ return shell.add_workflow(name, etype, default=default,
+ ensure_workflowable=False)
def parse_hist(wfhist):
return [(ti.previous_state.name, ti.new_state.name,
@@ -35,101 +34,104 @@
class WorkflowBuildingTC(CubicWebTC):
def test_wf_construction(self):
- wf = add_wf(self, 'Company')
- foo = wf.add_state(u'foo', initial=True)
- bar = wf.add_state(u'bar')
- self.assertEqual(wf.state_by_name('bar').eid, bar.eid)
- self.assertEqual(wf.state_by_name('barrr'), None)
- baz = wf.add_transition(u'baz', (foo,), bar, ('managers',))
- self.assertEqual(wf.transition_by_name('baz').eid, baz.eid)
- self.assertEqual(len(baz.require_group), 1)
- self.assertEqual(baz.require_group[0].name, 'managers')
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'Company')
+ foo = wf.add_state(u'foo', initial=True)
+ bar = wf.add_state(u'bar')
+ self.assertEqual(wf.state_by_name('bar').eid, bar.eid)
+ self.assertEqual(wf.state_by_name('barrr'), None)
+ baz = wf.add_transition(u'baz', (foo,), bar, ('managers',))
+ self.assertEqual(wf.transition_by_name('baz').eid, baz.eid)
+ self.assertEqual(len(baz.require_group), 1)
+ self.assertEqual(baz.require_group[0].name, 'managers')
def test_duplicated_state(self):
- wf = add_wf(self, 'Company')
- wf.add_state(u'foo', initial=True)
- self.commit()
- wf.add_state(u'foo')
- with self.assertRaises(ValidationError) as cm:
- self.commit()
- self.assertEqual({'name-subject': 'workflow already has a state of that name'},
- cm.exception.errors)
- # no pb if not in the same workflow
- wf2 = add_wf(self, 'Company')
- foo = wf2.add_state(u'foo', initial=True)
- self.commit()
- # gnark gnark
- bar = wf.add_state(u'bar')
- self.commit()
- bar.cw_set(name=u'foo')
- with self.assertRaises(ValidationError) as cm:
- self.commit()
- self.assertEqual({'name-subject': 'workflow already has a state of that name'},
- cm.exception.errors)
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'Company')
+ wf.add_state(u'foo', initial=True)
+ shell.commit()
+ wf.add_state(u'foo')
+ with self.assertRaises(ValidationError) as cm:
+ shell.commit()
+ self.assertEqual({'name-subject': 'workflow already has a state of that name'},
+ cm.exception.errors)
+ # no pb if not in the same workflow
+ wf2 = add_wf(shell, 'Company')
+ foo = wf2.add_state(u'foo', initial=True)
+ shell.commit()
+ # gnark gnark
+ bar = wf.add_state(u'bar')
+ shell.commit()
+ bar.cw_set(name=u'foo')
+ with self.assertRaises(ValidationError) as cm:
+ shell.commit()
+ self.assertEqual({'name-subject': 'workflow already has a state of that name'},
+ cm.exception.errors)
def test_duplicated_transition(self):
- wf = add_wf(self, 'Company')
- foo = wf.add_state(u'foo', initial=True)
- bar = wf.add_state(u'bar')
- wf.add_transition(u'baz', (foo,), bar, ('managers',))
- wf.add_transition(u'baz', (bar,), foo)
- with self.assertRaises(ValidationError) as cm:
- self.commit()
- self.assertEqual(cm.exception.errors, {'name-subject': 'workflow already has a transition of that name'})
- # no pb if not in the same workflow
- wf2 = add_wf(self, 'Company')
- foo = wf.add_state(u'foo', initial=True)
- bar = wf.add_state(u'bar')
- wf.add_transition(u'baz', (foo,), bar, ('managers',))
- self.commit()
- # gnark gnark
- biz = wf.add_transition(u'biz', (bar,), foo)
- self.commit()
- biz.cw_set(name=u'baz')
- with self.assertRaises(ValidationError) as cm:
- self.commit()
- self.assertEqual(cm.exception.errors, {'name-subject': 'workflow already has a transition of that name'})
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'Company')
+ foo = wf.add_state(u'foo', initial=True)
+ bar = wf.add_state(u'bar')
+ wf.add_transition(u'baz', (foo,), bar, ('managers',))
+ wf.add_transition(u'baz', (bar,), foo)
+ with self.assertRaises(ValidationError) as cm:
+ shell.commit()
+ self.assertEqual(cm.exception.errors, {'name-subject': 'workflow already has a transition of that name'})
+ # no pb if not in the same workflow
+ wf2 = add_wf(shell, 'Company')
+ foo = wf.add_state(u'foo', initial=True)
+ bar = wf.add_state(u'bar')
+ wf.add_transition(u'baz', (foo,), bar, ('managers',))
+ shell.commit()
+ # gnark gnark
+ biz = wf.add_transition(u'biz', (bar,), foo)
+ shell.commit()
+ biz.cw_set(name=u'baz')
+ with self.assertRaises(ValidationError) as cm:
+ shell.commit()
+ self.assertEqual(cm.exception.errors, {'name-subject': 'workflow already has a transition of that name'})
class WorkflowTC(CubicWebTC):
def setup_database(self):
- req = self.request()
rschema = self.schema['in_state']
for rdef in rschema.rdefs.itervalues():
self.assertEqual(rdef.cardinality, '1*')
- self.member = self.create_user(req, 'member')
+ with self.admin_access.client_cnx() as cnx:
+ self.member_eid = self.create_user(cnx, 'member').eid
+ cnx.commit()
def test_workflow_base(self):
- req = self.request()
- e = self.create_user(req, 'toto')
- iworkflowable = e.cw_adapt_to('IWorkflowable')
- self.assertEqual(iworkflowable.state, 'activated')
- iworkflowable.change_state('deactivated', u'deactivate 1')
- self.commit()
- iworkflowable.change_state('activated', u'activate 1')
- self.commit()
- iworkflowable.change_state('deactivated', u'deactivate 2')
- self.commit()
- e.cw_clear_relation_cache('wf_info_for', 'object')
- self.assertEqual([tr.comment for tr in e.reverse_wf_info_for],
- ['deactivate 1', 'activate 1', 'deactivate 2'])
- self.assertEqual(iworkflowable.latest_trinfo().comment, 'deactivate 2')
+ with self.admin_access.web_request() as req:
+ e = self.create_user(req, 'toto')
+ iworkflowable = e.cw_adapt_to('IWorkflowable')
+ self.assertEqual(iworkflowable.state, 'activated')
+ iworkflowable.change_state('deactivated', u'deactivate 1')
+ req.cnx.commit()
+ iworkflowable.change_state('activated', u'activate 1')
+ req.cnx.commit()
+ iworkflowable.change_state('deactivated', u'deactivate 2')
+ req.cnx.commit()
+ e.cw_clear_relation_cache('wf_info_for', 'object')
+ self.assertEqual([tr.comment for tr in e.reverse_wf_info_for],
+ ['deactivate 1', 'activate 1', 'deactivate 2'])
+ self.assertEqual(iworkflowable.latest_trinfo().comment, 'deactivate 2')
def test_possible_transitions(self):
- user = self.execute('CWUser X').get_entity(0, 0)
- iworkflowable = user.cw_adapt_to('IWorkflowable')
- trs = list(iworkflowable.possible_transitions())
- self.assertEqual(len(trs), 1)
- self.assertEqual(trs[0].name, u'deactivate')
- self.assertEqual(trs[0].destination(None).name, u'deactivated')
+ with self.admin_access.web_request() as req:
+ user = req.execute('CWUser X').get_entity(0, 0)
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ trs = list(iworkflowable.possible_transitions())
+ self.assertEqual(len(trs), 1)
+ self.assertEqual(trs[0].name, u'deactivate')
+ self.assertEqual(trs[0].destination(None).name, u'deactivated')
# test a std user get no possible transition
- cnx = self.login('member')
- req = self.request()
- # fetch the entity using the new session
- trs = list(req.user.cw_adapt_to('IWorkflowable').possible_transitions())
- self.assertEqual(len(trs), 0)
- cnx.close()
+ with self.new_access('member').web_request() as req:
+ # fetch the entity using the new session
+ trs = list(req.user.cw_adapt_to('IWorkflowable').possible_transitions())
+ self.assertEqual(len(trs), 0)
def _test_manager_deactivate(self, user):
iworkflowable = user.cw_adapt_to('IWorkflowable')
@@ -144,90 +146,93 @@
return trinfo
def test_change_state(self):
- user = self.user()
- iworkflowable = user.cw_adapt_to('IWorkflowable')
- iworkflowable.change_state('deactivated', comment=u'deactivate user')
- trinfo = self._test_manager_deactivate(user)
- self.assertEqual(trinfo.transition, None)
+ with self.admin_access.client_cnx() as cnx:
+ user = cnx.user
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ iworkflowable.change_state('deactivated', comment=u'deactivate user')
+ trinfo = self._test_manager_deactivate(user)
+ self.assertEqual(trinfo.transition, None)
def test_set_in_state_bad_wf(self):
- wf = add_wf(self, 'CWUser')
- s = wf.add_state(u'foo', initial=True)
- self.commit()
- with self.session.security_enabled(write=False):
- with self.assertRaises(ValidationError) as cm:
- self.session.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
- {'x': self.user().eid, 's': s.eid})
- self.assertEqual(cm.exception.errors, {'in_state-subject': "state doesn't belong to entity's workflow. "
- "You may want to set a custom workflow for this entity first."})
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'CWUser')
+ s = wf.add_state(u'foo', initial=True)
+ shell.commit()
+ with self.admin_access.repo_cnx() as cnx:
+ with cnx.security_enabled(write=False):
+ with self.assertRaises(ValidationError) as cm:
+ cnx.execute('SET X in_state S WHERE X eid %(x)s, S eid %(s)s',
+ {'x': cnx.user.eid, 's': s.eid})
+ self.assertEqual(cm.exception.errors, {'in_state-subject': "state doesn't belong to entity's workflow. "
+ "You may want to set a custom workflow for this entity first."})
def test_fire_transition(self):
- user = self.user()
- iworkflowable = user.cw_adapt_to('IWorkflowable')
- iworkflowable.fire_transition('deactivate', comment=u'deactivate user')
- user.cw_clear_all_caches()
- self.assertEqual(iworkflowable.state, 'deactivated')
- self._test_manager_deactivate(user)
- trinfo = self._test_manager_deactivate(user)
- self.assertEqual(trinfo.transition.name, 'deactivate')
+ with self.admin_access.client_cnx() as cnx:
+ user = cnx.user
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ iworkflowable.fire_transition('deactivate', comment=u'deactivate user')
+ user.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.state, 'deactivated')
+ self._test_manager_deactivate(user)
+ trinfo = self._test_manager_deactivate(user)
+ self.assertEqual(trinfo.transition.name, 'deactivate')
def test_goback_transition(self):
- req = self.request()
- wf = req.user.cw_adapt_to('IWorkflowable').current_workflow
- asleep = wf.add_state('asleep')
- wf.add_transition('rest', (wf.state_by_name('activated'),
- wf.state_by_name('deactivated')),
- asleep)
- wf.add_transition('wake up', asleep)
- user = self.create_user(req, 'stduser')
- iworkflowable = user.cw_adapt_to('IWorkflowable')
- iworkflowable.fire_transition('rest')
- self.commit()
- iworkflowable.fire_transition('wake up')
- self.commit()
- self.assertEqual(iworkflowable.state, 'activated')
- iworkflowable.fire_transition('deactivate')
- self.commit()
- iworkflowable.fire_transition('rest')
- self.commit()
- iworkflowable.fire_transition('wake up')
- self.commit()
- user.cw_clear_all_caches()
- self.assertEqual(iworkflowable.state, 'deactivated')
+ with self.admin_access.web_request() as req:
+ wf = req.user.cw_adapt_to('IWorkflowable').current_workflow
+ asleep = wf.add_state('asleep')
+ wf.add_transition('rest', (wf.state_by_name('activated'),
+ wf.state_by_name('deactivated')),
+ asleep)
+ wf.add_transition('wake up', asleep)
+ user = self.create_user(req, 'stduser')
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ iworkflowable.fire_transition('rest')
+ req.cnx.commit()
+ iworkflowable.fire_transition('wake up')
+ req.cnx.commit()
+ self.assertEqual(iworkflowable.state, 'activated')
+ iworkflowable.fire_transition('deactivate')
+ req.cnx.commit()
+ iworkflowable.fire_transition('rest')
+ req.cnx.commit()
+ iworkflowable.fire_transition('wake up')
+ req.cnx.commit()
+ user.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.state, 'deactivated')
# XXX test managers can change state without matching transition
def _test_stduser_deactivate(self):
- ueid = self.member.eid
- req = self.request()
- self.create_user(req, 'tutu')
- cnx = self.login('tutu')
- req = self.request()
- iworkflowable = req.entity_from_eid(self.member.eid).cw_adapt_to('IWorkflowable')
- with self.assertRaises(ValidationError) as cm:
+ with self.admin_access.repo_cnx() as cnx:
+ self.create_user(cnx, 'tutu')
+ with self.new_access('tutu').web_request() as req:
+ iworkflowable = req.entity_from_eid(self.member_eid).cw_adapt_to('IWorkflowable')
+ with self.assertRaises(ValidationError) as cm:
+ iworkflowable.fire_transition('deactivate')
+ self.assertEqual(cm.exception.errors, {'by_transition-subject': "transition may not be fired"})
+ with self.new_access('member').web_request() as req:
+ iworkflowable = req.entity_from_eid(self.member_eid).cw_adapt_to('IWorkflowable')
iworkflowable.fire_transition('deactivate')
- self.assertEqual(cm.exception.errors, {'by_transition-subject': "transition may not be fired"})
- cnx.close()
- cnx = self.login('member')
- req = self.request()
- iworkflowable = req.entity_from_eid(self.member.eid).cw_adapt_to('IWorkflowable')
- iworkflowable.fire_transition('deactivate')
- cnx.commit()
- with self.assertRaises(ValidationError) as cm:
- iworkflowable.fire_transition('activate')
- self.assertEqual(cm.exception.errors, {'by_transition-subject': "transition may not be fired"})
- cnx.close()
+ req.cnx.commit()
+ with self.assertRaises(ValidationError) as cm:
+ iworkflowable.fire_transition('activate')
+ self.assertEqual(cm.exception.errors, {'by_transition-subject': "transition may not be fired"})
def test_fire_transition_owned_by(self):
- self.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", '
- 'X expression "X owned_by U", T condition X '
- 'WHERE T name "deactivate"')
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", '
+ 'X expression "X owned_by U", T condition X '
+ 'WHERE T name "deactivate"')
+ cnx.commit()
self._test_stduser_deactivate()
def test_fire_transition_has_update_perm(self):
- self.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", '
- 'X expression "U has_update_permission X", T condition X '
- 'WHERE T name "deactivate"')
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute('INSERT RQLExpression X: X exprtype "ERQLExpression", '
+ 'X expression "U has_update_permission X", T condition X '
+ 'WHERE T name "deactivate"')
+ cnx.commit()
self._test_stduser_deactivate()
def test_swf_base(self):
@@ -250,335 +255,357 @@
+--------+
"""
# sub-workflow
- swf = add_wf(self, 'CWGroup', name='subworkflow')
- swfstate1 = swf.add_state(u'swfstate1', initial=True)
- swfstate2 = swf.add_state(u'swfstate2')
- swfstate3 = swf.add_state(u'swfstate3')
- tr1 = swf.add_transition(u'tr1', (swfstate1,), swfstate2)
- tr2 = swf.add_transition(u'tr2', (swfstate1,), swfstate3)
- # main workflow
- mwf = add_wf(self, 'CWGroup', name='main workflow', default=True)
- state1 = mwf.add_state(u'state1', initial=True)
- state2 = mwf.add_state(u'state2')
- state3 = mwf.add_state(u'state3')
- swftr1 = mwf.add_wftransition(u'swftr1', swf, state1,
- [(swfstate2, state2), (swfstate3, state3)])
- swf.cw_clear_all_caches()
- self.assertEqual(swftr1.destination(None).eid, swfstate1.eid)
+ with self.admin_access.shell() as shell:
+ swf = add_wf(shell, 'CWGroup', name='subworkflow')
+ swfstate1 = swf.add_state(u'swfstate1', initial=True)
+ swfstate2 = swf.add_state(u'swfstate2')
+ swfstate3 = swf.add_state(u'swfstate3')
+ tr1 = swf.add_transition(u'tr1', (swfstate1,), swfstate2)
+ tr2 = swf.add_transition(u'tr2', (swfstate1,), swfstate3)
+ # main workflow
+ mwf = add_wf(shell, 'CWGroup', name='main workflow', default=True)
+ state1 = mwf.add_state(u'state1', initial=True)
+ state2 = mwf.add_state(u'state2')
+ state3 = mwf.add_state(u'state3')
+ swftr1 = mwf.add_wftransition(u'swftr1', swf, state1,
+ [(swfstate2, state2), (swfstate3, state3)])
+ swf.cw_clear_all_caches()
+ self.assertEqual(swftr1.destination(None).eid, swfstate1.eid)
# workflows built, begin test
- group = self.request().create_entity('CWGroup', name=u'grp1')
- self.commit()
- iworkflowable = group.cw_adapt_to('IWorkflowable')
- self.assertEqual(iworkflowable.current_state.eid, state1.eid)
- self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid)
- self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
- self.assertEqual(iworkflowable.subworkflow_input_transition(), None)
- iworkflowable.fire_transition('swftr1', u'go')
- self.commit()
- group.cw_clear_all_caches()
- self.assertEqual(iworkflowable.current_state.eid, swfstate1.eid)
- self.assertEqual(iworkflowable.current_workflow.eid, swf.eid)
- self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
- self.assertEqual(iworkflowable.subworkflow_input_transition().eid, swftr1.eid)
- iworkflowable.fire_transition('tr1', u'go')
- self.commit()
- group.cw_clear_all_caches()
- self.assertEqual(iworkflowable.current_state.eid, state2.eid)
- self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid)
- self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
- self.assertEqual(iworkflowable.subworkflow_input_transition(), None)
- # force back to swfstate1 is impossible since we can't any more find
- # subworkflow input transition
- with self.assertRaises(ValidationError) as cm:
- iworkflowable.change_state(swfstate1, u'gadget')
- self.assertEqual(cm.exception.errors, {'to_state-subject': "state doesn't belong to entity's workflow"})
- self.rollback()
- # force back to state1
- iworkflowable.change_state('state1', u'gadget')
- iworkflowable.fire_transition('swftr1', u'au')
- group.cw_clear_all_caches()
- iworkflowable.fire_transition('tr2', u'chapeau')
- self.commit()
- group.cw_clear_all_caches()
- self.assertEqual(iworkflowable.current_state.eid, state3.eid)
- self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid)
- self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
- self.assertListEqual(parse_hist(iworkflowable.workflow_history),
- [('state1', 'swfstate1', 'swftr1', 'go'),
- ('swfstate1', 'swfstate2', 'tr1', 'go'),
- ('swfstate2', 'state2', 'swftr1', 'exiting from subworkflow subworkflow'),
- ('state2', 'state1', None, 'gadget'),
- ('state1', 'swfstate1', 'swftr1', 'au'),
- ('swfstate1', 'swfstate3', 'tr2', 'chapeau'),
- ('swfstate3', 'state3', 'swftr1', 'exiting from subworkflow subworkflow'),
- ])
+ with self.admin_access.web_request() as req:
+ group = req.create_entity('CWGroup', name=u'grp1')
+ req.cnx.commit()
+ iworkflowable = group.cw_adapt_to('IWorkflowable')
+ self.assertEqual(iworkflowable.current_state.eid, state1.eid)
+ self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid)
+ self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
+ self.assertEqual(iworkflowable.subworkflow_input_transition(), None)
+ iworkflowable.fire_transition('swftr1', u'go')
+ req.cnx.commit()
+ group.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.current_state.eid, swfstate1.eid)
+ self.assertEqual(iworkflowable.current_workflow.eid, swf.eid)
+ self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
+ self.assertEqual(iworkflowable.subworkflow_input_transition().eid, swftr1.eid)
+ iworkflowable.fire_transition('tr1', u'go')
+ req.cnx.commit()
+ group.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.current_state.eid, state2.eid)
+ self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid)
+ self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
+ self.assertEqual(iworkflowable.subworkflow_input_transition(), None)
+ # force back to swfstate1 is impossible since we can't any more find
+ # subworkflow input transition
+ with self.assertRaises(ValidationError) as cm:
+ iworkflowable.change_state(swfstate1, u'gadget')
+ self.assertEqual(cm.exception.errors, {'to_state-subject': "state doesn't belong to entity's workflow"})
+ req.cnx.rollback()
+ # force back to state1
+ iworkflowable.change_state('state1', u'gadget')
+ iworkflowable.fire_transition('swftr1', u'au')
+ group.cw_clear_all_caches()
+ iworkflowable.fire_transition('tr2', u'chapeau')
+ req.cnx.commit()
+ group.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.current_state.eid, state3.eid)
+ self.assertEqual(iworkflowable.current_workflow.eid, mwf.eid)
+ self.assertEqual(iworkflowable.main_workflow.eid, mwf.eid)
+ self.assertListEqual(parse_hist(iworkflowable.workflow_history),
+ [('state1', 'swfstate1', 'swftr1', 'go'),
+ ('swfstate1', 'swfstate2', 'tr1', 'go'),
+ ('swfstate2', 'state2', 'swftr1', 'exiting from subworkflow subworkflow'),
+ ('state2', 'state1', None, 'gadget'),
+ ('state1', 'swfstate1', 'swftr1', 'au'),
+ ('swfstate1', 'swfstate3', 'tr2', 'chapeau'),
+ ('swfstate3', 'state3', 'swftr1', 'exiting from subworkflow subworkflow'),
+ ])
def test_swf_exit_consistency(self):
- # sub-workflow
- swf = add_wf(self, 'CWGroup', name='subworkflow')
- swfstate1 = swf.add_state(u'swfstate1', initial=True)
- swfstate2 = swf.add_state(u'swfstate2')
- tr1 = swf.add_transition(u'tr1', (swfstate1,), swfstate2)
- # main workflow
- mwf = add_wf(self, 'CWGroup', name='main workflow', default=True)
- state1 = mwf.add_state(u'state1', initial=True)
- state2 = mwf.add_state(u'state2')
- state3 = mwf.add_state(u'state3')
- mwf.add_wftransition(u'swftr1', swf, state1,
- [(swfstate2, state2), (swfstate2, state3)])
- with self.assertRaises(ValidationError) as cm:
- self.commit()
- self.assertEqual(cm.exception.errors, {'subworkflow_exit-subject': u"can't have multiple exits on the same state"})
+ with self.admin_access.shell() as shell:
+ # sub-workflow
+ swf = add_wf(shell, 'CWGroup', name='subworkflow')
+ swfstate1 = swf.add_state(u'swfstate1', initial=True)
+ swfstate2 = swf.add_state(u'swfstate2')
+ tr1 = swf.add_transition(u'tr1', (swfstate1,), swfstate2)
+ # main workflow
+ mwf = add_wf(shell, 'CWGroup', name='main workflow', default=True)
+ state1 = mwf.add_state(u'state1', initial=True)
+ state2 = mwf.add_state(u'state2')
+ state3 = mwf.add_state(u'state3')
+ mwf.add_wftransition(u'swftr1', swf, state1,
+ [(swfstate2, state2), (swfstate2, state3)])
+ with self.assertRaises(ValidationError) as cm:
+ shell.commit()
+ self.assertEqual(cm.exception.errors, {'subworkflow_exit-subject': u"can't have multiple exits on the same state"})
def test_swf_fire_in_a_row(self):
- # sub-workflow
- subwf = add_wf(self, 'CWGroup', name='subworkflow')
- xsigning = subwf.add_state('xsigning', initial=True)
- xaborted = subwf.add_state('xaborted')
- xsigned = subwf.add_state('xsigned')
- xabort = subwf.add_transition('xabort', (xsigning,), xaborted)
- xsign = subwf.add_transition('xsign', (xsigning,), xsigning)
- xcomplete = subwf.add_transition('xcomplete', (xsigning,), xsigned,
- type=u'auto')
- # main workflow
- twf = add_wf(self, 'CWGroup', name='mainwf', default=True)
- created = twf.add_state(_('created'), initial=True)
- identified = twf.add_state(_('identified'))
- released = twf.add_state(_('released'))
- closed = twf.add_state(_('closed'))
- twf.add_wftransition(_('identify'), subwf, (created,),
- [(xsigned, identified), (xaborted, created)])
- twf.add_wftransition(_('release'), subwf, (identified,),
- [(xsigned, released), (xaborted, identified)])
- twf.add_wftransition(_('close'), subwf, (released,),
- [(xsigned, closed), (xaborted, released)])
- self.commit()
- group = self.request().create_entity('CWGroup', name=u'grp1')
- self.commit()
- iworkflowable = group.cw_adapt_to('IWorkflowable')
- for trans in ('identify', 'release', 'close'):
- iworkflowable.fire_transition(trans)
- self.commit()
+ with self.admin_access.shell() as shell:
+ # sub-workflow
+ subwf = add_wf(shell, 'CWGroup', name='subworkflow')
+ xsigning = subwf.add_state('xsigning', initial=True)
+ xaborted = subwf.add_state('xaborted')
+ xsigned = subwf.add_state('xsigned')
+ xabort = subwf.add_transition('xabort', (xsigning,), xaborted)
+ xsign = subwf.add_transition('xsign', (xsigning,), xsigning)
+ xcomplete = subwf.add_transition('xcomplete', (xsigning,), xsigned,
+ type=u'auto')
+ # main workflow
+ twf = add_wf(shell, 'CWGroup', name='mainwf', default=True)
+ created = twf.add_state(_('created'), initial=True)
+ identified = twf.add_state(_('identified'))
+ released = twf.add_state(_('released'))
+ closed = twf.add_state(_('closed'))
+ twf.add_wftransition(_('identify'), subwf, (created,),
+ [(xsigned, identified), (xaborted, created)])
+ twf.add_wftransition(_('release'), subwf, (identified,),
+ [(xsigned, released), (xaborted, identified)])
+ twf.add_wftransition(_('close'), subwf, (released,),
+ [(xsigned, closed), (xaborted, released)])
+ shell.commit()
+ with self.admin_access.repo_cnx() as cnx:
+ group = cnx.create_entity('CWGroup', name=u'grp1')
+ cnx.commit()
+ iworkflowable = group.cw_adapt_to('IWorkflowable')
+ for trans in ('identify', 'release', 'close'):
+ iworkflowable.fire_transition(trans)
+ cnx.commit()
def test_swf_magic_tr(self):
- # sub-workflow
- subwf = add_wf(self, 'CWGroup', name='subworkflow')
- xsigning = subwf.add_state('xsigning', initial=True)
- xaborted = subwf.add_state('xaborted')
- xsigned = subwf.add_state('xsigned')
- xabort = subwf.add_transition('xabort', (xsigning,), xaborted)
- xsign = subwf.add_transition('xsign', (xsigning,), xsigned)
- # main workflow
- twf = add_wf(self, 'CWGroup', name='mainwf', default=True)
- created = twf.add_state(_('created'), initial=True)
- identified = twf.add_state(_('identified'))
- released = twf.add_state(_('released'))
- twf.add_wftransition(_('identify'), subwf, created,
- [(xaborted, None), (xsigned, identified)])
- twf.add_wftransition(_('release'), subwf, identified,
- [(xaborted, None)])
- self.commit()
- group = self.request().create_entity('CWGroup', name=u'grp1')
- self.commit()
- iworkflowable = group.cw_adapt_to('IWorkflowable')
- for trans, nextstate in (('identify', 'xsigning'),
- ('xabort', 'created'),
- ('identify', 'xsigning'),
- ('xsign', 'identified'),
- ('release', 'xsigning'),
- ('xabort', 'identified')
- ):
- iworkflowable.fire_transition(trans)
- self.commit()
- group.cw_clear_all_caches()
- self.assertEqual(iworkflowable.state, nextstate)
+ with self.admin_access.shell() as shell:
+ # sub-workflow
+ subwf = add_wf(shell, 'CWGroup', name='subworkflow')
+ xsigning = subwf.add_state('xsigning', initial=True)
+ xaborted = subwf.add_state('xaborted')
+ xsigned = subwf.add_state('xsigned')
+ xabort = subwf.add_transition('xabort', (xsigning,), xaborted)
+ xsign = subwf.add_transition('xsign', (xsigning,), xsigned)
+ # main workflow
+ twf = add_wf(shell, 'CWGroup', name='mainwf', default=True)
+ created = twf.add_state(_('created'), initial=True)
+ identified = twf.add_state(_('identified'))
+ released = twf.add_state(_('released'))
+ twf.add_wftransition(_('identify'), subwf, created,
+ [(xaborted, None), (xsigned, identified)])
+ twf.add_wftransition(_('release'), subwf, identified,
+ [(xaborted, None)])
+ shell.commit()
+ with self.admin_access.web_request() as req:
+ group = req.create_entity('CWGroup', name=u'grp1')
+ req.cnx.commit()
+ iworkflowable = group.cw_adapt_to('IWorkflowable')
+ for trans, nextstate in (('identify', 'xsigning'),
+ ('xabort', 'created'),
+ ('identify', 'xsigning'),
+ ('xsign', 'identified'),
+ ('release', 'xsigning'),
+ ('xabort', 'identified')
+ ):
+ iworkflowable.fire_transition(trans)
+ req.cnx.commit()
+ group.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.state, nextstate)
class CustomWorkflowTC(CubicWebTC):
def setup_database(self):
- req = self.request()
- self.member = self.create_user(req, 'member')
+ with self.admin_access.repo_cnx() as cnx:
+ self.member_eid = self.create_user(cnx, 'member').eid
def test_custom_wf_replace_state_no_history(self):
"""member in inital state with no previous history, state is simply
redirected when changing workflow
"""
- wf = add_wf(self, 'CWUser')
- wf.add_state('asleep', initial=True)
- self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
- {'wf': wf.eid, 'x': self.member.eid})
- self.member.cw_clear_all_caches()
- iworkflowable = self.member.cw_adapt_to('IWorkflowable')
- self.assertEqual(iworkflowable.state, 'activated')# no change before commit
- self.commit()
- self.member.cw_clear_all_caches()
- self.assertEqual(iworkflowable.current_workflow.eid, wf.eid)
- self.assertEqual(iworkflowable.state, 'asleep')
- self.assertEqual(iworkflowable.workflow_history, ())
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'CWUser')
+ wf.add_state('asleep', initial=True)
+ with self.admin_access.web_request() as req:
+ req.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
+ {'wf': wf.eid, 'x': self.member_eid})
+ member = req.entity_from_eid(self.member_eid)
+ iworkflowable = member.cw_adapt_to('IWorkflowable')
+ self.assertEqual(iworkflowable.state, 'activated') # no change before commit
+ req.cnx.commit()
+ member.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.current_workflow.eid, wf.eid)
+ self.assertEqual(iworkflowable.state, 'asleep')
+ self.assertEqual(iworkflowable.workflow_history, ())
def test_custom_wf_replace_state_keep_history(self):
"""member in inital state with some history, state is redirected and
state change is recorded to history
"""
- iworkflowable = self.member.cw_adapt_to('IWorkflowable')
- iworkflowable.fire_transition('deactivate')
- iworkflowable.fire_transition('activate')
- wf = add_wf(self, 'CWUser')
- wf.add_state('asleep', initial=True)
- self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
- {'wf': wf.eid, 'x': self.member.eid})
- self.commit()
- self.member.cw_clear_all_caches()
- self.assertEqual(iworkflowable.current_workflow.eid, wf.eid)
- self.assertEqual(iworkflowable.state, 'asleep')
- self.assertEqual(parse_hist(iworkflowable.workflow_history),
- [('activated', 'deactivated', 'deactivate', None),
- ('deactivated', 'activated', 'activate', None),
- ('activated', 'asleep', None, 'workflow changed to "CWUser"')])
+ with self.admin_access.web_request() as req:
+ member = req.entity_from_eid(self.member_eid)
+ iworkflowable = member.cw_adapt_to('IWorkflowable')
+ iworkflowable.fire_transition('deactivate')
+ iworkflowable.fire_transition('activate')
+ req.cnx.commit()
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'CWUser')
+ wf.add_state('asleep', initial=True)
+ shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
+ {'wf': wf.eid, 'x': self.member_eid})
+ with self.admin_access.web_request() as req:
+ member = req.entity_from_eid(self.member_eid)
+ iworkflowable = member.cw_adapt_to('IWorkflowable')
+ self.assertEqual(iworkflowable.current_workflow.eid, wf.eid)
+ self.assertEqual(iworkflowable.state, 'asleep')
+ self.assertEqual(parse_hist(iworkflowable.workflow_history),
+ [('activated', 'deactivated', 'deactivate', None),
+ ('deactivated', 'activated', 'activate', None),
+ ('activated', 'asleep', None, 'workflow changed to "CWUser"')])
def test_custom_wf_no_initial_state(self):
"""try to set a custom workflow which has no initial state"""
- iworkflowable = self.member.cw_adapt_to('IWorkflowable')
- iworkflowable.fire_transition('deactivate')
- wf = add_wf(self, 'CWUser')
- wf.add_state('asleep')
- self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
- {'wf': wf.eid, 'x': self.member.eid})
- with self.assertRaises(ValidationError) as cm:
- self.commit()
- self.assertEqual(cm.exception.errors, {'custom_workflow-subject': u'workflow has no initial state'})
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'CWUser')
+ wf.add_state('asleep')
+ shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
+ {'wf': wf.eid, 'x': self.member_eid})
+ with self.assertRaises(ValidationError) as cm:
+ shell.commit()
+ self.assertEqual(cm.exception.errors, {'custom_workflow-subject': u'workflow has no initial state'})
def test_custom_wf_bad_etype(self):
"""try to set a custom workflow which doesn't apply to entity type"""
- wf = add_wf(self, 'Company')
- wf.add_state('asleep', initial=True)
- self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
- {'wf': wf.eid, 'x': self.member.eid})
- with self.assertRaises(ValidationError) as cm:
- self.commit()
- self.assertEqual(cm.exception.errors, {'custom_workflow-subject': u"workflow isn't a workflow for this type"})
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'Company')
+ wf.add_state('asleep', initial=True)
+ shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
+ {'wf': wf.eid, 'x': self.member_eid})
+ with self.assertRaises(ValidationError) as cm:
+ shell.commit()
+ self.assertEqual(cm.exception.errors, {'custom_workflow-subject': u"workflow isn't a workflow for this type"})
def test_del_custom_wf(self):
"""member in some state shared by the new workflow, nothing has to be
done
"""
- iworkflowable = self.member.cw_adapt_to('IWorkflowable')
- iworkflowable.fire_transition('deactivate')
- wf = add_wf(self, 'CWUser')
- wf.add_state('asleep', initial=True)
- self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
- {'wf': wf.eid, 'x': self.member.eid})
- self.commit()
- self.execute('DELETE X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
- {'wf': wf.eid, 'x': self.member.eid})
- self.member.cw_clear_all_caches()
- self.assertEqual(iworkflowable.state, 'asleep')# no change before commit
- self.commit()
- self.member.cw_clear_all_caches()
- self.assertEqual(iworkflowable.current_workflow.name, "default user workflow")
- self.assertEqual(iworkflowable.state, 'activated')
- self.assertEqual(parse_hist(iworkflowable.workflow_history),
- [('activated', 'deactivated', 'deactivate', None),
- ('deactivated', 'asleep', None, 'workflow changed to "CWUser"'),
- ('asleep', 'activated', None, 'workflow changed to "default user workflow"'),])
+ with self.admin_access.web_request() as req:
+ member = req.entity_from_eid(self.member_eid)
+ iworkflowable = member.cw_adapt_to('IWorkflowable')
+ iworkflowable.fire_transition('deactivate')
+ req.cnx.commit()
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'CWUser')
+ wf.add_state('asleep', initial=True)
+ shell.rqlexec('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
+ {'wf': wf.eid, 'x': self.member_eid})
+ shell.commit()
+ with self.admin_access.web_request() as req:
+ req.execute('DELETE X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
+ {'wf': wf.eid, 'x': self.member_eid})
+ member = req.entity_from_eid(self.member_eid)
+ iworkflowable = member.cw_adapt_to('IWorkflowable')
+ self.assertEqual(iworkflowable.state, 'asleep')# no change before commit
+ req.cnx.commit()
+ member.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.current_workflow.name, "default user workflow")
+ self.assertEqual(iworkflowable.state, 'activated')
+ self.assertEqual(parse_hist(iworkflowable.workflow_history),
+ [('activated', 'deactivated', 'deactivate', None),
+ ('deactivated', 'asleep', None, 'workflow changed to "CWUser"'),
+ ('asleep', 'activated', None, 'workflow changed to "default user workflow"'),])
class AutoTransitionTC(CubicWebTC):
def setup_custom_wf(self):
- wf = add_wf(self, 'CWUser')
- asleep = wf.add_state('asleep', initial=True)
- dead = wf.add_state('dead')
- wf.add_transition('rest', asleep, asleep)
- wf.add_transition('sick', asleep, dead, type=u'auto',
- conditions=({'expr': u'X surname "toto"',
- 'mainvars': u'X'},))
+ with self.admin_access.shell() as shell:
+ wf = add_wf(shell, 'CWUser')
+ asleep = wf.add_state('asleep', initial=True)
+ dead = wf.add_state('dead')
+ wf.add_transition('rest', asleep, asleep)
+ wf.add_transition('sick', asleep, dead, type=u'auto',
+ conditions=({'expr': u'X surname "toto"',
+ 'mainvars': u'X'},))
return wf
def test_auto_transition_fired(self):
wf = self.setup_custom_wf()
- req = self.request()
- user = self.create_user(req, 'member')
- iworkflowable = user.cw_adapt_to('IWorkflowable')
- self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
- {'wf': wf.eid, 'x': user.eid})
- self.commit()
- user.cw_clear_all_caches()
- self.assertEqual(iworkflowable.state, 'asleep')
- self.assertEqual([t.name for t in iworkflowable.possible_transitions()],
- ['rest'])
- iworkflowable.fire_transition('rest')
- self.commit()
- user.cw_clear_all_caches()
- self.assertEqual(iworkflowable.state, 'asleep')
- self.assertEqual([t.name for t in iworkflowable.possible_transitions()],
- ['rest'])
- self.assertEqual(parse_hist(iworkflowable.workflow_history),
- [('asleep', 'asleep', 'rest', None)])
- user.cw_set(surname=u'toto') # fulfill condition
- self.commit()
- iworkflowable.fire_transition('rest')
- self.commit()
- user.cw_clear_all_caches()
- self.assertEqual(iworkflowable.state, 'dead')
- self.assertEqual(parse_hist(iworkflowable.workflow_history),
- [('asleep', 'asleep', 'rest', None),
- ('asleep', 'asleep', 'rest', None),
- ('asleep', 'dead', 'sick', None),])
+ with self.admin_access.web_request() as req:
+ user = self.create_user(req, 'member')
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ req.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
+ {'wf': wf.eid, 'x': user.eid})
+ req.cnx.commit()
+ user.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.state, 'asleep')
+ self.assertEqual([t.name for t in iworkflowable.possible_transitions()],
+ ['rest'])
+ iworkflowable.fire_transition('rest')
+ req.cnx.commit()
+ user.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.state, 'asleep')
+ self.assertEqual([t.name for t in iworkflowable.possible_transitions()],
+ ['rest'])
+ self.assertEqual(parse_hist(iworkflowable.workflow_history),
+ [('asleep', 'asleep', 'rest', None)])
+ user.cw_set(surname=u'toto') # fulfill condition
+ req.cnx.commit()
+ iworkflowable.fire_transition('rest')
+ req.cnx.commit()
+ user.cw_clear_all_caches()
+ self.assertEqual(iworkflowable.state, 'dead')
+ self.assertEqual(parse_hist(iworkflowable.workflow_history),
+ [('asleep', 'asleep', 'rest', None),
+ ('asleep', 'asleep', 'rest', None),
+ ('asleep', 'dead', 'sick', None),])
def test_auto_transition_custom_initial_state_fired(self):
wf = self.setup_custom_wf()
- req = self.request()
- user = self.create_user(req, 'member', surname=u'toto')
- self.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
+ with self.admin_access.web_request() as req:
+ user = self.create_user(req, 'member', surname=u'toto')
+ req.execute('SET X custom_workflow WF WHERE X eid %(x)s, WF eid %(wf)s',
{'wf': wf.eid, 'x': user.eid})
- self.commit()
- iworkflowable = user.cw_adapt_to('IWorkflowable')
- self.assertEqual(iworkflowable.state, 'dead')
+ req.cnx.commit()
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ self.assertEqual(iworkflowable.state, 'dead')
def test_auto_transition_initial_state_fired(self):
- wf = self.execute('Any WF WHERE ET default_workflow WF, '
- 'ET name %(et)s', {'et': 'CWUser'}).get_entity(0, 0)
- dead = wf.add_state('dead')
- wf.add_transition('sick', wf.state_by_name('activated'), dead,
- type=u'auto', conditions=({'expr': u'X surname "toto"',
- 'mainvars': u'X'},))
- self.commit()
- req = self.request()
- user = self.create_user(req, 'member', surname=u'toto')
- self.commit()
- iworkflowable = user.cw_adapt_to('IWorkflowable')
- self.assertEqual(iworkflowable.state, 'dead')
+ with self.admin_access.web_request() as req:
+ wf = req.execute('Any WF WHERE ET default_workflow WF, '
+ 'ET name %(et)s', {'et': 'CWUser'}).get_entity(0, 0)
+ dead = wf.add_state('dead')
+ wf.add_transition('sick', wf.state_by_name('activated'), dead,
+ type=u'auto', conditions=({'expr': u'X surname "toto"',
+ 'mainvars': u'X'},))
+ req.cnx.commit()
+ with self.admin_access.web_request() as req:
+ user = self.create_user(req, 'member', surname=u'toto')
+ req.cnx.commit()
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ self.assertEqual(iworkflowable.state, 'dead')
class WorkflowHooksTC(CubicWebTC):
def setUp(self):
CubicWebTC.setUp(self)
- req = self.request()
- self.wf = req.user.cw_adapt_to('IWorkflowable').current_workflow
- self.s_activated = self.wf.state_by_name('activated').eid
- self.s_deactivated = self.wf.state_by_name('deactivated').eid
- self.s_dummy = self.wf.add_state(u'dummy').eid
- self.wf.add_transition(u'dummy', (self.s_deactivated,), self.s_dummy)
- ueid = self.create_user(req, 'stduser', commit=False).eid
- # test initial state is set
- rset = self.execute('Any N WHERE S name N, X in_state S, X eid %(x)s',
- {'x' : ueid})
- self.assertFalse(rset, rset.rows)
- self.commit()
- initialstate = self.execute('Any N WHERE S name N, X in_state S, X eid %(x)s',
- {'x' : ueid})[0][0]
- self.assertEqual(initialstate, u'activated')
- # give access to users group on the user's wf transitions
- # so we can test wf enforcing on euser (managers don't have anymore this
- # enforcement
- self.execute('SET X require_group G '
- 'WHERE G name "users", X transition_of WF, WF eid %(wf)s',
- {'wf': self.wf.eid})
- self.commit()
+ with self.admin_access.web_request() as req:
+ self.wf = req.user.cw_adapt_to('IWorkflowable').current_workflow
+ self.s_activated = self.wf.state_by_name('activated').eid
+ self.s_deactivated = self.wf.state_by_name('deactivated').eid
+ self.s_dummy = self.wf.add_state(u'dummy').eid
+ self.wf.add_transition(u'dummy', (self.s_deactivated,), self.s_dummy)
+ ueid = self.create_user(req, 'stduser', commit=False).eid
+ # test initial state is set
+ rset = req.execute('Any N WHERE S name N, X in_state S, X eid %(x)s',
+ {'x' : ueid})
+ self.assertFalse(rset, rset.rows)
+ req.cnx.commit()
+ initialstate = req.execute('Any N WHERE S name N, X in_state S, X eid %(x)s',
+ {'x' : ueid})[0][0]
+ self.assertEqual(initialstate, u'activated')
+ # give access to users group on the user's wf transitions
+            # so we can test wf enforcing on euser (managers no longer have this
+            # enforcement)
+ req.execute('SET X require_group G '
+ 'WHERE G name "users", X transition_of WF, WF eid %(wf)s',
+ {'wf': self.wf.eid})
+ req.cnx.commit()
# XXX currently, we've to rely on hooks to set initial state, or to use execute
# def test_initial_state(self):
@@ -603,42 +630,37 @@
return ' '.join(lmsg)
def test_transition_checking1(self):
- cnx = self.login('stduser')
- user = cnx.user(self.session)
- iworkflowable = user.cw_adapt_to('IWorkflowable')
- with self.assertRaises(ValidationError) as cm:
- iworkflowable.fire_transition('activate')
- self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']),
- u"transition isn't allowed from")
- cnx.close()
+ with self.new_access('stduser').repo_cnx() as cnx:
+ user = cnx.user
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ with self.assertRaises(ValidationError) as cm:
+ iworkflowable.fire_transition('activate')
+ self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']),
+ u"transition isn't allowed from")
def test_transition_checking2(self):
- cnx = self.login('stduser')
- user = cnx.user(self.session)
- iworkflowable = user.cw_adapt_to('IWorkflowable')
- with self.assertRaises(ValidationError) as cm:
- iworkflowable.fire_transition('dummy')
- self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']),
- u"transition isn't allowed from")
- cnx.close()
+ with self.new_access('stduser').repo_cnx() as cnx:
+ user = cnx.user
+ iworkflowable = user.cw_adapt_to('IWorkflowable')
+ with self.assertRaises(ValidationError) as cm:
+ iworkflowable.fire_transition('dummy')
+ self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']),
+ u"transition isn't allowed from")
def test_transition_checking3(self):
- with self.login('stduser') as cnx:
- session = self.session
- user = self.user()
+ with self.new_access('stduser').repo_cnx() as cnx:
+ user = cnx.user
iworkflowable = user.cw_adapt_to('IWorkflowable')
iworkflowable.fire_transition('deactivate')
- session.commit()
- session.set_cnxset()
+ cnx.commit()
with self.assertRaises(ValidationError) as cm:
iworkflowable.fire_transition('deactivate')
self.assertEqual(self._cleanup_msg(cm.exception.errors['by_transition-subject']),
u"transition isn't allowed from")
- session.rollback()
- session.set_cnxset()
+ cnx.rollback()
# get back now
iworkflowable.fire_transition('activate')
- session.commit()
+ cnx.commit()
if __name__ == '__main__':
--- a/entity.py Fri May 23 18:35:13 2014 +0200
+++ b/entity.py Fri Jun 27 11:48:26 2014 +0200
@@ -551,14 +551,12 @@
def _cw_update_attr_cache(self, attrcache):
# if context is a repository session, don't consider dont-cache-attrs as
- # the instance already hold modified values and loosing them could
+        # the instance already holds modified values and losing them could
# introduce severe problems
- get_set = partial(self._cw.get_shared_data, default=(), txdata=True,
- pop=True)
- uncached_attrs = set()
- uncached_attrs.update(get_set('%s.storage-special-process-attrs' % self.eid))
+ trdata = self._cw.transaction_data
+ uncached_attrs = trdata.get('%s.storage-special-process-attrs' % self.eid, set())
if self._cw.is_request:
- uncached_attrs.update(get_set('%s.dont-cache-attrs' % self.eid))
+ uncached_attrs.update(trdata.get('%s.dont-cache-attrs' % self.eid, set()))
for attr in uncached_attrs:
attrcache.pop(attr, None)
self.cw_attr_cache.pop(attr, None)
--- a/etwist/server.py Fri May 23 18:35:13 2014 +0200
+++ b/etwist/server.py Fri Jun 27 11:48:26 2014 +0200
@@ -47,14 +47,6 @@
# to wait all tasks to be finished for the server to be actually started
lc.start(interval, now=False)
-def host_prefixed_baseurl(baseurl, host):
- scheme, netloc, url, query, fragment = urlsplit(baseurl)
- netloc_domain = '.' + '.'.join(netloc.split('.')[-2:])
- if host.endswith(netloc_domain):
- netloc = host
- baseurl = urlunsplit((scheme, netloc, url, query, fragment))
- return baseurl
-
class CubicWebRootResource(resource.Resource):
def __init__(self, config, repo):
--- a/etwist/test/unittest_server.py Fri May 23 18:35:13 2014 +0200
+++ b/etwist/test/unittest_server.py Fri Jun 27 11:48:26 2014 +0200
@@ -19,41 +19,7 @@
import os, os.path as osp, glob
import urllib
-from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.devtools.httptest import CubicWebServerTC
-from cubicweb.etwist.server import host_prefixed_baseurl
-
-
-class HostPrefixedBaseURLTC(CubicWebTC):
-
- def _check(self, baseurl, host, waited):
- self.assertEqual(host_prefixed_baseurl(baseurl, host), waited,
- 'baseurl %s called through host %s should be considered as %s'
- % (baseurl, host, waited))
-
- def test1(self):
- self._check('http://www.cubicweb.org/hg/', 'code.cubicweb.org',
- 'http://code.cubicweb.org/hg/')
-
- def test2(self):
- self._check('http://www.cubicweb.org/hg/', 'cubicweb.org',
- 'http://www.cubicweb.org/hg/')
-
- def test3(self):
- self._check('http://cubicweb.org/hg/', 'code.cubicweb.org',
- 'http://code.cubicweb.org/hg/')
-
- def test4(self):
- self._check('http://www.cubicweb.org/hg/', 'localhost',
- 'http://www.cubicweb.org/hg/')
-
- def test5(self):
- self._check('http://www.cubicweb.org/cubes/', 'hg.code.cubicweb.org',
- 'http://hg.code.cubicweb.org/cubes/')
-
- def test6(self):
- self._check('http://localhost:8080/hg/', 'code.cubicweb.org',
- 'http://localhost:8080/hg/')
class ETwistHTTPTC(CubicWebServerTC):
--- a/hooks/__init__.py Fri May 23 18:35:13 2014 +0200
+++ b/hooks/__init__.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -34,14 +34,11 @@
lifetime = timedelta(days=self.repo.config['keep-transaction-lifetime'])
def cleanup_old_transactions(repo=self.repo, lifetime=lifetime):
mindate = datetime.now() - lifetime
- session = repo.internal_session()
- try:
- session.system_sql(
+ with repo.internal_cnx() as cnx:
+ cnx.system_sql(
'DELETE FROM transactions WHERE tx_time < %(time)s',
{'time': mindate})
- session.commit()
- finally:
- session.close()
+ cnx.commit()
if self.repo.config['undo-enabled']:
self.repo.looping_task(60*60*24, cleanup_old_transactions,
self.repo)
@@ -60,13 +57,11 @@
or not repo.config.source_enabled(source)
or not source.config['synchronize']):
continue
- session = repo.internal_session(safe=True)
- try:
- source.pull_data(session)
- except Exception as exc:
- session.exception('while trying to update feed %s', source)
- finally:
- session.close()
+ with repo.internal_cnx() as cnx:
+ try:
+ source.pull_data(cnx)
+ except Exception as exc:
+ cnx.exception('while trying to update feed %s', source)
self.repo.looping_task(60, update_feeds, self.repo)
@@ -81,12 +76,9 @@
if (uri == 'system'
or not repo.config.source_enabled(source)):
continue
- session = repo.internal_session()
- try:
+ with repo.internal_cnx() as cnx:
mindate = datetime.now() - timedelta(seconds=source.config['logs-lifetime'])
- session.execute('DELETE CWDataImport X WHERE X start_timestamp < %(time)s',
+ cnx.execute('DELETE CWDataImport X WHERE X start_timestamp < %(time)s',
{'time': mindate})
- session.commit()
- finally:
- session.close()
+ cnx.commit()
self.repo.looping_task(60*60*24, expire_dataimports, self.repo)
--- a/hooks/metadata.py Fri May 23 18:35:13 2014 +0200
+++ b/hooks/metadata.py Fri Jun 27 11:48:26 2014 +0200
@@ -46,7 +46,7 @@
edited['creation_date'] = timestamp
if not edited.get('modification_date'):
edited['modification_date'] = timestamp
- if not self._cw.get_shared_data('do-not-insert-cwuri'):
+ if not self._cw.transaction_data.get('do-not-insert-cwuri'):
cwuri = u'%s%s' % (self._cw.base_url(), self.entity.eid)
edited.setdefault('cwuri', cwuri)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/multipart.py Fri Jun 27 11:48:26 2014 +0200
@@ -0,0 +1,413 @@
+# -*- coding: utf-8 -*-
+'''
+Parser for multipart/form-data
+==============================
+
+This module provides a parser for the multipart/form-data format. It can read
+from a file, a socket or a WSGI environment. The parser can be used to replace
+cgi.FieldStorage (without the bugs) and works with Python 2.5+ and 3.x (2to3).
+
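+A minimal usage sketch (illustrative; ``environ`` is assumed to be a WSGI
+environment dict provided by the caller)::
+
+    forms, files = parse_form_data(environ)
+    name = forms.get('name')            # unicode value of a plain form field
+    upload = files.get('somefile')      # MultipartPart instance or None
+    if upload is not None:
+        upload.save_as('/tmp/upload.bin')
+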
+Licence (MIT)
+-------------
+
+ Copyright (c) 2010, Marcel Hellkamp.
+ Inspired by the Werkzeug library: http://werkzeug.pocoo.org/
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
+
+'''
+
+__author__ = 'Marcel Hellkamp'
+__version__ = '0.1'
+__license__ = 'MIT'
+
+from tempfile import TemporaryFile
+from wsgiref.headers import Headers
+import re, sys
+try:
+ from urlparse import parse_qs
+except ImportError: # pragma: no cover (fallback for Python 2.5)
+ from cgi import parse_qs
+try:
+ from io import BytesIO
+except ImportError: # pragma: no cover (fallback for Python 2.5)
+ from StringIO import StringIO as BytesIO
+
+##############################################################################
+################################ Helper & Misc ################################
+##############################################################################
+# Some of these were copied from bottle: http://bottle.paws.de/
+
+try:
+ from collections import MutableMapping as DictMixin
+except ImportError: # pragma: no cover (fallback for Python 2.5)
+ from UserDict import DictMixin
+
+class MultiDict(DictMixin):
+ """ A dict that remembers old values for each key """
+ def __init__(self, *a, **k):
+ self.dict = dict()
+ for k, v in dict(*a, **k).iteritems():
+ self[k] = v
+
+ def __len__(self): return len(self.dict)
+ def __iter__(self): return iter(self.dict)
+ def __contains__(self, key): return key in self.dict
+ def __delitem__(self, key): del self.dict[key]
+ def keys(self): return self.dict.keys()
+ def __getitem__(self, key): return self.get(key, KeyError, -1)
+ def __setitem__(self, key, value): self.append(key, value)
+
+ def append(self, key, value): self.dict.setdefault(key, []).append(value)
+ def replace(self, key, value): self.dict[key] = [value]
+ def getall(self, key): return self.dict.get(key) or []
+
+ def get(self, key, default=None, index=-1):
+ if key not in self.dict and default != KeyError:
+ return [default][index]
+ return self.dict[key][index]
+
+ def iterallitems(self):
+ for key, values in self.dict.iteritems():
+ for value in values:
+ yield key, value
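+
+# Illustrative MultiDict behaviour (a sketch, values chosen arbitrarily):
+#   d = MultiDict(); d['a'] = 1; d['a'] = 2
+#   d['a']        -> 2       (plain lookup returns the last value)
+#   d.getall('a') -> [1, 2]  (all values are kept)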
+
+def tob(data, enc='utf8'): # Convert strings to bytes (py2 and py3)
+ return data.encode(enc) if isinstance(data, unicode) else data
+
+def copy_file(stream, target, maxread=-1, buffer_size=2*16):
+ ''' Read from :stream and write to :target until :maxread or EOF. '''
+ size, read = 0, stream.read
+ while 1:
+ to_read = buffer_size if maxread < 0 else min(buffer_size, maxread-size)
+ part = read(to_read)
+ if not part: return size
+ target.write(part)
+ size += len(part)
+
+##############################################################################
+################################ Header Parser ################################
+##############################################################################
+
+_special = re.escape('()<>@,;:\\"/[]?={} \t')
+_re_special = re.compile('[%s]' % _special)
+_qstr = '"(?:\\\\.|[^"])*"' # Quoted string
+_value = '(?:[^%s]+|%s)' % (_special, _qstr) # Safe or quoted string
+_option = '(?:;|^)\s*([^%s]+)\s*=\s*(%s)' % (_special, _value)
+_re_option = re.compile(_option) # key=value part of a Content-Type like header
+
+def header_quote(val):
+ if not _re_special.search(val):
+ return val
+ return '"' + val.replace('\\','\\\\').replace('"','\\"') + '"'
+
+def header_unquote(val, filename=False):
+ if val[0] == val[-1] == '"':
+ val = val[1:-1]
+ if val[1:3] == ':\\' or val[:2] == '\\\\':
+ val = val.split('\\')[-1] # fix ie6 bug: full path --> filename
+ return val.replace('\\\\','\\').replace('\\"','"')
+ return val
+
+def parse_options_header(header, options=None):
+ if ';' not in header:
+ return header.lower().strip(), {}
+ ctype, tail = header.split(';', 1)
+ options = options or {}
+ for match in _re_option.finditer(tail):
+ key = match.group(1).lower()
+ value = header_unquote(match.group(2), key=='filename')
+ options[key] = value
+ return ctype, options
+
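+# Illustrative behaviour of parse_options_header:
+#   parse_options_header('form-data; name="f"; filename="a.txt"')
+#   -> ('form-data', {'name': 'f', 'filename': 'a.txt'})
+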
+##############################################################################
+################################## Multipart ##################################
+##############################################################################
+
+
+class MultipartError(ValueError): pass
+
+
+class MultipartParser(object):
+
+ def __init__(self, stream, boundary, content_length=-1,
+ disk_limit=2**30, mem_limit=2**20, memfile_limit=2**18,
+ buffer_size=2**16, charset='latin1'):
+ ''' Parse a multipart/form-data byte stream. This object is an iterator
+ over the parts of the message.
+
+ :param stream: A file-like stream. Must implement ``.read(size)``.
+ :param boundary: The multipart boundary as a byte string.
+ :param content_length: The maximum number of bytes to read.
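+
+        A sketch of typical iteration (``stream`` and ``boundary`` are
+        placeholders for the caller's input)::
+
+            for part in MultipartParser(stream, boundary):
+                print part.name, part.filename, part.size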
+ '''
+ self.stream, self.boundary = stream, boundary
+ self.content_length = content_length
+ self.disk_limit = disk_limit
+ self.memfile_limit = memfile_limit
+ self.mem_limit = min(mem_limit, self.disk_limit)
+ self.buffer_size = min(buffer_size, self.mem_limit)
+ self.charset = charset
+ if self.buffer_size - 6 < len(boundary): # "--boundary--\r\n"
+ raise MultipartError('Boundary does not fit into buffer_size.')
+ self._done = []
+ self._part_iter = None
+
+ def __iter__(self):
+ ''' Iterate over the parts of the multipart message. '''
+ if not self._part_iter:
+ self._part_iter = self._iterparse()
+ for part in self._done:
+ yield part
+ for part in self._part_iter:
+ self._done.append(part)
+ yield part
+
+ def parts(self):
+ ''' Returns a list with all parts of the multipart message. '''
+ return list(iter(self))
+
+ def get(self, name, default=None):
+ ''' Return the first part with that name or a default value (None). '''
+ for part in self:
+ if name == part.name:
+ return part
+ return default
+
+ def get_all(self, name):
+ ''' Return a list of parts with that name. '''
+ return [p for p in self if p.name == name]
+
+ def _lineiter(self):
+ ''' Iterate over a binary file-like object line by line. Each line is
+ returned as a (line, line_ending) tuple. If the line does not fit
+ into self.buffer_size, line_ending is empty and the rest of the line
+ is returned with the next iteration.
+ '''
+ read = self.stream.read
+ maxread, maxbuf = self.content_length, self.buffer_size
+ _bcrnl = tob('\r\n')
+ _bcr = _bcrnl[:1]
+ _bnl = _bcrnl[1:]
+        _bempty = _bcrnl[:0] # b'\r\n'[:0] -> b''
+ buffer = _bempty # buffer for the last (partial) line
+ while 1:
+ data = read(maxbuf if maxread < 0 else min(maxbuf, maxread))
+ maxread -= len(data)
+ lines = (buffer+data).splitlines(True)
+ len_first_line = len(lines[0])
+ # be sure that the first line does not become too big
+ if len_first_line > self.buffer_size:
+ # at the same time don't split a '\r\n' accidentally
+ if (len_first_line == self.buffer_size+1 and
+ lines[0].endswith(_bcrnl)):
+ splitpos = self.buffer_size - 1
+ else:
+ splitpos = self.buffer_size
+ lines[:1] = [lines[0][:splitpos],
+ lines[0][splitpos:]]
+ if data:
+ buffer = lines[-1]
+ lines = lines[:-1]
+ for line in lines:
+ if line.endswith(_bcrnl): yield line[:-2], _bcrnl
+ elif line.endswith(_bnl): yield line[:-1], _bnl
+ elif line.endswith(_bcr): yield line[:-1], _bcr
+ else: yield line, _bempty
+ if not data:
+ break
+
+ def _iterparse(self):
+ lines, line = self._lineiter(), ''
+ separator = tob('--') + tob(self.boundary)
+ terminator = tob('--') + tob(self.boundary) + tob('--')
+ # Consume first boundary. Ignore leading blank lines
+ for line, nl in lines:
+ if line: break
+ if line != separator:
+ raise MultipartError("Stream does not start with boundary")
+ # For each part in stream...
+ mem_used, disk_used = 0, 0 # Track used resources to prevent DoS
+        is_tail = False # True if the last line was incomplete (cut)
+ opts = {'buffer_size': self.buffer_size,
+ 'memfile_limit': self.memfile_limit,
+ 'charset': self.charset}
+ part = MultipartPart(**opts)
+ for line, nl in lines:
+ if line == terminator and not is_tail:
+ part.file.seek(0)
+ yield part
+ break
+ elif line == separator and not is_tail:
+ if part.is_buffered(): mem_used += part.size
+ else: disk_used += part.size
+ part.file.seek(0)
+ yield part
+ part = MultipartPart(**opts)
+ else:
+ is_tail = not nl # The next line continues this one
+ part.feed(line, nl)
+ if part.is_buffered():
+ if part.size + mem_used > self.mem_limit:
+ raise MultipartError("Memory limit reached.")
+ elif part.size + disk_used > self.disk_limit:
+ raise MultipartError("Disk limit reached.")
+ if line != terminator:
+ raise MultipartError("Unexpected end of multipart stream.")
+
+
+class MultipartPart(object):
+
+ def __init__(self, buffer_size=2**16, memfile_limit=2**18, charset='latin1'):
+ self.headerlist = []
+ self.headers = None
+ self.file = False
+ self.size = 0
+ self._buf = tob('')
+ self.disposition, self.name, self.filename = None, None, None
+ self.content_type, self.charset = None, charset
+ self.memfile_limit = memfile_limit
+ self.buffer_size = buffer_size
+
+ def feed(self, line, nl=''):
+ if self.file:
+ return self.write_body(line, nl)
+ return self.write_header(line, nl)
+
+ def write_header(self, line, nl):
+ line = line.decode(self.charset or 'latin1')
+ if not nl: raise MultipartError('Unexpected end of line in header.')
+ if not line.strip(): # blank line -> end of header segment
+ self.finish_header()
+ elif line[0] in ' \t' and self.headerlist:
+ name, value = self.headerlist.pop()
+ self.headerlist.append((name, value+line.strip()))
+ else:
+ if ':' not in line:
+ raise MultipartError("Syntax error in header: No colon.")
+ name, value = line.split(':', 1)
+ self.headerlist.append((name.strip(), value.strip()))
+
+ def write_body(self, line, nl):
+ if not line and not nl: return # This does not even flush the buffer
+ self.size += len(line) + len(self._buf)
+ self.file.write(self._buf + line)
+ self._buf = nl
+ if self.content_length > 0 and self.size > self.content_length:
+ raise MultipartError('Size of body exceeds Content-Length header.')
+ if self.size > self.memfile_limit and isinstance(self.file, BytesIO):
+ # TODO: What about non-file uploads that exceed the memfile_limit?
+ self.file, old = TemporaryFile(mode='w+b'), self.file
+ old.seek(0)
+ copy_file(old, self.file, self.size, self.buffer_size)
+
+ def finish_header(self):
+ self.file = BytesIO()
+ self.headers = Headers(self.headerlist)
+ cdis = self.headers.get('Content-Disposition','')
+ ctype = self.headers.get('Content-Type','')
+ clen = self.headers.get('Content-Length','-1')
+ if not cdis:
+ raise MultipartError('Content-Disposition header is missing.')
+ self.disposition, self.options = parse_options_header(cdis)
+ self.name = self.options.get('name')
+ self.filename = self.options.get('filename')
+ self.content_type, options = parse_options_header(ctype)
+ self.charset = options.get('charset') or self.charset
+ self.content_length = int(self.headers.get('Content-Length','-1'))
+
+ def is_buffered(self):
+ ''' Return true if the data is fully buffered in memory.'''
+ return isinstance(self.file, BytesIO)
+
+ @property
+ def value(self):
+ ''' Data decoded with the specified charset '''
+ pos = self.file.tell()
+ self.file.seek(0)
+ val = self.file.read()
+ self.file.seek(pos)
+ return val.decode(self.charset)
+
+ def save_as(self, path):
+ fp = open(path, 'wb')
+ pos = self.file.tell()
+ try:
+ self.file.seek(0)
+ size = copy_file(self.file, fp)
+ finally:
+ self.file.seek(pos)
+ return size
+
+##############################################################################
+#################################### WSGI ####################################
+##############################################################################
+
+def parse_form_data(environ, charset='utf8', strict=False, **kw):
+ ''' Parse form data from an environ dict and return a (forms, files) tuple.
+ Both tuple values are dictionaries with the form-field name as a key
+ (unicode) and lists as values (multiple values per key are possible).
+ The forms-dictionary contains form-field values as unicode strings.
+ The files-dictionary contains :class:`MultipartPart` instances, either
+    because the form-field was a file-upload or the value is too big to fit
+ into memory limits.
+
+    :param environ: A WSGI environment dict.
+ :param charset: The charset to use if unsure. (default: utf8)
+ :param strict: If True, raise :exc:`MultipartError` on any parsing
+ errors. These are silently ignored by default.
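+
+    A short sketch (``environ`` is assumed to be a WSGI environment dict)::
+
+        forms, files = parse_form_data(environ, strict=False)
+        for name, part in files.iterallitems():
+            part.save_as('/tmp/%s.upload' % name)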
+ '''
+
+ forms, files = MultiDict(), MultiDict()
+ try:
+ if environ.get('REQUEST_METHOD','GET').upper() not in ('POST', 'PUT'):
+ raise MultipartError("Request method other than POST or PUT.")
+ content_length = int(environ.get('CONTENT_LENGTH', '-1'))
+ content_type = environ.get('CONTENT_TYPE', '')
+ if not content_type:
+ raise MultipartError("Missing Content-Type header.")
+ content_type, options = parse_options_header(content_type)
+ stream = environ.get('wsgi.input') or BytesIO()
+ kw['charset'] = charset = options.get('charset', charset)
+ if content_type == 'multipart/form-data':
+ boundary = options.get('boundary','')
+ if not boundary:
+ raise MultipartError("No boundary for multipart/form-data.")
+ for part in MultipartParser(stream, boundary, content_length, **kw):
+ if part.filename or not part.is_buffered():
+ files[part.name] = part
+ else: # TODO: Big form-fields are in the files dict. really?
+ forms[part.name] = part.value
+ elif content_type in ('application/x-www-form-urlencoded',
+ 'application/x-url-encoded'):
+ mem_limit = kw.get('mem_limit', 2**20)
+ if content_length > mem_limit:
+                raise MultipartError("Request too big. Increase MAXMEM.")
+            data = stream.read(mem_limit).decode(charset)
+            if stream.read(1): # There is more data that does not fit mem_limit
+                raise MultipartError("Request too big. Increase MAXMEM.")
+ data = parse_qs(data, keep_blank_values=True)
+ for key, values in data.iteritems():
+ for value in values:
+ forms[key] = value
+ else:
+ raise MultipartError("Unsupported content type.")
+ except MultipartError:
+ if strict: raise
+ return forms, files
+
--- a/repoapi.py Fri May 23 18:35:13 2014 +0200
+++ b/repoapi.py Fri Jun 27 11:48:26 2014 +0200
@@ -148,6 +148,7 @@
is_repo_in_memory = True # BC, always true
def __init__(self, session, autoclose_session=False):
+ super(ClientConnection, self).__init__(session.vreg)
self._session = session # XXX there is no real reason to keep the
# session around function still using it should
# be rewritten and migrated.
@@ -156,7 +157,6 @@
self._web_request = False
#: cache entities built during the connection
self._eid_cache = {}
- self.vreg = session.vreg
self._set_user(session.user)
self._autoclose_session = autoclose_session
@@ -247,6 +247,10 @@
get_shared_data = _srv_cnx_func('get_shared_data')
set_shared_data = _srv_cnx_func('set_shared_data')
+ @property
+ def transaction_data(self):
+ return self._cnx.transaction_data
+
# meta-data accessors ######################################################
@_open_only
--- a/req.py Fri May 23 18:35:13 2014 +0200
+++ b/req.py Fri Jun 27 11:48:26 2014 +0200
@@ -207,7 +207,7 @@
"""
parts = ['Any X WHERE X is %s' % etype]
varmaker = rqlvar_maker(defined='X')
- eschema = self.vreg.schema[etype]
+ eschema = self.vreg.schema.eschema(etype)
for attr, value in kwargs.items():
if isinstance(value, list) or isinstance(value, tuple):
raise NotImplementedError("List of values are not supported")
@@ -299,7 +299,7 @@
return u'%s%s?%s' % (base_url, path, self.build_url_params(**kwargs))
def build_url_params(self, **kwargs):
- """return encoded params to incorporate them in an URL"""
+ """return encoded params to incorporate them in a URL"""
args = []
for param, values in kwargs.iteritems():
if not isinstance(values, (list, tuple)):
@@ -365,7 +365,20 @@
@cached
def user_data(self):
- """returns a dictionary with this user's information"""
+ """returns a dictionary with this user's information.
+
+        The keys are:
+
+ login
+ The user login
+
+ name
+ The user name, returned by user.name()
+
+ email
+ The user principal email
+
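+        Example (illustrative values)::
+
+          {'login': u'admin', 'name': u'admin', 'email': u'admin@example.org'}
+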
+ """
userinfo = {}
user = self.user
userinfo['login'] = user.login
--- a/rset.py Fri May 23 18:35:13 2014 +0200
+++ b/rset.py Fri Jun 27 11:48:26 2014 +0200
@@ -112,9 +112,6 @@
"""returns the result set's size"""
return self.rowcount
- def __nonzero__(self):
- return self.rowcount
-
def __getitem__(self, i):
"""returns the ith element of the result set"""
return self.rows[i] #ResultSetRow(self.rows[i])
--- a/schema.py Fri May 23 18:35:13 2014 +0200
+++ b/schema.py Fri Jun 27 11:48:26 2014 +0200
@@ -563,7 +563,7 @@
PermissionMixIn.set_action_permissions = set_action_permissions
def has_local_role(self, action):
- """return true if the action *may* be granted localy (eg either rql
+ """return true if the action *may* be granted locally (eg either rql
expressions or the owners group are used in security definition)
XXX this method is only there since we don't know well how to deal with
@@ -585,7 +585,7 @@
PermissionMixIn.may_have_permission = may_have_permission
def has_perm(self, _cw, action, **kwargs):
- """return true if the action is granted globaly or localy"""
+ """return true if the action is granted globally or locally"""
try:
self.check_perm(_cw, action, **kwargs)
return True
--- a/server/__init__.py Fri May 23 18:35:13 2014 +0200
+++ b/server/__init__.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -197,7 +197,8 @@
{'u': user.eid, 'group': group})
return user
-def init_repository(config, interactive=True, drop=False, vreg=None):
+def init_repository(config, interactive=True, drop=False, vreg=None,
+ init_config=None):
"""initialise a repository database by creating tables add filling them
with the minimal set of entities (ie at least the schema, base groups and
a initial user)
@@ -215,6 +216,9 @@
config.cube_appobject_path = set(('hooks', 'entities'))
# only enable the system source at initialization time
repo = Repository(config, vreg=vreg)
+ if init_config is not None:
+ # further config initialization once it has been bootstrapped
+ init_config(config)
schema = repo.schema
sourcescfg = config.read_sources_file()
source = sourcescfg['system']
--- a/server/checkintegrity.py Fri May 23 18:35:13 2014 +0200
+++ b/server/checkintegrity.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -395,22 +395,21 @@
(no running cubicweb server needed)
"""
# yo, launch checks
- srvcnx = cnx._cnx
if checks:
eids_cache = {}
- with srvcnx.security_enabled(read=False, write=False): # ensure no read security
+ with cnx.security_enabled(read=False, write=False): # ensure no read security
for check in checks:
check_func = globals()['check_%s' % check]
- with srvcnx.ensure_cnx_set:
- check_func(repo.schema, srvcnx, eids_cache, fix=fix)
+ with cnx.ensure_cnx_set:
+ check_func(repo.schema, cnx, eids_cache, fix=fix)
if fix:
- srvcnx.commit()
+ cnx.commit()
else:
print
if not fix:
print 'WARNING: Diagnostic run, nothing has been corrected'
if reindex:
- srvcnx.rollback()
- with srvcnx.ensure_cnx_set:
- reindex_entities(repo.schema, srvcnx, withpb=withpb)
- srvcnx.commit()
+ cnx.rollback()
+ with cnx.ensure_cnx_set:
+ reindex_entities(repo.schema, cnx, withpb=withpb)
+ cnx.commit()
--- a/server/migractions.py Fri May 23 18:35:13 2014 +0200
+++ b/server/migractions.py Fri Jun 27 11:48:26 2014 +0200
@@ -150,7 +150,6 @@
sys.exit(0)
self.session = self.repo._get_session(self.cnx.sessionid)
self.session.keep_cnxset_mode('transaction')
- self.session.set_shared_data('rebuild-infered', False)
# overriden from base MigrationHelper ######################################
@@ -1064,12 +1063,19 @@
if commit:
self.commit()
- def cmd_rename_relation_type(self, oldname, newname, commit=True):
+ def cmd_rename_relation_type(self, oldname, newname, commit=True, force=False):
"""rename an existing relation
`oldname` is a string giving the name of the existing relation
`newname` is a string giving the name of the renamed relation
+
+ If `force` is True, proceed even if `oldname` still appears in the fs schema
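+
+        e.g. (hypothetical migration script call)::
+
+            rename_relation_type('comment', 'comments', force=True)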
"""
+ if oldname in self.fs_schema and not force:
+ if not self.confirm('Relation %s is still present in the filesystem schema,'
+ ' do you really want to drop it?' % oldname,
+ default='n'):
+ raise SystemExit(1)
self.cmd_add_relation_type(newname, commit=True)
self.rqlexec('SET X %s Y WHERE X %s Y' % (newname, oldname),
ask_confirm=self.verbosity>=2)
@@ -1342,17 +1348,23 @@
self.commit()
return entity
+ def cmd_find(self, etype, **kwargs):
+ """find entities of the given type and attribute values"""
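+        # hypothetical migration shell usage sketch:
+        #   find('CWUser', login=u'admin').one()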
+ return self.cnx.find(etype, **kwargs)
+
+ @deprecated("[3.19] use find(*args, **kwargs).entities() instead")
def cmd_find_entities(self, etype, **kwargs):
"""find entities of the given type and attribute values"""
- return self.cnx.find_entities(etype, **kwargs)
+ return self.cnx.find(etype, **kwargs).entities()
+ @deprecated("[3.19] use find(*args, **kwargs).one() instead")
def cmd_find_one_entity(self, etype, **kwargs):
"""find one entity of the given type and attribute values.
raise :exc:`cubicweb.req.FindEntityError` if can not return one and only
one entity.
"""
- return self.cnx.find_one_entity(etype, **kwargs)
+ return self.cnx.find(etype, **kwargs).one()
def cmd_update_etype_fti_weight(self, etype, weight):
if self.repo.system_source.dbdriver == 'postgres':
--- a/server/querier.py Fri May 23 18:35:13 2014 +0200
+++ b/server/querier.py Fri Jun 27 11:48:26 2014 +0200
@@ -505,7 +505,7 @@
"""execute a rql query, return resulting rows and their description in
a `ResultSet` object
- * `rql` should be an Unicode string or a plain ASCII string
+ * `rql` should be a Unicode string or a plain ASCII string
* `args` the optional parameters dictionary associated to the query
* `build_descr` is a boolean flag indicating if the description should
be built on select queries (if false, the description will be en empty
--- a/server/repository.py Fri May 23 18:35:13 2014 +0200
+++ b/server/repository.py Fri Jun 27 11:48:26 2014 +0200
@@ -652,7 +652,7 @@
return rset.rows
def connect(self, login, **kwargs):
- """open a connection for a given user
+ """open a session for a given user
raise `AuthenticationError` if the authentication failed
raise `ConnectionError` if we can't open a connection
@@ -684,7 +684,7 @@
txid=None):
"""execute a RQL query
- * rqlstring should be an unicode string or a plain ascii string
+ * rqlstring should be a unicode string or a plain ascii string
* args the optional parameters used in the query
* build_descr is a flag indicating if the description should be
built on select queries
@@ -746,6 +746,7 @@
"""
return self._get_session(sessionid, setcnxset=False).timestamp
+ @deprecated('[3.19] use session or transaction data')
def get_shared_data(self, sessionid, key, default=None, pop=False, txdata=False):
"""return value associated to key in the session's data dictionary or
session's transaction's data if `txdata` is true.
@@ -758,6 +759,7 @@
session = self._get_session(sessionid, setcnxset=False)
return session.get_shared_data(key, default, pop, txdata)
+ @deprecated('[3.19] use session or transaction data')
def set_shared_data(self, sessionid, key, value, txdata=False):
"""set value associated to `key` in shared data
@@ -909,24 +911,18 @@
@contextmanager
def internal_cnx(self):
- """return a Connection using internal user which have
- every rights on the repository. The `safe` argument is dropped. all
- hook are enabled by default.
+        """Context manager returning a Connection using the internal user, which
+        has all access rights on the repository.
- /!\ IN OPPOSITE OF THE OLDER INTERNAL_SESSION,
- /!\ INTERNAL CONNECTION HAVE ALL HOOKS ENABLED.
-
- This is to be used a context manager.
+        Beware that unlike the older :meth:`internal_session`, internal
+        connections have all hooks enabled; only read and write security
+        checks are disabled.
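+
+        A sketch of intended use (``repo`` being a Repository instance)::
+
+          with repo.internal_cnx() as cnx:
+              cnx.execute('Any X WHERE X is CWUser')
+              cnx.commit()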
"""
with InternalSession(self) as session:
with session.new_cnx() as cnx:
- # equivalent to cnx.security_enabled(False, False) because
- # InternalSession gives full read access
- with cnx.allow_all_hooks_but('security'):
+ with cnx.security_enabled(read=False, write=False):
with cnx.ensure_cnx_set:
yield cnx
-
def _get_session(self, sessionid, setcnxset=False, txid=None,
checkshuttingdown=True):
"""return the session associated with the given session identifier"""
@@ -945,7 +941,7 @@
# * correspondance between eid and (type, source)
# * correspondance between eid and local id (i.e. specific to a given source)
- def type_and_source_from_eid(self, eid, session):
+ def type_and_source_from_eid(self, eid, cnx):
"""return a tuple `(type, extid, actual source uri)` for the entity of
the given `eid`
"""
@@ -956,8 +952,7 @@
try:
return self._type_source_cache[eid]
except KeyError:
- etype, extid, auri = self.system_source.eid_type_source(session,
- eid)
+ etype, extid, auri = self.system_source.eid_type_source(cnx, eid)
self._type_source_cache[eid] = (etype, extid, auri)
return etype, extid, auri
@@ -975,9 +970,9 @@
rqlcache.pop( ('Any X WHERE X eid %s' % eid,), None)
self.system_source.clear_eid_cache(eid, etype)
- def type_from_eid(self, eid, session):
+ def type_from_eid(self, eid, cnx):
"""return the type of the entity with id <eid>"""
- return self.type_and_source_from_eid(eid, session)[0]
+ return self.type_and_source_from_eid(eid, cnx)[0]
def querier_cache_key(self, session, rql, args, eidkeys):
cachekey = [rql]
@@ -1027,7 +1022,8 @@
cnx = cnx._cnx
except AttributeError:
pass
- eid = self.system_source.extid2eid(cnx, extid)
+ with cnx.ensure_cnx_set:
+ eid = self.system_source.extid2eid(cnx, extid)
if eid is not None:
self._extid_cache[extid] = eid
self._type_source_cache[eid] = (etype, extid, source.uri)
@@ -1035,33 +1031,37 @@
if not insert:
return
# no link between extid and eid, create one
- try:
- eid = self.system_source.create_eid(cnx)
- self._extid_cache[extid] = eid
- self._type_source_cache[eid] = (etype, extid, source.uri)
- entity = source.before_entity_insertion(
- cnx, extid, etype, eid, sourceparams)
- if source.should_call_hooks:
- # get back a copy of operation for later restore if necessary,
- # see below
- pending_operations = cnx.pending_operations[:]
- self.hm.call_hooks('before_add_entity', cnx, entity=entity)
- self.add_info(cnx, entity, source, extid)
- source.after_entity_insertion(cnx, extid, entity, sourceparams)
- if source.should_call_hooks:
- self.hm.call_hooks('after_add_entity', cnx, entity=entity)
- return eid
- except Exception:
- # XXX do some cleanup manually so that the transaction has a
- # chance to be commited, with simply this entity discarded
- self._extid_cache.pop(extid, None)
- self._type_source_cache.pop(eid, None)
- if 'entity' in locals():
- hook.CleanupDeletedEidsCacheOp.get_instance(cnx).add_data(entity.eid)
- self.system_source.delete_info_multi(cnx, [entity])
+ with cnx.ensure_cnx_set:
+ # write query, ensure connection's mode is 'write' so connections
+ # won't be released until commit/rollback
+ cnx.mode = 'write'
+ try:
+ eid = self.system_source.create_eid(cnx)
+ self._extid_cache[extid] = eid
+ self._type_source_cache[eid] = (etype, extid, source.uri)
+ entity = source.before_entity_insertion(
+ cnx, extid, etype, eid, sourceparams)
if source.should_call_hooks:
- cnx.pending_operations = pending_operations
- raise
+ # get back a copy of operation for later restore if
+ # necessary, see below
+ pending_operations = cnx.pending_operations[:]
+ self.hm.call_hooks('before_add_entity', cnx, entity=entity)
+ self.add_info(cnx, entity, source, extid)
+ source.after_entity_insertion(cnx, extid, entity, sourceparams)
+ if source.should_call_hooks:
+ self.hm.call_hooks('after_add_entity', cnx, entity=entity)
+ return eid
+ except Exception:
+ # XXX do some cleanup manually so that the transaction has a
+ # chance to be commited, with simply this entity discarded
+                # chance to be committed, with simply this entity discarded
+ self._type_source_cache.pop(eid, None)
+ if 'entity' in locals():
+ hook.CleanupDeletedEidsCacheOp.get_instance(cnx).add_data(entity.eid)
+ self.system_source.delete_info_multi(cnx, [entity])
+ if source.should_call_hooks:
+ cnx.pending_operations = pending_operations
+ raise
def add_info(self, session, entity, source, extid=None):
"""add type and source info for an eid into the system table,
@@ -1263,11 +1263,7 @@
if relcache is not None:
cnx.update_rel_cache_del(entity.eid, attr, prevvalue)
del_existing_rel_if_needed(cnx, entity.eid, attr, value)
- if relcache is not None:
- cnx.update_rel_cache_add(entity.eid, attr, value)
- else:
- entity.cw_set_relation_cache(attr, 'subject',
- cnx.eid_rset(value))
+ cnx.update_rel_cache_add(entity.eid, attr, value)
hm.call_hooks('after_add_relation', cnx,
eidfrom=entity.eid, rtype=attr, eidto=value)
finally:
--- a/server/serverctl.py Fri May 23 18:35:13 2014 +0200
+++ b/server/serverctl.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -459,7 +459,7 @@
init_repository(config, drop=self.config.drop)
if not self.config.automatic:
while ASK.confirm('Enter another source ?', default_is_yes=False):
- CWCTL.run(['add-source', '--config-level',
+ CWCTL.run(['source-add', '--config-level',
str(self.config.config_level), config.appid])
@@ -469,7 +469,7 @@
<instance>
the identifier of the instance to initialize.
"""
- name = 'add-source'
+ name = 'source-add'
arguments = '<instance>'
min_args = max_args = 1
options = (
@@ -979,10 +979,12 @@
appid = args[0]
config = ServerConfiguration.config_for(appid)
config.repairing = self.config.force
- repo, cnx = repo_cnx(config)
- with cnx:
+ repo, _cnx = repo_cnx(config)
+ with repo.internal_cnx() as cnx:
check(repo, cnx,
- self.config.checks, self.config.reindex, self.config.autofix)
+ self.config.checks,
+ self.config.reindex,
+ self.config.autofix)
class RebuildFTICommand(Command):
--- a/server/session.py Fri May 23 18:35:13 2014 +0200
+++ b/server/session.py Fri Jun 27 11:48:26 2014 +0200
@@ -23,7 +23,6 @@
from time import time
from uuid import uuid4
from warnings import warn
-import json
import functools
from contextlib import contextmanager
@@ -518,6 +517,7 @@
# other session utility
if session.user.login == '__internal_manager__':
self.user = session.user
+ self.set_language(self.user.prefered_language())
else:
self._set_user(session.user)
@@ -549,6 +549,7 @@
return self._rewriter
@_open_only
+ @deprecated('[3.19] use session or transaction data')
def get_shared_data(self, key, default=None, pop=False, txdata=False):
"""return value associated to `key` in session data"""
if txdata:
@@ -561,6 +562,7 @@
return data.get(key, default)
@_open_only
+ @deprecated('[3.19] use session or transaction data')
def set_shared_data(self, key, value, txdata=False):
"""set value associated to `key` in session data"""
if txdata:
@@ -1154,18 +1156,9 @@
@_with_cnx_set
@_open_only
def call_service(self, regid, **kwargs):
- json.dumps(kwargs) # This line ensure that people use serialisable
- # argument for call service. this is very important
- # to enforce that from start to make sure RPC
- # version is available.
- self.info('calling service %s', regid)
+ self.debug('calling service %s', regid)
service = self.vreg['services'].select(regid, self, **kwargs)
- result = service.call(**kwargs)
- json.dumps(result) # This line ensure that service have serialisable
- # output. this is very important to enforce that
- # from start to make sure RPC version is
- # available.
- return result
+ return service.call(**kwargs)
@_with_cnx_set
@_open_only
@@ -1567,6 +1560,7 @@
# shared data handling ###################################################
+ @deprecated('[3.19] use session or transaction data')
def get_shared_data(self, key, default=None, pop=False, txdata=False):
"""return value associated to `key` in session data"""
if txdata:
@@ -1578,6 +1572,7 @@
else:
return data.get(key, default)
+ @deprecated('[3.19] use session or transaction data')
def set_shared_data(self, key, value, txdata=False):
"""set value associated to `key` in session data"""
if txdata:
--- a/server/sources/datafeed.py Fri May 23 18:35:13 2014 +0200
+++ b/server/sources/datafeed.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2010-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2010-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -143,67 +143,64 @@
return False
return datetime.utcnow() < (self.latest_retrieval + self.synchro_interval)
- def update_latest_retrieval(self, session):
+ def update_latest_retrieval(self, cnx):
self.latest_retrieval = datetime.utcnow()
- session.set_cnxset()
- session.execute('SET X latest_retrieval %(date)s WHERE X eid %(x)s',
+ cnx.execute('SET X latest_retrieval %(date)s WHERE X eid %(x)s',
{'x': self.eid, 'date': self.latest_retrieval})
- session.commit()
+ cnx.commit()
- def acquire_synchronization_lock(self, session):
+ def acquire_synchronization_lock(self, cnx):
# XXX race condition until WHERE of SET queries is executed using
# 'SELECT FOR UPDATE'
now = datetime.utcnow()
- session.set_cnxset()
- if not session.execute(
+ if not cnx.execute(
'SET X in_synchronization %(now)s WHERE X eid %(x)s, '
'X in_synchronization NULL OR X in_synchronization < %(maxdt)s',
{'x': self.eid, 'now': now, 'maxdt': now - self.max_lock_lifetime}):
self.error('concurrent synchronization detected, skip pull')
- session.commit()
+ cnx.commit()
return False
- session.commit()
+ cnx.commit()
return True
- def release_synchronization_lock(self, session):
- session.set_cnxset()
- session.execute('SET X in_synchronization NULL WHERE X eid %(x)s',
+ def release_synchronization_lock(self, cnx):
+ cnx.execute('SET X in_synchronization NULL WHERE X eid %(x)s',
{'x': self.eid})
- session.commit()
+ cnx.commit()
- def pull_data(self, session, force=False, raise_on_error=False):
+ def pull_data(self, cnx, force=False, raise_on_error=False):
"""Launch synchronization of the source if needed.
This method is responsible to handle commit/rollback on the given
- session.
+ connection.
"""
if not force and self.fresh():
return {}
- if not self.acquire_synchronization_lock(session):
+ if not self.acquire_synchronization_lock(cnx):
return {}
try:
- with session.transaction(free_cnxset=False):
- return self._pull_data(session, force, raise_on_error)
+ return self._pull_data(cnx, force, raise_on_error)
finally:
- self.release_synchronization_lock(session)
+ cnx.rollback() # rollback first in case there is some dirty
+ # transaction remaining
+ self.release_synchronization_lock(cnx)
- def _pull_data(self, session, force=False, raise_on_error=False):
- importlog = self.init_import_log(session)
- myuris = self.source_cwuris(session)
- parser = self._get_parser(session, sourceuris=myuris, import_log=importlog)
+ def _pull_data(self, cnx, force=False, raise_on_error=False):
+ importlog = self.init_import_log(cnx)
+ myuris = self.source_cwuris(cnx)
+ parser = self._get_parser(cnx, sourceuris=myuris, import_log=importlog)
if self.process_urls(parser, self.urls, raise_on_error):
self.warning("some error occurred, don't attempt to delete entities")
else:
- parser.handle_deletion(self.config, session, myuris)
- self.update_latest_retrieval(session)
+ parser.handle_deletion(self.config, cnx, myuris)
+ self.update_latest_retrieval(cnx)
stats = parser.stats
if stats.get('created'):
importlog.record_info('added %s entities' % len(stats['created']))
if stats.get('updated'):
importlog.record_info('updated %s entities' % len(stats['updated']))
- session.set_cnxset()
- importlog.write_log(session, end_timestamp=self.latest_retrieval)
- session.commit()
+ importlog.write_log(cnx, end_timestamp=self.latest_retrieval)
+ cnx.commit()
return stats
def process_urls(self, parser, urls, raise_on_error=False):
@@ -416,11 +413,9 @@
# Check whether self._cw is a session or a connection
if getattr(self._cw, 'commit', None) is not None:
commit = self._cw.commit
- set_cnxset = self._cw.set_cnxset
rollback = self._cw.rollback
else:
commit = self._cw.cnx.commit
- set_cnxset = lambda: None
rollback = self._cw.cnx.rollback
for args in parsed:
try:
@@ -428,14 +423,12 @@
# commit+set_cnxset instead of commit(free_cnxset=False) to let
# other a chance to get our connections set
commit()
- set_cnxset()
except ValidationError as exc:
if raise_on_error:
raise
self.source.error('Skipping %s because of validation error %s'
% (args, exc))
rollback()
- set_cnxset()
error = True
return error
--- a/server/sources/native.py Fri May 23 18:35:13 2014 +0200
+++ b/server/sources/native.py Fri Jun 27 11:48:26 2014 +0200
@@ -715,7 +715,7 @@
# instance
print 'exec', query, args, getattr(cnx, '_cnx', cnx)
try:
- # str(query) to avoid error if it's an unicode string
+ # str(query) to avoid error if it's a unicode string
cursor.execute(str(query), args)
except Exception as ex:
if self.repo.config.mode != 'test':
@@ -762,7 +762,7 @@
print 'execmany', query, 'with', len(args), 'arguments'
cursor = cnx.cnxset.cu
try:
- # str(query) to avoid error if it's an unicode string
+ # str(query) to avoid error if it's a unicode string
cursor.executemany(str(query), args)
except Exception as ex:
if self.repo.config.mode != 'test':
@@ -894,32 +894,32 @@
def add_info(self, cnx, entity, source, extid):
"""add type and source info for an eid into the system table"""
- with cnx.ensure_cnx_set:
- # begin by inserting eid/type/source/extid into the entities table
- if extid is not None:
- assert isinstance(extid, str)
- extid = b64encode(extid)
- attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': extid,
- 'asource': source.uri}
- self._handle_insert_entity_sql(cnx, self.sqlgen.insert('entities', attrs), attrs)
- # insert core relations: is, is_instance_of and cw_source
- try:
- self._handle_is_relation_sql(cnx, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)',
- (entity.eid, eschema_eid(cnx, entity.e_schema)))
- except IndexError:
- # during schema serialization, skip
- pass
- else:
- for eschema in entity.e_schema.ancestors() + [entity.e_schema]:
- self._handle_is_relation_sql(cnx,
- 'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)',
- (entity.eid, eschema_eid(cnx, eschema)))
- if 'CWSource' in self.schema and source.eid is not None: # else, cw < 3.10
- self._handle_is_relation_sql(cnx, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)',
- (entity.eid, source.eid))
- # now we can update the full text index
- if self.do_fti and self.need_fti_indexation(entity.cw_etype):
- self.index_entity(cnx, entity=entity)
+ assert cnx.cnxset is not None
+ # begin by inserting eid/type/source/extid into the entities table
+ if extid is not None:
+ assert isinstance(extid, str)
+ extid = b64encode(extid)
+ attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': extid,
+ 'asource': source.uri}
+ self._handle_insert_entity_sql(cnx, self.sqlgen.insert('entities', attrs), attrs)
+ # insert core relations: is, is_instance_of and cw_source
+ try:
+ self._handle_is_relation_sql(cnx, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)',
+ (entity.eid, eschema_eid(cnx, entity.e_schema)))
+ except IndexError:
+ # during schema serialization, skip
+ pass
+ else:
+ for eschema in entity.e_schema.ancestors() + [entity.e_schema]:
+ self._handle_is_relation_sql(cnx,
+ 'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)',
+ (entity.eid, eschema_eid(cnx, eschema)))
+ if 'CWSource' in self.schema and source.eid is not None: # else, cw < 3.10
+ self._handle_is_relation_sql(cnx, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)',
+ (entity.eid, source.eid))
+ # now we can update the full text index
+ if self.do_fti and self.need_fti_indexation(entity.cw_etype):
+ self.index_entity(cnx, entity=entity)
def update_info(self, cnx, entity, need_fti_update):
"""mark entity as being modified, fulltext reindex if needed"""
--- a/server/test/unittest_checkintegrity.py Fri May 23 18:35:13 2014 +0200
+++ b/server/test/unittest_checkintegrity.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -25,41 +25,40 @@
from cubicweb.server.checkintegrity import check, reindex_entities
class CheckIntegrityTC(TestCase):
+
def setUp(self):
handler = get_test_db_handler(TestServerConfiguration(apphome=self.datadir))
handler.build_db_cache()
- self.repo, self.cnx = handler.get_repo_and_cnx()
- session = self.repo._get_session(self.cnx.sessionid, setcnxset=True)
- self.session = session
- self.execute = session.execute
+ self.repo, _cnx = handler.get_repo_and_cnx()
sys.stderr = sys.stdout = StringIO()
def tearDown(self):
sys.stderr = sys.__stderr__
sys.stdout = sys.__stdout__
- self.cnx.close()
self.repo.shutdown()
def test_checks(self):
- with self.cnx:
- check(self.repo, self.cnx, ('entities', 'relations', 'text_index', 'metadata'),
+ with self.repo.internal_cnx() as cnx:
+ check(self.repo, cnx, ('entities', 'relations', 'text_index', 'metadata'),
reindex=False, fix=True, withpb=False)
def test_reindex_all(self):
- self.execute('INSERT Personne X: X nom "toto", X prenom "tutu"')
- self.session.commit(False)
- self.assertTrue(self.execute('Any X WHERE X has_text "tutu"'))
- reindex_entities(self.repo.schema, self.session, withpb=False)
- self.assertTrue(self.execute('Any X WHERE X has_text "tutu"'))
+ with self.repo.internal_cnx() as cnx:
+ cnx.execute('INSERT Personne X: X nom "toto", X prenom "tutu"')
+ cnx.commit()
+ self.assertTrue(cnx.execute('Any X WHERE X has_text "tutu"'))
+ reindex_entities(self.repo.schema, cnx, withpb=False)
+ self.assertTrue(cnx.execute('Any X WHERE X has_text "tutu"'))
def test_reindex_etype(self):
- self.execute('INSERT Personne X: X nom "toto", X prenom "tutu"')
- self.execute('INSERT Affaire X: X ref "toto"')
- self.session.commit(False)
- reindex_entities(self.repo.schema, self.session, withpb=False,
- etypes=('Personne',))
- self.assertTrue(self.execute('Any X WHERE X has_text "tutu"'))
- self.assertTrue(self.execute('Any X WHERE X has_text "toto"'))
+ with self.repo.internal_cnx() as cnx:
+ cnx.execute('INSERT Personne X: X nom "toto", X prenom "tutu"')
+ cnx.execute('INSERT Affaire X: X ref "toto"')
+ cnx.commit()
+ reindex_entities(self.repo.schema, cnx, withpb=False,
+ etypes=('Personne',))
+ self.assertTrue(cnx.execute('Any X WHERE X has_text "tutu"'))
+ self.assertTrue(cnx.execute('Any X WHERE X has_text "toto"'))
if __name__ == '__main__':
unittest_main()
--- a/server/test/unittest_datafeed.py Fri May 23 18:35:13 2014 +0200
+++ b/server/test/unittest_datafeed.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2011-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2011-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -24,9 +24,11 @@
class DataFeedTC(CubicWebTC):
def setup_database(self):
- self.request().create_entity('CWSource', name=u'myfeed', type=u'datafeed',
- parser=u'testparser', url=u'ignored',
- config=u'synchronization-interval=1min')
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.create_entity('CWSource', name=u'myfeed', type=u'datafeed',
+ parser=u'testparser', url=u'ignored',
+ config=u'synchronization-interval=1min')
+ cnx.commit()
def test(self):
self.assertIn('myfeed', self.repo.sources_by_uri)
@@ -48,47 +50,45 @@
entity.cw_edited.update(sourceparams['item'])
with self.temporary_appobjects(AParser):
- session = self.repo.internal_session()
- stats = dfsource.pull_data(session, force=True)
- self.commit()
- # test import stats
- self.assertEqual(sorted(stats), ['checked', 'created', 'updated'])
- self.assertEqual(len(stats['created']), 1)
- entity = self.execute('Card X').get_entity(0, 0)
- self.assertIn(entity.eid, stats['created'])
- self.assertEqual(stats['updated'], set())
- # test imported entities
- self.assertEqual(entity.title, 'cubicweb.org')
- self.assertEqual(entity.content, 'the cw web site')
- self.assertEqual(entity.cwuri, 'http://www.cubicweb.org/')
- self.assertEqual(entity.cw_source[0].name, 'myfeed')
- self.assertEqual(entity.cw_metainformation(),
- {'type': 'Card',
- 'source': {'uri': 'myfeed', 'type': 'datafeed', 'use-cwuri-as-url': True},
- 'extid': 'http://www.cubicweb.org/'}
- )
- self.assertEqual(entity.absolute_url(), 'http://www.cubicweb.org/')
- # test repo cache keys
- self.assertEqual(self.repo._type_source_cache[entity.eid],
- ('Card', 'http://www.cubicweb.org/', 'myfeed'))
- self.assertEqual(self.repo._extid_cache['http://www.cubicweb.org/'],
- entity.eid)
- # test repull
- session.set_cnxset()
- stats = dfsource.pull_data(session, force=True)
- self.assertEqual(stats['created'], set())
- self.assertEqual(stats['updated'], set((entity.eid,)))
- # test repull with caches reseted
- self.repo._type_source_cache.clear()
- self.repo._extid_cache.clear()
- session.set_cnxset()
- stats = dfsource.pull_data(session, force=True)
- self.assertEqual(stats['created'], set())
- self.assertEqual(stats['updated'], set((entity.eid,)))
- self.assertEqual(self.repo._type_source_cache[entity.eid],
- ('Card', 'http://www.cubicweb.org/', 'myfeed'))
- self.assertEqual(self.repo._extid_cache['http://www.cubicweb.org/'],
- entity.eid)
+ with self.repo.internal_cnx() as cnx:
+ stats = dfsource.pull_data(cnx, force=True)
+ cnx.commit()
+ # test import stats
+ self.assertEqual(sorted(stats), ['checked', 'created', 'updated'])
+ self.assertEqual(len(stats['created']), 1)
+ entity = cnx.execute('Card X').get_entity(0, 0)
+ self.assertIn(entity.eid, stats['created'])
+ self.assertEqual(stats['updated'], set())
+ # test imported entities
+ self.assertEqual(entity.title, 'cubicweb.org')
+ self.assertEqual(entity.content, 'the cw web site')
+ self.assertEqual(entity.cwuri, 'http://www.cubicweb.org/')
+ self.assertEqual(entity.cw_source[0].name, 'myfeed')
+ self.assertEqual(entity.cw_metainformation(),
+ {'type': 'Card',
+ 'source': {'uri': 'myfeed', 'type': 'datafeed', 'use-cwuri-as-url': True},
+ 'extid': 'http://www.cubicweb.org/'}
+ )
+ self.assertEqual(entity.absolute_url(), 'http://www.cubicweb.org/')
+ # test repo cache keys
+ self.assertEqual(self.repo._type_source_cache[entity.eid],
+ ('Card', 'http://www.cubicweb.org/', 'myfeed'))
+ self.assertEqual(self.repo._extid_cache['http://www.cubicweb.org/'],
+ entity.eid)
+ # test repull
+ stats = dfsource.pull_data(cnx, force=True)
+ self.assertEqual(stats['created'], set())
+ self.assertEqual(stats['updated'], set((entity.eid,)))
+            # test repull with caches reset
+ self.repo._type_source_cache.clear()
+ self.repo._extid_cache.clear()
+ stats = dfsource.pull_data(cnx, force=True)
+ self.assertEqual(stats['created'], set())
+ self.assertEqual(stats['updated'], set((entity.eid,)))
+ self.assertEqual(self.repo._type_source_cache[entity.eid],
+ ('Card', 'http://www.cubicweb.org/', 'myfeed'))
+ self.assertEqual(self.repo._extid_cache['http://www.cubicweb.org/'],
+ entity.eid)
self.assertEqual(dfsource.source_cwuris(self.session),
{'http://www.cubicweb.org/': (entity.eid, 'Card')}
@@ -97,28 +97,27 @@
self.assertTrue(dfsource.fresh())
# test_rename_source
- req = self.request()
- req.execute('SET S name "myrenamedfeed" WHERE S is CWSource, S name "myfeed"')
- self.commit()
- entity = self.execute('Card X').get_entity(0, 0)
- self.assertEqual(entity.cwuri, 'http://www.cubicweb.org/')
- self.assertEqual(entity.cw_source[0].name, 'myrenamedfeed')
- self.assertEqual(entity.cw_metainformation(),
- {'type': 'Card',
- 'source': {'uri': 'myrenamedfeed', 'type': 'datafeed', 'use-cwuri-as-url': True},
- 'extid': 'http://www.cubicweb.org/'}
- )
- self.assertEqual(self.repo._type_source_cache[entity.eid],
- ('Card', 'http://www.cubicweb.org/', 'myrenamedfeed'))
- self.assertEqual(self.repo._extid_cache['http://www.cubicweb.org/'],
- entity.eid)
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute('SET S name "myrenamedfeed" WHERE S is CWSource, S name "myfeed"')
+ cnx.commit()
+ entity = cnx.execute('Card X').get_entity(0, 0)
+ self.assertEqual(entity.cwuri, 'http://www.cubicweb.org/')
+ self.assertEqual(entity.cw_source[0].name, 'myrenamedfeed')
+ self.assertEqual(entity.cw_metainformation(),
+ {'type': 'Card',
+ 'source': {'uri': 'myrenamedfeed', 'type': 'datafeed', 'use-cwuri-as-url': True},
+ 'extid': 'http://www.cubicweb.org/'}
+ )
+ self.assertEqual(self.repo._type_source_cache[entity.eid],
+ ('Card', 'http://www.cubicweb.org/', 'myrenamedfeed'))
+ self.assertEqual(self.repo._extid_cache['http://www.cubicweb.org/'],
+ entity.eid)
- # test_delete_source
- req = self.request()
- req.execute('DELETE CWSource S WHERE S name "myrenamedfeed"')
- self.commit()
- self.assertFalse(self.execute('Card X WHERE X title "cubicweb.org"'))
- self.assertFalse(self.execute('Any X WHERE X has_text "cubicweb.org"'))
+ # test_delete_source
+ cnx.execute('DELETE CWSource S WHERE S name "myrenamedfeed"')
+ cnx.commit()
+ self.assertFalse(cnx.execute('Card X WHERE X title "cubicweb.org"'))
+ self.assertFalse(cnx.execute('Any X WHERE X has_text "cubicweb.org"'))
if __name__ == '__main__':
from logilab.common.testlib import unittest_main
--- a/server/test/unittest_hook.py Fri May 23 18:35:13 2014 +0200
+++ b/server/test/unittest_hook.py Fri Jun 27 11:48:26 2014 +0200
@@ -25,46 +25,35 @@
from cubicweb.server import hook
from cubicweb.hooks import integrity, syncschema
-def clean_session_ops(func):
- def wrapper(self, *args, **kwargs):
- try:
- return func(self, *args, **kwargs)
- finally:
- self.session.pending_operations[:] = []
- return wrapper
-
class OperationsTC(CubicWebTC):
def setUp(self):
CubicWebTC.setUp(self)
self.hm = self.repo.hm
- @clean_session_ops
def test_late_operation(self):
- session = self.session
- l1 = hook.LateOperation(session)
- l2 = hook.LateOperation(session)
- l3 = hook.Operation(session)
- self.assertEqual(session.pending_operations, [l3, l1, l2])
+ with self.admin_access.repo_cnx() as cnx:
+ l1 = hook.LateOperation(cnx)
+ l2 = hook.LateOperation(cnx)
+ l3 = hook.Operation(cnx)
+ self.assertEqual(cnx.pending_operations, [l3, l1, l2])
- @clean_session_ops
def test_single_last_operation(self):
- session = self.session
- l0 = hook.SingleLastOperation(session)
- l1 = hook.LateOperation(session)
- l2 = hook.LateOperation(session)
- l3 = hook.Operation(session)
- self.assertEqual(session.pending_operations, [l3, l1, l2, l0])
- l4 = hook.SingleLastOperation(session)
- self.assertEqual(session.pending_operations, [l3, l1, l2, l4])
+ with self.admin_access.repo_cnx() as cnx:
+ l0 = hook.SingleLastOperation(cnx)
+ l1 = hook.LateOperation(cnx)
+ l2 = hook.LateOperation(cnx)
+ l3 = hook.Operation(cnx)
+ self.assertEqual(cnx.pending_operations, [l3, l1, l2, l0])
+ l4 = hook.SingleLastOperation(cnx)
+ self.assertEqual(cnx.pending_operations, [l3, l1, l2, l4])
- @clean_session_ops
def test_global_operation_order(self):
- session = self.session
- op1 = syncschema.RDefDelOp(session)
- op2 = integrity._CheckORelationOp(session)
- op3 = syncschema.MemSchemaNotifyChanges(session)
- self.assertEqual([op1, op2, op3], session.pending_operations)
+ with self.admin_access.repo_cnx() as cnx:
+ op1 = syncschema.RDefDelOp(cnx)
+ op2 = integrity._CheckORelationOp(cnx)
+ op3 = syncschema.MemSchemaNotifyChanges(cnx)
+ self.assertEqual([op1, op2, op3], cnx.pending_operations)
class HookCalled(Exception): pass
@@ -139,9 +128,10 @@
def test_session_open_close(self):
import hooks # cubicweb/server/test/data/hooks.py
- cnx = self.login('anon')
- self.assertEqual(hooks.CALLED_EVENTS['session_open'], 'anon')
- cnx.close()
+ anonaccess = self.new_access('anon')
+ with anonaccess.repo_cnx() as cnx:
+ self.assertEqual(hooks.CALLED_EVENTS['session_open'], 'anon')
+ anonaccess.close()
self.assertEqual(hooks.CALLED_EVENTS['session_close'], 'anon')
--- a/server/test/unittest_ldapsource.py Fri May 23 18:35:13 2014 +0200
+++ b/server/test/unittest_ldapsource.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -31,7 +31,6 @@
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.devtools.repotest import RQLGeneratorTC
from cubicweb.devtools.httptest import get_available_port
-from cubicweb.devtools import get_test_db_handler
CONFIG_LDAPFEED = u'''
@@ -123,32 +122,29 @@
pass
@classmethod
- def pre_setup_database(cls, session, config):
- session.create_entity('CWSource', name=u'ldap', type=u'ldapfeed', parser=u'ldapfeed',
- url=URL, config=CONFIG_LDAPFEED)
+ def pre_setup_database(cls, cnx, config):
+ cnx.create_entity('CWSource', name=u'ldap', type=u'ldapfeed', parser=u'ldapfeed',
+ url=URL, config=CONFIG_LDAPFEED)
- session.commit()
- return cls._pull(session)
+ cnx.commit()
+ return cls.pull(cnx)
@classmethod
- def _pull(cls, session):
- with session.repo.internal_session() as isession:
- lfsource = isession.repo.sources_by_uri['ldap']
- stats = lfsource.pull_data(isession, force=True, raise_on_error=True)
- isession.commit()
- return stats
-
- def pull(self):
- return self._pull(self.session)
+ def pull(self, cnx):
+ lfsource = cnx.repo.sources_by_uri['ldap']
+ stats = lfsource.pull_data(cnx, force=True, raise_on_error=True)
+ cnx.commit()
+ return stats
def setup_database(self):
- with self.session.repo.internal_session(safe=True) as session:
- session.execute('DELETE Any E WHERE E cw_source S, S name "ldap"')
- session.execute('SET S config %(conf)s, S url %(url)s '
- 'WHERE S is CWSource, S name "ldap"',
- {"conf": CONFIG_LDAPFEED, 'url': URL} )
- session.commit()
- self.pull()
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute('DELETE Any E WHERE E cw_source S, S name "ldap"')
+ cnx.execute('SET S config %(conf)s, S url %(url)s '
+ 'WHERE S is CWSource, S name "ldap"',
+ {"conf": CONFIG_LDAPFEED, 'url': URL} )
+ cnx.commit()
+ with self.repo.internal_cnx() as cnx:
+ self.pull(cnx)
def add_ldap_entry(self, dn, mods):
"""
@@ -201,16 +197,16 @@
"""
def test_wrong_group(self):
- with self.session.repo.internal_session(safe=True) as session:
- source = self.session.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0,0)
+ with self.admin_access.repo_cnx() as cnx:
+ source = cnx.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0,0)
config = source.repo_source.check_config(source)
# inject a bogus group here, along with at least a valid one
config['user-default-group'] = ('thisgroupdoesnotexists','users')
source.repo_source.update_config(source, config)
- session.commit(free_cnxset=False)
+ cnx.commit()
# here we emitted an error log entry
- stats = source.repo_source.pull_data(session, force=True, raise_on_error=True)
- session.commit()
+ stats = source.repo_source.pull_data(cnx, force=True, raise_on_error=True)
+ cnx.commit()
@@ -225,119 +221,131 @@
def test_authenticate(self):
source = self.repo.sources_by_uri['ldap']
- self.session.set_cnxset()
- # ensure we won't be logged against
- self.assertRaises(AuthenticationError,
- source.authenticate, self.session, 'toto', 'toto')
- self.assertTrue(source.authenticate(self.session, 'syt', 'syt'))
- self.assertTrue(self.repo.connect('syt', password='syt'))
+ with self.admin_access.repo_cnx() as cnx:
+            # ensure we cannot log in with wrong credentials
+ self.assertRaises(AuthenticationError,
+ source.authenticate, cnx, 'toto', 'toto')
+ self.assertTrue(source.authenticate(cnx, 'syt', 'syt'))
+ sessionid = self.repo.connect('syt', password='syt')
+ self.assertTrue(sessionid)
+ self.repo.close(sessionid)
def test_base(self):
- # check a known one
- rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})
- e = rset.get_entity(0, 0)
- self.assertEqual(e.login, 'syt')
- e.complete()
- self.assertMetadata(e)
- self.assertEqual(e.firstname, None)
- self.assertEqual(e.surname, None)
- self.assertIn('users', set(g.name for g in e.in_group))
- self.assertEqual(e.owned_by[0].login, 'syt')
- self.assertEqual(e.created_by, ())
- addresses = [pe.address for pe in e.use_email]
- addresses.sort()
- self.assertEqual(['sylvain.thenault@logilab.fr', 'syt@logilab.fr'],
- addresses)
- self.assertIn(e.primary_email[0].address, ['sylvain.thenault@logilab.fr',
- 'syt@logilab.fr'])
- # email content should be indexed on the user
- rset = self.sexecute('CWUser X WHERE X has_text "thenault"')
- self.assertEqual(rset.rows, [[e.eid]])
+ with self.admin_access.repo_cnx() as cnx:
+ # check a known one
+ rset = cnx.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'})
+ e = rset.get_entity(0, 0)
+ self.assertEqual(e.login, 'syt')
+ e.complete()
+ self.assertMetadata(e)
+ self.assertEqual(e.firstname, None)
+ self.assertEqual(e.surname, None)
+ self.assertIn('users', set(g.name for g in e.in_group))
+ self.assertEqual(e.owned_by[0].login, 'syt')
+ self.assertEqual(e.created_by, ())
+ addresses = [pe.address for pe in e.use_email]
+ addresses.sort()
+ self.assertEqual(['sylvain.thenault@logilab.fr', 'syt@logilab.fr'],
+ addresses)
+ self.assertIn(e.primary_email[0].address, ['sylvain.thenault@logilab.fr',
+ 'syt@logilab.fr'])
+ # email content should be indexed on the user
+ rset = cnx.execute('CWUser X WHERE X has_text "thenault"')
+ self.assertEqual(rset.rows, [[e.eid]])
def test_copy_to_system_source(self):
"make sure we can 'convert' an LDAP user into a system one"
- source = self.repo.sources_by_uri['ldap']
- eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
- self.sexecute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': eid})
- self.commit()
- source.reset_caches()
- rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})
- self.assertEqual(len(rset), 1)
- e = rset.get_entity(0, 0)
- self.assertEqual(e.eid, eid)
- self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native',
- 'uri': u'system',
- 'use-cwuri-as-url': False},
- 'type': 'CWUser',
- 'extid': None})
- self.assertEqual(e.cw_source[0].name, 'system')
- self.assertTrue(e.creation_date)
- self.assertTrue(e.modification_date)
- source.pull_data(self.session)
- rset = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})
- self.assertEqual(len(rset), 1)
- self.assertTrue(self.repo.system_source.authenticate(
- self.session, 'syt', password='syt'))
- # make sure the pull from ldap have not "reverted" user as a ldap-feed user
- self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native',
- 'uri': u'system',
- 'use-cwuri-as-url': False},
- 'type': 'CWUser',
- 'extid': None})
- # and that the password stored in the system source is not empty or so
- user = self.execute('CWUser U WHERE U login "syt"').get_entity(0, 0)
- user.cw_clear_all_caches()
- pwd = self.session.system_sql("SELECT cw_upassword FROM cw_cwuser WHERE cw_login='syt';").fetchall()[0][0]
- self.assertIsNotNone(pwd)
- self.assertTrue(str(pwd))
+ with self.admin_access.repo_cnx() as cnx:
+ source = self.repo.sources_by_uri['ldap']
+ eid = cnx.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
+ cnx.execute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': eid})
+ cnx.commit()
+ source.reset_caches()
+ rset = cnx.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'})
+ self.assertEqual(len(rset), 1)
+ e = rset.get_entity(0, 0)
+ self.assertEqual(e.eid, eid)
+ self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native',
+ 'uri': u'system',
+ 'use-cwuri-as-url': False},
+ 'type': 'CWUser',
+ 'extid': None})
+ self.assertEqual(e.cw_source[0].name, 'system')
+ self.assertTrue(e.creation_date)
+ self.assertTrue(e.modification_date)
+ source.pull_data(cnx)
+ rset = cnx.execute('CWUser X WHERE X login %(login)s', {'login': 'syt'})
+ self.assertEqual(len(rset), 1)
+ self.assertTrue(self.repo.system_source.authenticate(cnx, 'syt', password='syt'))
+            # make sure the pull from ldap has not "reverted" the user to an ldap-feed user
+ self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native',
+ 'uri': u'system',
+ 'use-cwuri-as-url': False},
+ 'type': 'CWUser',
+ 'extid': None})
+            # and that the password stored in the system source is not empty
+ user = cnx.execute('CWUser U WHERE U login "syt"').get_entity(0, 0)
+ user.cw_clear_all_caches()
+ pwd = cnx.system_sql("SELECT cw_upassword FROM cw_cwuser WHERE cw_login='syt';").fetchall()[0][0]
+ self.assertIsNotNone(pwd)
+ self.assertTrue(str(pwd))
class LDAPFeedUserDeletionTC(LDAPFeedTestBase):
"""
A testcase for situations where users are deleted from or
- unavailabe in the LDAP database.
+ unavailable in the LDAP database.
"""
+
def test_a_filter_inactivate(self):
""" filtered out people should be deactivated, unable to authenticate """
- source = self.session.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0,0)
- config = source.repo_source.check_config(source)
- # filter with adim's phone number
- config['user-filter'] = u'(%s=%s)' % ('telephoneNumber', '109')
- source.repo_source.update_config(source, config)
- self.commit()
- self.pull()
+ with self.admin_access.repo_cnx() as cnx:
+ source = cnx.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0,0)
+ config = source.repo_source.check_config(source)
+ # filter with adim's phone number
+ config['user-filter'] = u'(%s=%s)' % ('telephoneNumber', '109')
+ source.repo_source.update_config(source, config)
+ cnx.commit()
+ with self.repo.internal_cnx() as cnx:
+ self.pull(cnx)
self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt')
- self.assertEqual(self.execute('Any N WHERE U login "syt", '
- 'U in_state S, S name N').rows[0][0],
- 'deactivated')
- self.assertEqual(self.execute('Any N WHERE U login "adim", '
- 'U in_state S, S name N').rows[0][0],
- 'activated')
- # unfilter, syt should be activated again
- config['user-filter'] = u''
- source.repo_source.update_config(source, config)
- self.commit()
- self.pull()
- self.assertEqual(self.execute('Any N WHERE U login "syt", '
- 'U in_state S, S name N').rows[0][0],
- 'activated')
- self.assertEqual(self.execute('Any N WHERE U login "adim", '
- 'U in_state S, S name N').rows[0][0],
- 'activated')
+ with self.admin_access.repo_cnx() as cnx:
+ self.assertEqual(cnx.execute('Any N WHERE U login "syt", '
+ 'U in_state S, S name N').rows[0][0],
+ 'deactivated')
+ self.assertEqual(cnx.execute('Any N WHERE U login "adim", '
+ 'U in_state S, S name N').rows[0][0],
+ 'activated')
+ # unfilter, syt should be activated again
+ config['user-filter'] = u''
+ source.repo_source.update_config(source, config)
+ cnx.commit()
+ with self.repo.internal_cnx() as cnx:
+ self.pull(cnx)
+ with self.admin_access.repo_cnx() as cnx:
+ self.assertEqual(cnx.execute('Any N WHERE U login "syt", '
+ 'U in_state S, S name N').rows[0][0],
+ 'activated')
+ self.assertEqual(cnx.execute('Any N WHERE U login "adim", '
+ 'U in_state S, S name N').rows[0][0],
+ 'activated')
def test_delete(self):
""" delete syt, pull, check deactivation, repull,
        re-add syt, pull, check activation
"""
self.delete_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test')
- self.pull()
+ with self.repo.internal_cnx() as cnx:
+ self.pull(cnx)
self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt')
- self.assertEqual(self.execute('Any N WHERE U login "syt", '
- 'U in_state S, S name N').rows[0][0],
- 'deactivated')
- # check that it doesn't choke
- self.pull()
+ with self.admin_access.repo_cnx() as cnx:
+ self.assertEqual(cnx.execute('Any N WHERE U login "syt", '
+ 'U in_state S, S name N').rows[0][0],
+ 'deactivated')
+ with self.repo.internal_cnx() as cnx:
+ # check that it doesn't choke
+ self.pull(cnx)
# reinsert syt
self.add_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test',
{ 'objectClass': ['OpenLDAPperson','posixAccount','top','shadowAccount'],
@@ -354,31 +362,38 @@
'gecos': 'Sylvain Thenault',
'mail': ['sylvain.thenault@logilab.fr','syt@logilab.fr'],
'userPassword': 'syt',
- })
- self.pull()
- self.assertEqual(self.execute('Any N WHERE U login "syt", '
- 'U in_state S, S name N').rows[0][0],
- 'activated')
+ })
+ with self.repo.internal_cnx() as cnx:
+ self.pull(cnx)
+ with self.admin_access.repo_cnx() as cnx:
+ self.assertEqual(cnx.execute('Any N WHERE U login "syt", '
+ 'U in_state S, S name N').rows[0][0],
+ 'activated')
def test_reactivate_deleted(self):
        # test that reactivating the user BY HAND isn't enough to
        # authenticate, as the native source refuses to authenticate
        # users from other sources
self.delete_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test')
- self.pull()
- # reactivate user (which source is still ldap-feed)
- user = self.execute('CWUser U WHERE U login "syt"').get_entity(0, 0)
- user.cw_adapt_to('IWorkflowable').fire_transition('activate')
- self.commit()
- with self.assertRaises(AuthenticationError):
- self.repo.connect('syt', password='syt')
+ with self.repo.internal_cnx() as cnx:
+ self.pull(cnx)
+ with self.admin_access.repo_cnx() as cnx:
+            # reactivate user (whose source is still ldap-feed)
+ user = cnx.execute('CWUser U WHERE U login "syt"').get_entity(0, 0)
+ user.cw_adapt_to('IWorkflowable').fire_transition('activate')
+ cnx.commit()
+ with self.assertRaises(AuthenticationError):
+ self.repo.connect('syt', password='syt')
- # ok now let's try to make it a system user
- self.sexecute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': user.eid})
- self.commit()
+ # ok now let's try to make it a system user
+ cnx.execute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': user.eid})
+ cnx.commit()
# and that we can now authenticate again
self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='toto')
- self.assertTrue(self.repo.connect('syt', password='syt'))
+ sessionid = self.repo.connect('syt', password='syt')
+ self.assertTrue(sessionid)
+ self.repo.close(sessionid)
+
class LDAPFeedGroupTC(LDAPFeedTestBase):
"""
@@ -386,44 +401,51 @@
"""
def test_groups_exist(self):
- rset = self.sexecute('CWGroup X WHERE X name "dir"')
- self.assertEqual(len(rset), 1)
+ with self.admin_access.repo_cnx() as cnx:
+ rset = cnx.execute('CWGroup X WHERE X name "dir"')
+ self.assertEqual(len(rset), 1)
- rset = self.sexecute('CWGroup X WHERE X cw_source S, S name "ldap"')
- self.assertEqual(len(rset), 2)
+ rset = cnx.execute('CWGroup X WHERE X cw_source S, S name "ldap"')
+ self.assertEqual(len(rset), 2)
def test_group_deleted(self):
- rset = self.sexecute('CWGroup X WHERE X name "dir"')
- self.assertEqual(len(rset), 1)
+ with self.admin_access.repo_cnx() as cnx:
+ rset = cnx.execute('CWGroup X WHERE X name "dir"')
+ self.assertEqual(len(rset), 1)
def test_in_group(self):
- rset = self.sexecute('CWGroup X WHERE X name %(name)s', {'name': 'dir'})
- dirgroup = rset.get_entity(0, 0)
- self.assertEqual(set(['syt', 'adim']),
- set([u.login for u in dirgroup.reverse_in_group]))
- rset = self.sexecute('CWGroup X WHERE X name %(name)s', {'name': 'logilab'})
- logilabgroup = rset.get_entity(0, 0)
- self.assertEqual(set(['adim']),
- set([u.login for u in logilabgroup.reverse_in_group]))
+ with self.admin_access.repo_cnx() as cnx:
+ rset = cnx.execute('CWGroup X WHERE X name %(name)s', {'name': 'dir'})
+ dirgroup = rset.get_entity(0, 0)
+ self.assertEqual(set(['syt', 'adim']),
+ set([u.login for u in dirgroup.reverse_in_group]))
+ rset = cnx.execute('CWGroup X WHERE X name %(name)s', {'name': 'logilab'})
+ logilabgroup = rset.get_entity(0, 0)
+ self.assertEqual(set(['adim']),
+ set([u.login for u in logilabgroup.reverse_in_group]))
def test_group_member_added(self):
- self.pull()
- rset = self.sexecute('Any L WHERE U in_group G, G name %(name)s, U login L',
- {'name': 'logilab'})
- self.assertEqual(len(rset), 1)
- self.assertEqual(rset[0][0], 'adim')
+ with self.repo.internal_cnx() as cnx:
+ self.pull(cnx)
+ with self.admin_access.repo_cnx() as cnx:
+ rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L',
+ {'name': 'logilab'})
+ self.assertEqual(len(rset), 1)
+ self.assertEqual(rset[0][0], 'adim')
try:
self.update_ldap_entry('cn=logilab,ou=Group,dc=cubicweb,dc=test',
- {('add', 'memberUid'): ['syt']})
+ {('add', 'memberUid'): ['syt']})
time.sleep(1.1) # timestamps precision is 1s
- self.pull()
+ with self.repo.internal_cnx() as cnx:
+ self.pull(cnx)
- rset = self.sexecute('Any L WHERE U in_group G, G name %(name)s, U login L',
- {'name': 'logilab'})
- self.assertEqual(len(rset), 2)
- members = set([u[0] for u in rset])
- self.assertEqual(set(['adim', 'syt']), members)
+ with self.admin_access.repo_cnx() as cnx:
+ rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L',
+ {'name': 'logilab'})
+ self.assertEqual(len(rset), 2)
+ members = set([u[0] for u in rset])
+ self.assertEqual(set(['adim', 'syt']), members)
finally:
# back to normal ldap setup
@@ -431,21 +453,25 @@
self.setUpClass()
def test_group_member_deleted(self):
- self.pull() # ensure we are sync'ed
- rset = self.sexecute('Any L WHERE U in_group G, G name %(name)s, U login L',
- {'name': 'logilab'})
- self.assertEqual(len(rset), 1)
- self.assertEqual(rset[0][0], 'adim')
+ with self.repo.internal_cnx() as cnx:
+ self.pull(cnx) # ensure we are sync'ed
+ with self.admin_access.repo_cnx() as cnx:
+ rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L',
+ {'name': 'logilab'})
+ self.assertEqual(len(rset), 1)
+ self.assertEqual(rset[0][0], 'adim')
try:
self.update_ldap_entry('cn=logilab,ou=Group,dc=cubicweb,dc=test',
{('delete', 'memberUid'): ['adim']})
time.sleep(1.1) # timestamps precision is 1s
- self.pull()
+ with self.repo.internal_cnx() as cnx:
+ self.pull(cnx)
- rset = self.sexecute('Any L WHERE U in_group G, G name %(name)s, U login L',
- {'name': 'logilab'})
- self.assertEqual(len(rset), 0)
+ with self.admin_access.repo_cnx() as cnx:
+ rset = cnx.execute('Any L WHERE U in_group G, G name %(name)s, U login L',
+ {'name': 'logilab'})
+ self.assertEqual(len(rset), 0)
finally:
# back to normal ldap setup
self.tearDownClass()
--- a/server/test/unittest_migractions.py Fri May 23 18:35:13 2014 +0200
+++ b/server/test/unittest_migractions.py Fri Jun 27 11:48:26 2014 +0200
@@ -19,16 +19,16 @@
from datetime import date
from os.path import join
+from contextlib import contextmanager
-from logilab.common.testlib import TestCase, unittest_main, Tags, tag
+from logilab.common.testlib import unittest_main, Tags, tag
from yams.constraints import UniqueConstraint
from cubicweb import ConfigurationError, ValidationError
from cubicweb.devtools.testlib import CubicWebTC
-from cubicweb.schema import CubicWebSchemaLoader
from cubicweb.server.sqlutils import SQL_PREFIX
-from cubicweb.server.migractions import *
+from cubicweb.server.migractions import ServerMigrationHelper
import cubicweb.devtools
@@ -61,467 +61,485 @@
def setUp(self):
CubicWebTC.setUp(self)
- self.mh = ServerMigrationHelper(self.repo.config, migrschema,
- repo=self.repo, cnx=self.cnx,
- interactive=False)
- assert self.cnx is self.mh.cnx
- assert self.session is self.mh.session, (self.session.id, self.mh.session.id)
def tearDown(self):
CubicWebTC.tearDown(self)
self.repo.vreg['etypes'].clear_caches()
+ @contextmanager
+ def mh(self):
+ with self.admin_access.client_cnx() as cnx:
+ yield cnx, ServerMigrationHelper(self.repo.config, migrschema,
+ repo=self.repo, cnx=cnx,
+ interactive=False)
+
def test_add_attribute_bool(self):
- self.assertNotIn('yesno', self.schema)
- self.session.create_entity('Note')
- self.commit()
- self.mh.cmd_add_attribute('Note', 'yesno')
- self.assertIn('yesno', self.schema)
- self.assertEqual(self.schema['yesno'].subjects(), ('Note',))
- self.assertEqual(self.schema['yesno'].objects(), ('Boolean',))
- self.assertEqual(self.schema['Note'].default('yesno'), False)
- # test default value set on existing entities
- note = self.session.execute('Note X').get_entity(0, 0)
- self.assertEqual(note.yesno, False)
- # test default value set for next entities
- self.assertEqual(self.session.create_entity('Note').yesno, False)
- self.mh.rollback()
+ with self.mh() as (cnx, mh):
+ self.assertNotIn('yesno', self.schema)
+ cnx.create_entity('Note')
+ cnx.commit()
+ mh.cmd_add_attribute('Note', 'yesno')
+ self.assertIn('yesno', self.schema)
+ self.assertEqual(self.schema['yesno'].subjects(), ('Note',))
+ self.assertEqual(self.schema['yesno'].objects(), ('Boolean',))
+ self.assertEqual(self.schema['Note'].default('yesno'), False)
+ # test default value set on existing entities
+ note = cnx.execute('Note X').get_entity(0, 0)
+ self.assertEqual(note.yesno, False)
+ # test default value set for next entities
+ self.assertEqual(cnx.create_entity('Note').yesno, False)
def test_add_attribute_int(self):
- self.assertNotIn('whatever', self.schema)
- self.session.create_entity('Note')
- self.session.commit(free_cnxset=False)
- orderdict = dict(self.mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, '
+ with self.mh() as (cnx, mh):
+ self.assertNotIn('whatever', self.schema)
+ cnx.create_entity('Note')
+ cnx.commit()
+ orderdict = dict(mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, '
+ 'RDEF relation_type RT, RDEF ordernum O, RT name RTN'))
+ mh.cmd_add_attribute('Note', 'whatever')
+ self.assertIn('whatever', self.schema)
+ self.assertEqual(self.schema['whatever'].subjects(), ('Note',))
+ self.assertEqual(self.schema['whatever'].objects(), ('Int',))
+ self.assertEqual(self.schema['Note'].default('whatever'), 0)
+ # test default value set on existing entities
+ note = cnx.execute('Note X').get_entity(0, 0)
+ self.assertIsInstance(note.whatever, int)
+ self.assertEqual(note.whatever, 0)
+ # test default value set for next entities
+ self.assertEqual(cnx.create_entity('Note').whatever, 0)
+ # test attribute order
+ orderdict2 = dict(mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, '
'RDEF relation_type RT, RDEF ordernum O, RT name RTN'))
- self.mh.cmd_add_attribute('Note', 'whatever')
- self.assertIn('whatever', self.schema)
- self.assertEqual(self.schema['whatever'].subjects(), ('Note',))
- self.assertEqual(self.schema['whatever'].objects(), ('Int',))
- self.assertEqual(self.schema['Note'].default('whatever'), 0)
- # test default value set on existing entities
- note = self.session.execute('Note X').get_entity(0, 0)
- self.assertIsInstance(note.whatever, int)
- self.assertEqual(note.whatever, 0)
- # test default value set for next entities
- self.assertEqual(self.session.create_entity('Note').whatever, 0)
- # test attribute order
- orderdict2 = dict(self.mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, '
- 'RDEF relation_type RT, RDEF ordernum O, RT name RTN'))
- whateverorder = migrschema['whatever'].rdef('Note', 'Int').order
- for k, v in orderdict.iteritems():
- if v >= whateverorder:
- orderdict[k] = v+1
- orderdict['whatever'] = whateverorder
- self.assertDictEqual(orderdict, orderdict2)
- #self.assertEqual([r.type for r in self.schema['Note'].ordered_relations()],
- # ['modification_date', 'creation_date', 'owned_by',
- # 'eid', 'ecrit_par', 'inline1', 'date', 'type',
- # 'whatever', 'date', 'in_basket'])
- # NB: commit instead of rollback make following test fail with py2.5
- # this sounds like a pysqlite/2.5 bug (the same eid is affected to
- # two different entities)
- self.mh.rollback()
+ whateverorder = migrschema['whatever'].rdef('Note', 'Int').order
+ for k, v in orderdict.iteritems():
+ if v >= whateverorder:
+ orderdict[k] = v+1
+ orderdict['whatever'] = whateverorder
+ self.assertDictEqual(orderdict, orderdict2)
+ #self.assertEqual([r.type for r in self.schema['Note'].ordered_relations()],
+ # ['modification_date', 'creation_date', 'owned_by',
+ # 'eid', 'ecrit_par', 'inline1', 'date', 'type',
+ # 'whatever', 'date', 'in_basket'])
+            # NB: commit instead of rollback makes the following test fail with py2.5
+            # this sounds like a pysqlite/2.5 bug (the same eid is assigned to
+            # two different entities)
def test_add_attribute_varchar(self):
- self.assertNotIn('whatever', self.schema)
- self.session.create_entity('Note')
- self.session.commit(free_cnxset=False)
- self.assertNotIn('shortpara', self.schema)
- self.mh.cmd_add_attribute('Note', 'shortpara')
- self.assertIn('shortpara', self.schema)
- self.assertEqual(self.schema['shortpara'].subjects(), ('Note', ))
- self.assertEqual(self.schema['shortpara'].objects(), ('String', ))
- # test created column is actually a varchar(64)
- notesql = self.mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' and name='%sNote'" % SQL_PREFIX)[0][0]
- fields = dict(x.strip().split()[:2] for x in notesql.split('(', 1)[1].rsplit(')', 1)[0].split(','))
- self.assertEqual(fields['%sshortpara' % SQL_PREFIX], 'varchar(64)')
- # test default value set on existing entities
- self.assertEqual(self.session.execute('Note X').get_entity(0, 0).shortpara, 'hop')
- # test default value set for next entities
- self.assertEqual(self.session.create_entity('Note').shortpara, 'hop')
- self.mh.rollback()
+ with self.mh() as (cnx, mh):
+ self.assertNotIn('whatever', self.schema)
+ cnx.create_entity('Note')
+ cnx.commit()
+ self.assertNotIn('shortpara', self.schema)
+ mh.cmd_add_attribute('Note', 'shortpara')
+ self.assertIn('shortpara', self.schema)
+ self.assertEqual(self.schema['shortpara'].subjects(), ('Note', ))
+ self.assertEqual(self.schema['shortpara'].objects(), ('String', ))
+ # test created column is actually a varchar(64)
+ notesql = mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' and name='%sNote'" % SQL_PREFIX)[0][0]
+ fields = dict(x.strip().split()[:2] for x in notesql.split('(', 1)[1].rsplit(')', 1)[0].split(','))
+ self.assertEqual(fields['%sshortpara' % SQL_PREFIX], 'varchar(64)')
+ # test default value set on existing entities
+ self.assertEqual(cnx.execute('Note X').get_entity(0, 0).shortpara, 'hop')
+ # test default value set for next entities
+ self.assertEqual(cnx.create_entity('Note').shortpara, 'hop')
def test_add_datetime_with_default_value_attribute(self):
- self.assertNotIn('mydate', self.schema)
- self.assertNotIn('oldstyledefaultdate', self.schema)
- self.assertNotIn('newstyledefaultdate', self.schema)
- self.mh.cmd_add_attribute('Note', 'mydate')
- self.mh.cmd_add_attribute('Note', 'oldstyledefaultdate')
- self.mh.cmd_add_attribute('Note', 'newstyledefaultdate')
- self.assertIn('mydate', self.schema)
- self.assertIn('oldstyledefaultdate', self.schema)
- self.assertIn('newstyledefaultdate', self.schema)
- self.assertEqual(self.schema['mydate'].subjects(), ('Note', ))
- self.assertEqual(self.schema['mydate'].objects(), ('Date', ))
- testdate = date(2005, 12, 13)
- eid1 = self.mh.rqlexec('INSERT Note N')[0][0]
- eid2 = self.mh.rqlexec('INSERT Note N: N mydate %(mydate)s', {'mydate' : testdate})[0][0]
- d1 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid1})[0][0]
- d2 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid2})[0][0]
- d3 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X oldstyledefaultdate D', {'x': eid1})[0][0]
- d4 = self.mh.rqlexec('Any D WHERE X eid %(x)s, X newstyledefaultdate D', {'x': eid1})[0][0]
- self.assertEqual(d1, date.today())
- self.assertEqual(d2, testdate)
- myfavoritedate = date(2013, 1, 1)
- self.assertEqual(d3, myfavoritedate)
- self.assertEqual(d4, myfavoritedate)
- self.mh.rollback()
+ with self.mh() as (cnx, mh):
+ self.assertNotIn('mydate', self.schema)
+ self.assertNotIn('oldstyledefaultdate', self.schema)
+ self.assertNotIn('newstyledefaultdate', self.schema)
+ mh.cmd_add_attribute('Note', 'mydate')
+ mh.cmd_add_attribute('Note', 'oldstyledefaultdate')
+ mh.cmd_add_attribute('Note', 'newstyledefaultdate')
+ self.assertIn('mydate', self.schema)
+ self.assertIn('oldstyledefaultdate', self.schema)
+ self.assertIn('newstyledefaultdate', self.schema)
+ self.assertEqual(self.schema['mydate'].subjects(), ('Note', ))
+ self.assertEqual(self.schema['mydate'].objects(), ('Date', ))
+ testdate = date(2005, 12, 13)
+ eid1 = mh.rqlexec('INSERT Note N')[0][0]
+ eid2 = mh.rqlexec('INSERT Note N: N mydate %(mydate)s', {'mydate' : testdate})[0][0]
+ d1 = mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid1})[0][0]
+ d2 = mh.rqlexec('Any D WHERE X eid %(x)s, X mydate D', {'x': eid2})[0][0]
+ d3 = mh.rqlexec('Any D WHERE X eid %(x)s, X oldstyledefaultdate D', {'x': eid1})[0][0]
+ d4 = mh.rqlexec('Any D WHERE X eid %(x)s, X newstyledefaultdate D', {'x': eid1})[0][0]
+ self.assertEqual(d1, date.today())
+ self.assertEqual(d2, testdate)
+ myfavoritedate = date(2013, 1, 1)
+ self.assertEqual(d3, myfavoritedate)
+ self.assertEqual(d4, myfavoritedate)
def test_drop_chosen_constraints_ctxmanager(self):
- with self.mh.cmd_dropped_constraints('Note', 'unique_id', UniqueConstraint):
- self.mh.cmd_add_attribute('Note', 'unique_id')
- # make sure the maxsize constraint is not dropped
- self.assertRaises(ValidationError,
- self.mh.rqlexec,
- 'INSERT Note N: N unique_id "xyz"')
- self.mh.rollback()
- # make sure the unique constraint is dropped
- self.mh.rqlexec('INSERT Note N: N unique_id "x"')
- self.mh.rqlexec('INSERT Note N: N unique_id "x"')
- self.mh.rqlexec('DELETE Note N')
- self.mh.rollback()
+ with self.mh() as (cnx, mh):
+ with mh.cmd_dropped_constraints('Note', 'unique_id', UniqueConstraint):
+ mh.cmd_add_attribute('Note', 'unique_id')
+ # make sure the maxsize constraint is not dropped
+ self.assertRaises(ValidationError,
+ mh.rqlexec,
+ 'INSERT Note N: N unique_id "xyz"')
+ mh.rollback()
+ # make sure the unique constraint is dropped
+ mh.rqlexec('INSERT Note N: N unique_id "x"')
+ mh.rqlexec('INSERT Note N: N unique_id "x"')
+ mh.rqlexec('DELETE Note N')
def test_drop_required_ctxmanager(self):
- with self.mh.cmd_dropped_constraints('Note', 'unique_id', cstrtype=None,
- droprequired=True):
- self.mh.cmd_add_attribute('Note', 'unique_id')
- self.mh.rqlexec('INSERT Note N')
- # make sure the required=True was restored
- self.assertRaises(ValidationError, self.mh.rqlexec, 'INSERT Note N')
- self.mh.rollback()
+ with self.mh() as (cnx, mh):
+ with mh.cmd_dropped_constraints('Note', 'unique_id', cstrtype=None,
+ droprequired=True):
+ mh.cmd_add_attribute('Note', 'unique_id')
+ mh.rqlexec('INSERT Note N')
+ # make sure the required=True was restored
+ self.assertRaises(ValidationError, mh.rqlexec, 'INSERT Note N')
+ mh.rollback()
def test_rename_attribute(self):
- self.assertNotIn('civility', self.schema)
- eid1 = self.mh.rqlexec('INSERT Personne X: X nom "lui", X sexe "M"')[0][0]
- eid2 = self.mh.rqlexec('INSERT Personne X: X nom "l\'autre", X sexe NULL')[0][0]
- self.mh.cmd_rename_attribute('Personne', 'sexe', 'civility')
- self.assertNotIn('sexe', self.schema)
- self.assertIn('civility', self.schema)
- # test data has been backported
- c1 = self.mh.rqlexec('Any C WHERE X eid %s, X civility C' % eid1)[0][0]
- self.assertEqual(c1, 'M')
- c2 = self.mh.rqlexec('Any C WHERE X eid %s, X civility C' % eid2)[0][0]
- self.assertEqual(c2, None)
-
+ with self.mh() as (cnx, mh):
+ self.assertNotIn('civility', self.schema)
+ eid1 = mh.rqlexec('INSERT Personne X: X nom "lui", X sexe "M"')[0][0]
+ eid2 = mh.rqlexec('INSERT Personne X: X nom "l\'autre", X sexe NULL')[0][0]
+ mh.cmd_rename_attribute('Personne', 'sexe', 'civility')
+ self.assertNotIn('sexe', self.schema)
+ self.assertIn('civility', self.schema)
+ # test data has been backported
+ c1 = mh.rqlexec('Any C WHERE X eid %s, X civility C' % eid1)[0][0]
+ self.assertEqual(c1, 'M')
+ c2 = mh.rqlexec('Any C WHERE X eid %s, X civility C' % eid2)[0][0]
+ self.assertEqual(c2, None)
def test_workflow_actions(self):
- wf = self.mh.cmd_add_workflow(u'foo', ('Personne', 'Email'),
- ensure_workflowable=False)
- for etype in ('Personne', 'Email'):
- s1 = self.mh.rqlexec('Any N WHERE WF workflow_of ET, ET name "%s", WF name N' %
- etype)[0][0]
- self.assertEqual(s1, "foo")
- s1 = self.mh.rqlexec('Any N WHERE ET default_workflow WF, ET name "%s", WF name N' %
- etype)[0][0]
- self.assertEqual(s1, "foo")
+ with self.mh() as (cnx, mh):
+ wf = mh.cmd_add_workflow(u'foo', ('Personne', 'Email'),
+ ensure_workflowable=False)
+ for etype in ('Personne', 'Email'):
+ s1 = mh.rqlexec('Any N WHERE WF workflow_of ET, ET name "%s", WF name N' %
+ etype)[0][0]
+ self.assertEqual(s1, "foo")
+ s1 = mh.rqlexec('Any N WHERE ET default_workflow WF, ET name "%s", WF name N' %
+ etype)[0][0]
+ self.assertEqual(s1, "foo")
def test_add_entity_type(self):
- self.assertNotIn('Folder2', self.schema)
- self.assertNotIn('filed_under2', self.schema)
- self.mh.cmd_add_entity_type('Folder2')
- self.assertIn('Folder2', self.schema)
- self.assertIn('Old', self.schema)
- self.assertTrue(self.session.execute('CWEType X WHERE X name "Folder2"'))
- self.assertIn('filed_under2', self.schema)
- self.assertTrue(self.session.execute('CWRType X WHERE X name "filed_under2"'))
- self.assertEqual(sorted(str(rs) for rs in self.schema['Folder2'].subject_relations()),
- ['created_by', 'creation_date', 'cw_source', 'cwuri',
- 'description', 'description_format',
- 'eid',
- 'filed_under2', 'has_text',
- 'identity', 'in_basket', 'is', 'is_instance_of',
- 'modification_date', 'name', 'owned_by'])
- self.assertEqual([str(rs) for rs in self.schema['Folder2'].object_relations()],
- ['filed_under2', 'identity'])
- # Old will be missing as it has been renamed into 'New' in the migrated
- # schema while New hasn't been added here.
- self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()),
- sorted(str(e) for e in self.schema.entities() if not e.final and e != 'Old'))
- self.assertEqual(self.schema['filed_under2'].objects(), ('Folder2',))
- eschema = self.schema.eschema('Folder2')
- for cstr in eschema.rdef('name').constraints:
- self.assertTrue(hasattr(cstr, 'eid'))
+ with self.mh() as (cnx, mh):
+ self.assertNotIn('Folder2', self.schema)
+ self.assertNotIn('filed_under2', self.schema)
+ mh.cmd_add_entity_type('Folder2')
+ self.assertIn('Folder2', self.schema)
+ self.assertIn('Old', self.schema)
+ self.assertTrue(cnx.execute('CWEType X WHERE X name "Folder2"'))
+ self.assertIn('filed_under2', self.schema)
+ self.assertTrue(cnx.execute('CWRType X WHERE X name "filed_under2"'))
+ self.assertEqual(sorted(str(rs) for rs in self.schema['Folder2'].subject_relations()),
+ ['created_by', 'creation_date', 'cw_source', 'cwuri',
+ 'description', 'description_format',
+ 'eid',
+ 'filed_under2', 'has_text',
+ 'identity', 'in_basket', 'is', 'is_instance_of',
+ 'modification_date', 'name', 'owned_by'])
+ self.assertEqual([str(rs) for rs in self.schema['Folder2'].object_relations()],
+ ['filed_under2', 'identity'])
+ # Old will be missing as it has been renamed into 'New' in the migrated
+ # schema while New hasn't been added here.
+ self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()),
+ sorted(str(e) for e in self.schema.entities() if not e.final and e != 'Old'))
+ self.assertEqual(self.schema['filed_under2'].objects(), ('Folder2',))
+ eschema = self.schema.eschema('Folder2')
+ for cstr in eschema.rdef('name').constraints:
+ self.assertTrue(hasattr(cstr, 'eid'))
def test_add_drop_entity_type(self):
- self.mh.cmd_add_entity_type('Folder2')
- wf = self.mh.cmd_add_workflow(u'folder2 wf', 'Folder2',
- ensure_workflowable=False)
- todo = wf.add_state(u'todo', initial=True)
- done = wf.add_state(u'done')
- wf.add_transition(u'redoit', done, todo)
- wf.add_transition(u'markasdone', todo, done)
- self.session.commit(free_cnxset=False)
- eschema = self.schema.eschema('Folder2')
- self.mh.cmd_drop_entity_type('Folder2')
- self.assertNotIn('Folder2', self.schema)
- self.assertFalse(self.session.execute('CWEType X WHERE X name "Folder2"'))
- # test automatic workflow deletion
- self.assertFalse(self.session.execute('Workflow X WHERE NOT X workflow_of ET'))
- self.assertFalse(self.session.execute('State X WHERE NOT X state_of WF'))
- self.assertFalse(self.session.execute('Transition X WHERE NOT X transition_of WF'))
+ with self.mh() as (cnx, mh):
+ mh.cmd_add_entity_type('Folder2')
+ wf = mh.cmd_add_workflow(u'folder2 wf', 'Folder2',
+ ensure_workflowable=False)
+ todo = wf.add_state(u'todo', initial=True)
+ done = wf.add_state(u'done')
+ wf.add_transition(u'redoit', done, todo)
+ wf.add_transition(u'markasdone', todo, done)
+ cnx.commit()
+ eschema = self.schema.eschema('Folder2')
+ mh.cmd_drop_entity_type('Folder2')
+ self.assertNotIn('Folder2', self.schema)
+ self.assertFalse(cnx.execute('CWEType X WHERE X name "Folder2"'))
+ # test automatic workflow deletion
+ self.assertFalse(cnx.execute('Workflow X WHERE NOT X workflow_of ET'))
+ self.assertFalse(cnx.execute('State X WHERE NOT X state_of WF'))
+ self.assertFalse(cnx.execute('Transition X WHERE NOT X transition_of WF'))
def test_rename_entity_type(self):
- entity = self.mh.create_entity('Old', name=u'old')
- self.repo.type_and_source_from_eid(entity.eid, entity._cw)
- self.mh.cmd_rename_entity_type('Old', 'New')
- self.mh.cmd_rename_attribute('New', 'name', 'new_name')
+ with self.mh() as (cnx, mh):
+ entity = mh.create_entity('Old', name=u'old')
+ self.repo.type_and_source_from_eid(entity.eid, entity._cw)
+ mh.cmd_rename_entity_type('Old', 'New')
+ mh.cmd_rename_attribute('New', 'name', 'new_name')
def test_add_drop_relation_type(self):
- self.mh.cmd_add_entity_type('Folder2', auto=False)
- self.mh.cmd_add_relation_type('filed_under2')
- self.assertIn('filed_under2', self.schema)
- # Old will be missing as it has been renamed into 'New' in the migrated
- # schema while New hasn't been added here.
- self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()),
- sorted(str(e) for e in self.schema.entities()
- if not e.final and e != 'Old'))
- self.assertEqual(self.schema['filed_under2'].objects(), ('Folder2',))
- self.mh.cmd_drop_relation_type('filed_under2')
- self.assertNotIn('filed_under2', self.schema)
+ with self.mh() as (cnx, mh):
+ mh.cmd_add_entity_type('Folder2', auto=False)
+ mh.cmd_add_relation_type('filed_under2')
+ self.assertIn('filed_under2', self.schema)
+ # Old will be missing as it has been renamed into 'New' in the migrated
+ # schema while New hasn't been added here.
+ self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()),
+ sorted(str(e) for e in self.schema.entities()
+ if not e.final and e != 'Old'))
+ self.assertEqual(self.schema['filed_under2'].objects(), ('Folder2',))
+ mh.cmd_drop_relation_type('filed_under2')
+ self.assertNotIn('filed_under2', self.schema)
def test_add_relation_definition_nortype(self):
- self.mh.cmd_add_relation_definition('Personne', 'concerne2', 'Affaire')
- self.assertEqual(self.schema['concerne2'].subjects(),
- ('Personne',))
- self.assertEqual(self.schema['concerne2'].objects(),
- ('Affaire', ))
- self.assertEqual(self.schema['concerne2'].rdef('Personne', 'Affaire').cardinality,
- '1*')
- self.mh.cmd_add_relation_definition('Personne', 'concerne2', 'Note')
- self.assertEqual(sorted(self.schema['concerne2'].objects()), ['Affaire', 'Note'])
- self.mh.create_entity('Personne', nom=u'tot')
- self.mh.create_entity('Affaire')
- self.mh.rqlexec('SET X concerne2 Y WHERE X is Personne, Y is Affaire')
- self.session.commit(free_cnxset=False)
- self.mh.cmd_drop_relation_definition('Personne', 'concerne2', 'Affaire')
- self.assertIn('concerne2', self.schema)
- self.mh.cmd_drop_relation_definition('Personne', 'concerne2', 'Note')
- self.assertNotIn('concerne2', self.schema)
+ with self.mh() as (cnx, mh):
+ mh.cmd_add_relation_definition('Personne', 'concerne2', 'Affaire')
+ self.assertEqual(self.schema['concerne2'].subjects(),
+ ('Personne',))
+ self.assertEqual(self.schema['concerne2'].objects(),
+ ('Affaire', ))
+ self.assertEqual(self.schema['concerne2'].rdef('Personne', 'Affaire').cardinality,
+ '1*')
+ mh.cmd_add_relation_definition('Personne', 'concerne2', 'Note')
+ self.assertEqual(sorted(self.schema['concerne2'].objects()), ['Affaire', 'Note'])
+ mh.create_entity('Personne', nom=u'tot')
+ mh.create_entity('Affaire')
+ mh.rqlexec('SET X concerne2 Y WHERE X is Personne, Y is Affaire')
+ cnx.commit()
+ mh.cmd_drop_relation_definition('Personne', 'concerne2', 'Affaire')
+ self.assertIn('concerne2', self.schema)
+ mh.cmd_drop_relation_definition('Personne', 'concerne2', 'Note')
+ self.assertNotIn('concerne2', self.schema)
def test_drop_relation_definition_existant_rtype(self):
- self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
- ['Affaire', 'Personne'])
- self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
- ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
- self.mh.cmd_drop_relation_definition('Personne', 'concerne', 'Affaire')
- self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
- ['Affaire'])
- self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
- ['Division', 'Note', 'Societe', 'SubDivision'])
- self.mh.cmd_add_relation_definition('Personne', 'concerne', 'Affaire')
- self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
- ['Affaire', 'Personne'])
- self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
- ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
- # trick: overwrite self.maxeid to avoid deletion of just reintroduced types
- self.maxeid = self.session.execute('Any MAX(X)')[0][0]
+ with self.mh() as (cnx, mh):
+ self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
+ ['Affaire', 'Personne'])
+ self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
+ ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
+ mh.cmd_drop_relation_definition('Personne', 'concerne', 'Affaire')
+ self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
+ ['Affaire'])
+ self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
+ ['Division', 'Note', 'Societe', 'SubDivision'])
+ mh.cmd_add_relation_definition('Personne', 'concerne', 'Affaire')
+ self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
+ ['Affaire', 'Personne'])
+ self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
+ ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
+ # trick: overwrite self.maxeid to avoid deletion of just reintroduced types
+ self.maxeid = cnx.execute('Any MAX(X)')[0][0]
def test_drop_relation_definition_with_specialization(self):
- self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
- ['Affaire', 'Personne'])
- self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
- ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
- self.mh.cmd_drop_relation_definition('Affaire', 'concerne', 'Societe')
- self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
- ['Affaire', 'Personne'])
- self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
- ['Affaire', 'Note'])
- self.mh.cmd_add_relation_definition('Affaire', 'concerne', 'Societe')
- self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
- ['Affaire', 'Personne'])
- self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
- ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
- # trick: overwrite self.maxeid to avoid deletion of just reintroduced types
- self.maxeid = self.session.execute('Any MAX(X)')[0][0]
+ with self.mh() as (cnx, mh):
+ self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
+ ['Affaire', 'Personne'])
+ self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
+ ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
+ mh.cmd_drop_relation_definition('Affaire', 'concerne', 'Societe')
+ self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
+ ['Affaire', 'Personne'])
+ self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
+ ['Affaire', 'Note'])
+ mh.cmd_add_relation_definition('Affaire', 'concerne', 'Societe')
+ self.assertEqual(sorted(str(e) for e in self.schema['concerne'].subjects()),
+ ['Affaire', 'Personne'])
+ self.assertEqual(sorted(str(e) for e in self.schema['concerne'].objects()),
+ ['Affaire', 'Division', 'Note', 'Societe', 'SubDivision'])
+ # trick: overwrite self.maxeid to avoid deletion of just reintroduced types
+ self.maxeid = cnx.execute('Any MAX(X)')[0][0]
def test_rename_relation(self):
self.skipTest('implement me')
def test_change_relation_props_non_final(self):
- rschema = self.schema['concerne']
- card = rschema.rdef('Affaire', 'Societe').cardinality
- self.assertEqual(card, '**')
- try:
- self.mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe',
- cardinality='?*')
+ with self.mh() as (cnx, mh):
+ rschema = self.schema['concerne']
card = rschema.rdef('Affaire', 'Societe').cardinality
- self.assertEqual(card, '?*')
- finally:
- self.mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe',
- cardinality='**')
+ self.assertEqual(card, '**')
+ try:
+ mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe',
+ cardinality='?*')
+ card = rschema.rdef('Affaire', 'Societe').cardinality
+ self.assertEqual(card, '?*')
+ finally:
+ mh.cmd_change_relation_props('Affaire', 'concerne', 'Societe',
+ cardinality='**')
def test_change_relation_props_final(self):
- rschema = self.schema['adel']
- card = rschema.rdef('Personne', 'String').fulltextindexed
- self.assertEqual(card, False)
- try:
- self.mh.cmd_change_relation_props('Personne', 'adel', 'String',
- fulltextindexed=True)
+ with self.mh() as (cnx, mh):
+ rschema = self.schema['adel']
card = rschema.rdef('Personne', 'String').fulltextindexed
- self.assertEqual(card, True)
- finally:
- self.mh.cmd_change_relation_props('Personne', 'adel', 'String',
- fulltextindexed=False)
+ self.assertEqual(card, False)
+ try:
+ mh.cmd_change_relation_props('Personne', 'adel', 'String',
+ fulltextindexed=True)
+ card = rschema.rdef('Personne', 'String').fulltextindexed
+ self.assertEqual(card, True)
+ finally:
+ mh.cmd_change_relation_props('Personne', 'adel', 'String',
+ fulltextindexed=False)
def test_sync_schema_props_perms_rqlconstraints(self):
- # Drop one of the RQLConstraint.
- rdef = self.schema['evaluee'].rdefs[('Personne', 'Note')]
- oldconstraints = rdef.constraints
- self.assertIn('S created_by U',
- [cstr.expression for cstr in oldconstraints])
- self.mh.cmd_sync_schema_props_perms('evaluee', commit=True)
- newconstraints = rdef.constraints
- self.assertNotIn('S created_by U',
- [cstr.expression for cstr in newconstraints])
+ with self.mh() as (cnx, mh):
+            # Drop one of the RQLConstraints.
+ rdef = self.schema['evaluee'].rdefs[('Personne', 'Note')]
+ oldconstraints = rdef.constraints
+ self.assertIn('S created_by U',
+ [cstr.expression for cstr in oldconstraints])
+ mh.cmd_sync_schema_props_perms('evaluee', commit=True)
+ newconstraints = rdef.constraints
+ self.assertNotIn('S created_by U',
+ [cstr.expression for cstr in newconstraints])
- # Drop all RQLConstraint.
- rdef = self.schema['travaille'].rdefs[('Personne', 'Societe')]
- oldconstraints = rdef.constraints
- self.assertEqual(len(oldconstraints), 2)
- self.mh.cmd_sync_schema_props_perms('travaille', commit=True)
- rdef = self.schema['travaille'].rdefs[('Personne', 'Societe')]
- newconstraints = rdef.constraints
- self.assertEqual(len(newconstraints), 0)
+            # Drop all RQLConstraints.
+ rdef = self.schema['travaille'].rdefs[('Personne', 'Societe')]
+ oldconstraints = rdef.constraints
+ self.assertEqual(len(oldconstraints), 2)
+ mh.cmd_sync_schema_props_perms('travaille', commit=True)
+ rdef = self.schema['travaille'].rdefs[('Personne', 'Societe')]
+ newconstraints = rdef.constraints
+ self.assertEqual(len(newconstraints), 0)
@tag('longrun')
def test_sync_schema_props_perms(self):
- cursor = self.mh.session
- cursor.set_cnxset()
- nbrqlexpr_start = cursor.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0]
- migrschema['titre'].rdefs[('Personne', 'String')].order = 7
- migrschema['adel'].rdefs[('Personne', 'String')].order = 6
- migrschema['ass'].rdefs[('Personne', 'String')].order = 5
- migrschema['Personne'].description = 'blabla bla'
- migrschema['titre'].description = 'usually a title'
- migrschema['titre'].rdefs[('Personne', 'String')].description = 'title for this person'
- delete_concerne_rqlexpr = self._rrqlexpr_rset('delete', 'concerne')
- add_concerne_rqlexpr = self._rrqlexpr_rset('add', 'concerne')
+ with self.mh() as (cnx, mh):
+ nbrqlexpr_start = cnx.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0]
+ migrschema['titre'].rdefs[('Personne', 'String')].order = 7
+ migrschema['adel'].rdefs[('Personne', 'String')].order = 6
+ migrschema['ass'].rdefs[('Personne', 'String')].order = 5
+ migrschema['Personne'].description = 'blabla bla'
+ migrschema['titre'].description = 'usually a title'
+ migrschema['titre'].rdefs[('Personne', 'String')].description = 'title for this person'
+ delete_concerne_rqlexpr = self._rrqlexpr_rset(cnx, 'delete', 'concerne')
+ add_concerne_rqlexpr = self._rrqlexpr_rset(cnx, 'add', 'concerne')
- self.mh.cmd_sync_schema_props_perms(commit=False)
+ mh.cmd_sync_schema_props_perms(commit=False)
- self.assertEqual(cursor.execute('Any D WHERE X name "Personne", X description D')[0][0],
- 'blabla bla')
- self.assertEqual(cursor.execute('Any D WHERE X name "titre", X description D')[0][0],
- 'usually a title')
- self.assertEqual(cursor.execute('Any D WHERE X relation_type RT, RT name "titre",'
+ self.assertEqual(cnx.execute('Any D WHERE X name "Personne", X description D')[0][0],
+ 'blabla bla')
+ self.assertEqual(cnx.execute('Any D WHERE X name "titre", X description D')[0][0],
+ 'usually a title')
+ self.assertEqual(cnx.execute('Any D WHERE X relation_type RT, RT name "titre",'
'X from_entity FE, FE name "Personne",'
'X description D')[0][0],
- 'title for this person')
- rinorder = [n for n, in cursor.execute(
- 'Any N ORDERBY O,N WHERE X is CWAttribute, X relation_type RT, RT name N,'
- 'X from_entity FE, FE name "Personne",'
- 'X ordernum O')]
- expected = [u'nom', u'prenom', u'sexe', u'promo', u'ass', u'adel', u'titre',
- u'web', u'tel', u'fax', u'datenaiss', u'test', u'tzdatenaiss',
- u'description', u'firstname',
- u'creation_date', u'cwuri', u'modification_date']
- self.assertEqual(expected, rinorder)
+ 'title for this person')
+ rinorder = [n for n, in cnx.execute(
+ 'Any N ORDERBY O,N WHERE X is CWAttribute, X relation_type RT, RT name N,'
+ 'X from_entity FE, FE name "Personne",'
+ 'X ordernum O')]
+ expected = [u'nom', u'prenom', u'sexe', u'promo', u'ass', u'adel', u'titre',
+ u'web', u'tel', u'fax', u'datenaiss', u'test', u'tzdatenaiss',
+ u'description', u'firstname',
+ u'creation_date', u'cwuri', u'modification_date']
+ self.assertEqual(expected, rinorder)
- # test permissions synchronization ####################################
- # new rql expr to add note entity
- eexpr = self._erqlexpr_entity('add', 'Note')
- self.assertEqual(eexpr.expression,
- 'X ecrit_part PE, U in_group G, '
- 'PE require_permission P, P name "add_note", P require_group G')
- self.assertEqual([et.name for et in eexpr.reverse_add_permission], ['Note'])
- self.assertEqual(eexpr.reverse_read_permission, ())
- self.assertEqual(eexpr.reverse_delete_permission, ())
- self.assertEqual(eexpr.reverse_update_permission, ())
- self.assertTrue(self._rrqlexpr_rset('add', 'para'))
- # no rqlexpr to delete para attribute
- self.assertFalse(self._rrqlexpr_rset('delete', 'para'))
- # new rql expr to add ecrit_par relation
- rexpr = self._rrqlexpr_entity('add', 'ecrit_par')
- self.assertEqual(rexpr.expression,
- 'O require_permission P, P name "add_note", '
- 'U in_group G, P require_group G')
- self.assertEqual([rdef.rtype.name for rdef in rexpr.reverse_add_permission], ['ecrit_par'])
- self.assertEqual(rexpr.reverse_read_permission, ())
- self.assertEqual(rexpr.reverse_delete_permission, ())
- # no more rqlexpr to delete and add travaille relation
- self.assertFalse(self._rrqlexpr_rset('add', 'travaille'))
- self.assertFalse(self._rrqlexpr_rset('delete', 'travaille'))
- # no more rqlexpr to delete and update Societe entity
- self.assertFalse(self._erqlexpr_rset('update', 'Societe'))
- self.assertFalse(self._erqlexpr_rset('delete', 'Societe'))
- # no more rqlexpr to read Affaire entity
- self.assertFalse(self._erqlexpr_rset('read', 'Affaire'))
- # rqlexpr to update Affaire entity has been updated
- eexpr = self._erqlexpr_entity('update', 'Affaire')
- self.assertEqual(eexpr.expression, 'X concerne S, S owned_by U')
- # no change for rqlexpr to add and delete Affaire entity
- self.assertEqual(len(self._erqlexpr_rset('delete', 'Affaire')), 1)
- self.assertEqual(len(self._erqlexpr_rset('add', 'Affaire')), 1)
- # no change for rqlexpr to add and delete concerne relation
- self.assertEqual(len(self._rrqlexpr_rset('delete', 'concerne')), len(delete_concerne_rqlexpr))
- self.assertEqual(len(self._rrqlexpr_rset('add', 'concerne')), len(add_concerne_rqlexpr))
- # * migrschema involve:
- # * 7 erqlexprs deletions (2 in (Affaire + Societe + Note.para) + 1 Note.something
- # * 2 rrqlexprs deletions (travaille)
- # * 1 update (Affaire update)
- # * 2 new (Note add, ecrit_par add)
- # * 2 implicit new for attributes (Note.para, Person.test)
- # remaining orphan rql expr which should be deleted at commit (composite relation)
- # unattached expressions -> pending deletion on commit
- self.assertEqual(cursor.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "ERQLExpression",'
- 'NOT ET1 read_permission X, NOT ET2 add_permission X, '
- 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0],
- 7)
- self.assertEqual(cursor.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "RRQLExpression",'
- 'NOT ET1 read_permission X, NOT ET2 add_permission X, '
- 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0],
- 2)
- # finally
- self.assertEqual(cursor.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0],
- nbrqlexpr_start + 1 + 2 + 2 + 2)
- self.mh.commit()
- # unique_together test
- self.assertEqual(len(self.schema.eschema('Personne')._unique_together), 1)
- self.assertCountEqual(self.schema.eschema('Personne')._unique_together[0],
- ('nom', 'prenom', 'datenaiss'))
- rset = cursor.execute('Any C WHERE C is CWUniqueTogetherConstraint, C constraint_of ET, ET name "Personne"')
- self.assertEqual(len(rset), 1)
- relations = [r.name for r in rset.get_entity(0, 0).relations]
- self.assertCountEqual(relations, ('nom', 'prenom', 'datenaiss'))
+ # test permissions synchronization ####################################
+ # new rql expr to add note entity
+ eexpr = self._erqlexpr_entity(cnx, 'add', 'Note')
+ self.assertEqual(eexpr.expression,
+ 'X ecrit_part PE, U in_group G, '
+ 'PE require_permission P, P name "add_note", P require_group G')
+ self.assertEqual([et.name for et in eexpr.reverse_add_permission], ['Note'])
+ self.assertEqual(eexpr.reverse_read_permission, ())
+ self.assertEqual(eexpr.reverse_delete_permission, ())
+ self.assertEqual(eexpr.reverse_update_permission, ())
+ self.assertTrue(self._rrqlexpr_rset(cnx, 'add', 'para'))
+ # no rqlexpr to delete para attribute
+ self.assertFalse(self._rrqlexpr_rset(cnx, 'delete', 'para'))
+ # new rql expr to add ecrit_par relation
+ rexpr = self._rrqlexpr_entity(cnx, 'add', 'ecrit_par')
+ self.assertEqual(rexpr.expression,
+ 'O require_permission P, P name "add_note", '
+ 'U in_group G, P require_group G')
+ self.assertEqual([rdef.rtype.name for rdef in rexpr.reverse_add_permission], ['ecrit_par'])
+ self.assertEqual(rexpr.reverse_read_permission, ())
+ self.assertEqual(rexpr.reverse_delete_permission, ())
+ # no more rqlexpr to delete and add travaille relation
+ self.assertFalse(self._rrqlexpr_rset(cnx, 'add', 'travaille'))
+ self.assertFalse(self._rrqlexpr_rset(cnx, 'delete', 'travaille'))
+ # no more rqlexpr to delete and update Societe entity
+ self.assertFalse(self._erqlexpr_rset(cnx, 'update', 'Societe'))
+ self.assertFalse(self._erqlexpr_rset(cnx, 'delete', 'Societe'))
+ # no more rqlexpr to read Affaire entity
+ self.assertFalse(self._erqlexpr_rset(cnx, 'read', 'Affaire'))
+ # rqlexpr to update Affaire entity has been updated
+ eexpr = self._erqlexpr_entity(cnx, 'update', 'Affaire')
+ self.assertEqual(eexpr.expression, 'X concerne S, S owned_by U')
+ # no change for rqlexpr to add and delete Affaire entity
+ self.assertEqual(len(self._erqlexpr_rset(cnx, 'delete', 'Affaire')), 1)
+ self.assertEqual(len(self._erqlexpr_rset(cnx, 'add', 'Affaire')), 1)
+ # no change for rqlexpr to add and delete concerne relation
+ self.assertEqual(len(self._rrqlexpr_rset(cnx, 'delete', 'concerne')),
+ len(delete_concerne_rqlexpr))
+ self.assertEqual(len(self._rrqlexpr_rset(cnx, 'add', 'concerne')),
+ len(add_concerne_rqlexpr))
+        # * migrschema involves:
+        # * 7 erqlexprs deletions (2 in (Affaire + Societe + Note.para) + 1 Note.something)
+ # * 2 rrqlexprs deletions (travaille)
+ # * 1 update (Affaire update)
+ # * 2 new (Note add, ecrit_par add)
+        # * 2 implicit new for attributes (Note.para, Personne.test)
+ # remaining orphan rql expr which should be deleted at commit (composite relation)
+ # unattached expressions -> pending deletion on commit
+ self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "ERQLExpression",'
+ 'NOT ET1 read_permission X, NOT ET2 add_permission X, '
+ 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0],
+ 7)
+ self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression, X exprtype "RRQLExpression",'
+ 'NOT ET1 read_permission X, NOT ET2 add_permission X, '
+ 'NOT ET3 delete_permission X, NOT ET4 update_permission X')[0][0],
+ 2)
+ # finally
+ self.assertEqual(cnx.execute('Any COUNT(X) WHERE X is RQLExpression')[0][0],
+ nbrqlexpr_start + 1 + 2 + 2 + 2)
+ cnx.commit()
+ # unique_together test
+ self.assertEqual(len(self.schema.eschema('Personne')._unique_together), 1)
+ self.assertCountEqual(self.schema.eschema('Personne')._unique_together[0],
+ ('nom', 'prenom', 'datenaiss'))
+ rset = cnx.execute('Any C WHERE C is CWUniqueTogetherConstraint, C constraint_of ET, ET name "Personne"')
+ self.assertEqual(len(rset), 1)
+ relations = [r.name for r in rset.get_entity(0, 0).relations]
+ self.assertCountEqual(relations, ('nom', 'prenom', 'datenaiss'))
- def _erqlexpr_rset(self, action, ertype):
+ def _erqlexpr_rset(self, cnx, action, ertype):
rql = 'RQLExpression X WHERE ET is CWEType, ET %s_permission X, ET name %%(name)s' % action
- return self.mh.session.execute(rql, {'name': ertype})
- def _erqlexpr_entity(self, action, ertype):
- rset = self._erqlexpr_rset(action, ertype)
+ return cnx.execute(rql, {'name': ertype})
+
+ def _erqlexpr_entity(self, cnx, action, ertype):
+ rset = self._erqlexpr_rset(cnx, action, ertype)
self.assertEqual(len(rset), 1)
return rset.get_entity(0, 0)
- def _rrqlexpr_rset(self, action, ertype):
+
+ def _rrqlexpr_rset(self, cnx, action, ertype):
rql = 'RQLExpression X WHERE RT is CWRType, RDEF %s_permission X, RT name %%(name)s, RDEF relation_type RT' % action
- return self.mh.session.execute(rql, {'name': ertype})
- def _rrqlexpr_entity(self, action, ertype):
- rset = self._rrqlexpr_rset(action, ertype)
+ return cnx.execute(rql, {'name': ertype})
+
+ def _rrqlexpr_entity(self, cnx, action, ertype):
+ rset = self._rrqlexpr_rset(cnx, action, ertype)
self.assertEqual(len(rset), 1)
return rset.get_entity(0, 0)
def test_set_size_constraint(self):
- # existing previous value
- try:
- self.mh.cmd_set_size_constraint('CWEType', 'name', 128)
- finally:
- self.mh.cmd_set_size_constraint('CWEType', 'name', 64)
- # non existing previous value
- try:
- self.mh.cmd_set_size_constraint('CWEType', 'description', 256)
- finally:
- self.mh.cmd_set_size_constraint('CWEType', 'description', None)
+ with self.mh() as (cnx, mh):
+ # existing previous value
+ try:
+ mh.cmd_set_size_constraint('CWEType', 'name', 128)
+ finally:
+ mh.cmd_set_size_constraint('CWEType', 'name', 64)
+ # non existing previous value
+ try:
+ mh.cmd_set_size_constraint('CWEType', 'description', 256)
+ finally:
+ mh.cmd_set_size_constraint('CWEType', 'description', None)
@tag('longrun')
def test_add_remove_cube_and_deps(self):
- cubes = set(self.config.cubes())
- schema = self.repo.schema
- self.assertEqual(sorted((str(s), str(o)) for s, o in schema['see_also'].rdefs.iterkeys()),
- sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'),
- ('Bookmark', 'Bookmark'), ('Bookmark', 'Note'),
- ('Note', 'Note'), ('Note', 'Bookmark')]))
- try:
+ with self.mh() as (cnx, mh):
+ schema = self.repo.schema
+ self.assertEqual(sorted((str(s), str(o)) for s, o in schema['see_also'].rdefs.iterkeys()),
+ sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'),
+ ('Bookmark', 'Bookmark'), ('Bookmark', 'Note'),
+ ('Note', 'Note'), ('Note', 'Bookmark')]))
try:
- self.mh.cmd_remove_cube('email', removedeps=True)
+ mh.cmd_remove_cube('email', removedeps=True)
# file was there because it's an email dependency, should have been removed
self.assertNotIn('email', self.config.cubes())
self.assertNotIn(self.config.cube_dir('email'), self.config.cubes_path())
@@ -538,121 +556,116 @@
('Note', 'Bookmark')]))
self.assertEqual(sorted(schema['see_also'].subjects()), ['Bookmark', 'Folder', 'Note'])
self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'Folder', 'Note'])
- self.assertEqual(self.session.execute('Any X WHERE X pkey "system.version.email"').rowcount, 0)
- self.assertEqual(self.session.execute('Any X WHERE X pkey "system.version.file"').rowcount, 0)
- except :
- import traceback
- traceback.print_exc()
- raise
- finally:
- self.mh.cmd_add_cube('email')
- self.assertIn('email', self.config.cubes())
- self.assertIn(self.config.cube_dir('email'), self.config.cubes_path())
- self.assertIn('file', self.config.cubes())
- self.assertIn(self.config.cube_dir('file'), self.config.cubes_path())
- for ertype in ('Email', 'EmailThread', 'EmailPart', 'File',
- 'sender', 'in_thread', 'reply_to', 'data_format'):
- self.assertTrue(ertype in schema, ertype)
- self.assertEqual(sorted(schema['see_also'].rdefs.iterkeys()),
- sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'),
- ('Bookmark', 'Bookmark'),
- ('Bookmark', 'Note'),
- ('Note', 'Note'),
- ('Note', 'Bookmark')]))
- self.assertEqual(sorted(schema['see_also'].subjects()), ['Bookmark', 'EmailThread', 'Folder', 'Note'])
- self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'EmailThread', 'Folder', 'Note'])
- from cubes.email.__pkginfo__ import version as email_version
- from cubes.file.__pkginfo__ import version as file_version
- self.assertEqual(self.session.execute('Any V WHERE X value V, X pkey "system.version.email"')[0][0],
- email_version)
- self.assertEqual(self.session.execute('Any V WHERE X value V, X pkey "system.version.file"')[0][0],
- file_version)
- # trick: overwrite self.maxeid to avoid deletion of just reintroduced
- # types (and their associated tables!)
- self.maxeid = self.session.execute('Any MAX(X)')[0][0]
- # why this commit is necessary is unclear to me (though without it
- # next test may fail complaining of missing tables
- self.session.commit(free_cnxset=False)
+ self.assertEqual(cnx.execute('Any X WHERE X pkey "system.version.email"').rowcount, 0)
+ self.assertEqual(cnx.execute('Any X WHERE X pkey "system.version.file"').rowcount, 0)
+ finally:
+ mh.cmd_add_cube('email')
+ self.assertIn('email', self.config.cubes())
+ self.assertIn(self.config.cube_dir('email'), self.config.cubes_path())
+ self.assertIn('file', self.config.cubes())
+ self.assertIn(self.config.cube_dir('file'), self.config.cubes_path())
+ for ertype in ('Email', 'EmailThread', 'EmailPart', 'File',
+ 'sender', 'in_thread', 'reply_to', 'data_format'):
+ self.assertTrue(ertype in schema, ertype)
+ self.assertEqual(sorted(schema['see_also'].rdefs.iterkeys()),
+ sorted([('EmailThread', 'EmailThread'), ('Folder', 'Folder'),
+ ('Bookmark', 'Bookmark'),
+ ('Bookmark', 'Note'),
+ ('Note', 'Note'),
+ ('Note', 'Bookmark')]))
+ self.assertEqual(sorted(schema['see_also'].subjects()), ['Bookmark', 'EmailThread', 'Folder', 'Note'])
+ self.assertEqual(sorted(schema['see_also'].objects()), ['Bookmark', 'EmailThread', 'Folder', 'Note'])
+ from cubes.email.__pkginfo__ import version as email_version
+ from cubes.file.__pkginfo__ import version as file_version
+ self.assertEqual(cnx.execute('Any V WHERE X value V, X pkey "system.version.email"')[0][0],
+ email_version)
+ self.assertEqual(cnx.execute('Any V WHERE X value V, X pkey "system.version.file"')[0][0],
+ file_version)
+ # trick: overwrite self.maxeid to avoid deletion of just reintroduced
+ # types (and their associated tables!)
+ self.maxeid = cnx.execute('Any MAX(X)')[0][0]
+            # why this commit is necessary is unclear to me (though without it
+            # the next test may fail, complaining of missing tables)
+ cnx.commit()
@tag('longrun')
def test_add_remove_cube_no_deps(self):
- cubes = set(self.config.cubes())
- schema = self.repo.schema
- try:
+ with self.mh() as (cnx, mh):
+ cubes = set(self.config.cubes())
+ schema = self.repo.schema
try:
- self.mh.cmd_remove_cube('email')
+ mh.cmd_remove_cube('email')
cubes.remove('email')
self.assertNotIn('email', self.config.cubes())
self.assertIn('file', self.config.cubes())
for ertype in ('Email', 'EmailThread', 'EmailPart',
'sender', 'in_thread', 'reply_to'):
self.assertFalse(ertype in schema, ertype)
- except :
- import traceback
- traceback.print_exc()
- raise
- finally:
- self.mh.cmd_add_cube('email')
- self.assertIn('email', self.config.cubes())
- # trick: overwrite self.maxeid to avoid deletion of just reintroduced
- # types (and their associated tables!)
- self.maxeid = self.session.execute('Any MAX(X)')[0][0]
- # why this commit is necessary is unclear to me (though without it
- # next test may fail complaining of missing tables
- self.session.commit(free_cnxset=False)
+ finally:
+ mh.cmd_add_cube('email')
+ self.assertIn('email', self.config.cubes())
+ # trick: overwrite self.maxeid to avoid deletion of just reintroduced
+ # types (and their associated tables!)
+ self.maxeid = cnx.execute('Any MAX(X)')[0][0] # XXXXXXX KILL KENNY
+            # why this commit is necessary is unclear to me (though without it
+            # the next test may fail, complaining of missing tables)
+ cnx.commit()
def test_remove_dep_cube(self):
- with self.assertRaises(ConfigurationError) as cm:
- self.mh.cmd_remove_cube('file')
- self.assertEqual(str(cm.exception), "can't remove cube file, used as a dependency")
+ with self.mh() as (cnx, mh):
+ with self.assertRaises(ConfigurationError) as cm:
+ mh.cmd_remove_cube('file')
+ self.assertEqual(str(cm.exception), "can't remove cube file, used as a dependency")
@tag('longrun')
def test_introduce_base_class(self):
- self.mh.cmd_add_entity_type('Para')
- self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()),
- ['Note'])
- self.assertEqual(self.schema['Note'].specializes().type, 'Para')
- self.mh.cmd_add_entity_type('Text')
- self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()),
- ['Note', 'Text'])
- self.assertEqual(self.schema['Text'].specializes().type, 'Para')
- # test columns have been actually added
- text = self.session.execute('INSERT Text X: X para "hip", X summary "hop", X newattr "momo"').get_entity(0, 0)
- note = self.session.execute('INSERT Note X: X para "hip", X shortpara "hop", X newattr "momo", X unique_id "x"').get_entity(0, 0)
- aff = self.session.execute('INSERT Affaire X').get_entity(0, 0)
- self.assertTrue(self.session.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s',
- {'x': text.eid, 'y': aff.eid}))
- self.assertTrue(self.session.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s',
- {'x': note.eid, 'y': aff.eid}))
- self.assertTrue(self.session.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s',
- {'x': text.eid, 'y': aff.eid}))
- self.assertTrue(self.session.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s',
- {'x': note.eid, 'y': aff.eid}))
- # XXX remove specializes by ourselves, else tearDown fails when removing
- # Para because of Note inheritance. This could be fixed by putting the
- # MemSchemaCWETypeDel(session, name) operation in the
- # after_delete_entity(CWEType) hook, since in that case the MemSchemaSpecializesDel
- # operation would be removed before, but I'm not sure this is a desired behaviour.
- #
- # also we need more tests about introducing/removing base classes or
- # specialization relationship...
- self.session.execute('DELETE X specializes Y WHERE Y name "Para"')
- self.session.commit(free_cnxset=False)
- self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()),
- [])
- self.assertEqual(self.schema['Note'].specializes(), None)
- self.assertEqual(self.schema['Text'].specializes(), None)
+ with self.mh() as (cnx, mh):
+ mh.cmd_add_entity_type('Para')
+ self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()),
+ ['Note'])
+ self.assertEqual(self.schema['Note'].specializes().type, 'Para')
+ mh.cmd_add_entity_type('Text')
+ self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()),
+ ['Note', 'Text'])
+ self.assertEqual(self.schema['Text'].specializes().type, 'Para')
+ # test columns have been actually added
+ text = cnx.execute('INSERT Text X: X para "hip", X summary "hop", X newattr "momo"').get_entity(0, 0)
+ note = cnx.execute('INSERT Note X: X para "hip", X shortpara "hop", X newattr "momo", X unique_id "x"').get_entity(0, 0)
+ aff = cnx.execute('INSERT Affaire X').get_entity(0, 0)
+ self.assertTrue(cnx.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s',
+ {'x': text.eid, 'y': aff.eid}))
+ self.assertTrue(cnx.execute('SET X newnotinlined Y WHERE X eid %(x)s, Y eid %(y)s',
+ {'x': note.eid, 'y': aff.eid}))
+ self.assertTrue(cnx.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s',
+ {'x': text.eid, 'y': aff.eid}))
+ self.assertTrue(cnx.execute('SET X newinlined Y WHERE X eid %(x)s, Y eid %(y)s',
+ {'x': note.eid, 'y': aff.eid}))
+ # XXX remove specializes by ourselves, else tearDown fails when removing
+ # Para because of Note inheritance. This could be fixed by putting the
+ # MemSchemaCWETypeDel(session, name) operation in the
+ # after_delete_entity(CWEType) hook, since in that case the MemSchemaSpecializesDel
+ # operation would be removed before, but I'm not sure this is a desired behaviour.
+ #
+ # also we need more tests about introducing/removing base classes or
+ # specialization relationship...
+ cnx.execute('DELETE X specializes Y WHERE Y name "Para"')
+ cnx.commit()
+ self.assertEqual(sorted(et.type for et in self.schema['Para'].specialized_by()),
+ [])
+ self.assertEqual(self.schema['Note'].specializes(), None)
+ self.assertEqual(self.schema['Text'].specializes(), None)
def test_add_symmetric_relation_type(self):
- same_as_sql = self.mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' "
- "and name='same_as_relation'")
- self.assertFalse(same_as_sql)
- self.mh.cmd_add_relation_type('same_as')
- same_as_sql = self.mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' "
- "and name='same_as_relation'")
- self.assertTrue(same_as_sql)
+ with self.mh() as (cnx, mh):
+ same_as_sql = mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' "
+ "and name='same_as_relation'")
+ self.assertFalse(same_as_sql)
+ mh.cmd_add_relation_type('same_as')
+ same_as_sql = mh.sqlexec("SELECT sql FROM sqlite_master WHERE type='table' "
+ "and name='same_as_relation'")
+ self.assertTrue(same_as_sql)
if __name__ == '__main__':
unittest_main()
--- a/server/test/unittest_postgres.py Fri May 23 18:35:13 2014 +0200
+++ b/server/test/unittest_postgres.py Fri Jun 27 11:48:26 2014 +0200
@@ -52,16 +52,17 @@
self.assertEqual(set(), set(range1) & set(range2))
def test_occurence_count(self):
- req = self.request()
- c1 = req.create_entity('Card', title=u'c1',
- content=u'cubicweb cubicweb cubicweb')
- c2 = req.create_entity('Card', title=u'c3',
- content=u'cubicweb')
- c3 = req.create_entity('Card', title=u'c2',
- content=u'cubicweb cubicweb')
- self.commit()
- self.assertEqual(req.execute('Card X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"').rows,
- [[c1.eid,], [c3.eid,], [c2.eid,]])
+ with self.admin_access.repo_cnx() as cnx:
+ c1 = cnx.create_entity('Card', title=u'c1',
+ content=u'cubicweb cubicweb cubicweb')
+ c2 = cnx.create_entity('Card', title=u'c3',
+ content=u'cubicweb')
+ c3 = cnx.create_entity('Card', title=u'c2',
+ content=u'cubicweb cubicweb')
+ cnx.commit()
+ self.assertEqual(cnx.execute('Card X ORDERBY FTIRANK(X) DESC '
+ 'WHERE X has_text "cubicweb"').rows,
+ [[c1.eid,], [c3.eid,], [c2.eid,]])
def test_attr_weight(self):
@@ -69,43 +70,48 @@
__select__ = is_instance('Card')
attr_weight = {'title': 'A'}
with self.temporary_appobjects(CardIFTIndexableAdapter):
- req = self.request()
- c1 = req.create_entity('Card', title=u'c1',
- content=u'cubicweb cubicweb cubicweb')
- c2 = req.create_entity('Card', title=u'c2',
- content=u'cubicweb cubicweb')
- c3 = req.create_entity('Card', title=u'cubicweb',
- content=u'autre chose')
- self.commit()
- self.assertEqual(req.execute('Card X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"').rows,
- [[c3.eid,], [c1.eid,], [c2.eid,]])
+ with self.admin_access.repo_cnx() as cnx:
+ c1 = cnx.create_entity('Card', title=u'c1',
+ content=u'cubicweb cubicweb cubicweb')
+ c2 = cnx.create_entity('Card', title=u'c2',
+ content=u'cubicweb cubicweb')
+ c3 = cnx.create_entity('Card', title=u'cubicweb',
+ content=u'autre chose')
+ cnx.commit()
+ self.assertEqual(cnx.execute('Card X ORDERBY FTIRANK(X) DESC '
+ 'WHERE X has_text "cubicweb"').rows,
+ [[c3.eid,], [c1.eid,], [c2.eid,]])
def test_entity_weight(self):
class PersonneIFTIndexableAdapter(IFTIndexableAdapter):
__select__ = is_instance('Personne')
entity_weight = 2.0
with self.temporary_appobjects(PersonneIFTIndexableAdapter):
- req = self.request()
- c1 = req.create_entity('Personne', nom=u'c1', prenom=u'cubicweb')
- c2 = req.create_entity('Comment', content=u'cubicweb cubicweb', comments=c1)
- c3 = req.create_entity('Comment', content=u'cubicweb cubicweb cubicweb', comments=c1)
- self.commit()
- self.assertEqual(req.execute('Any X ORDERBY FTIRANK(X) DESC WHERE X has_text "cubicweb"').rows,
- [[c1.eid,], [c3.eid,], [c2.eid,]])
+ with self.admin_access.repo_cnx() as cnx:
+ c1 = cnx.create_entity('Personne', nom=u'c1', prenom=u'cubicweb')
+ c2 = cnx.create_entity('Comment', content=u'cubicweb cubicweb',
+ comments=c1)
+ c3 = cnx.create_entity('Comment', content=u'cubicweb cubicweb cubicweb',
+ comments=c1)
+ cnx.commit()
+ self.assertEqual(cnx.execute('Any X ORDERBY FTIRANK(X) DESC '
+ 'WHERE X has_text "cubicweb"').rows,
+ [[c1.eid,], [c3.eid,], [c2.eid,]])
def test_tz_datetime(self):
- self.execute("INSERT Personne X: X nom 'bob', X tzdatenaiss %(date)s",
- {'date': datetime(1977, 6, 7, 2, 0, tzinfo=FixedOffset(1))})
- datenaiss = self.execute("Any XD WHERE X nom 'bob', X tzdatenaiss XD")[0][0]
- self.assertEqual(datenaiss.tzinfo, None)
- self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 1, 0))
- self.commit()
- self.execute("INSERT Personne X: X nom 'boby', X tzdatenaiss %(date)s",
- {'date': datetime(1977, 6, 7, 2, 0)})
- datenaiss = self.execute("Any XD WHERE X nom 'boby', X tzdatenaiss XD")[0][0]
- self.assertEqual(datenaiss.tzinfo, None)
- self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 2, 0))
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute("INSERT Personne X: X nom 'bob', X tzdatenaiss %(date)s",
+ {'date': datetime(1977, 6, 7, 2, 0, tzinfo=FixedOffset(1))})
+ datenaiss = cnx.execute("Any XD WHERE X nom 'bob', X tzdatenaiss XD")[0][0]
+ self.assertEqual(datenaiss.tzinfo, None)
+ self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 1, 0))
+ cnx.commit()
+ cnx.execute("INSERT Personne X: X nom 'boby', X tzdatenaiss %(date)s",
+ {'date': datetime(1977, 6, 7, 2, 0)})
+ datenaiss = cnx.execute("Any XD WHERE X nom 'boby', X tzdatenaiss XD")[0][0]
+ self.assertEqual(datenaiss.tzinfo, None)
+ self.assertEqual(datenaiss.utctimetuple()[:5], (1977, 6, 7, 2, 0))
if __name__ == '__main__':
from logilab.common.testlib import unittest_main
--- a/server/test/unittest_repository.py Fri May 23 18:35:13 2014 +0200
+++ b/server/test/unittest_repository.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,5 +1,5 @@
# -*- coding: iso-8859-1 -*-
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -18,25 +18,19 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""unit tests for module cubicweb.server.repository"""
-import os
-import sys
import threading
import time
import logging
-from copy import deepcopy
-from datetime import datetime
-
-from logilab.common.testlib import TestCase, unittest_main
from yams.constraints import UniqueConstraint
from yams import register_base_type, unregister_base_type
from logilab.database import get_db_helper
-from cubicweb import (BadConnectionId, RepositoryError, ValidationError,
+from cubicweb import (BadConnectionId, ValidationError,
UnknownEid, AuthenticationError, Unauthorized, QueryError)
from cubicweb.predicates import is_instance
-from cubicweb.schema import CubicWebSchema, RQLConstraint
+from cubicweb.schema import RQLConstraint
from cubicweb.dbapi import connect, multiple_connections_unfix
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.devtools.repotest import tuplify
@@ -53,15 +47,16 @@
"""
def test_unique_together_constraint(self):
- self.execute('INSERT Societe S: S nom "Logilab", S type "SSLL", S cp "75013"')
- with self.assertRaises(ValidationError) as wraperr:
- self.execute('INSERT Societe S: S nom "Logilab", S type "SSLL", S cp "75013"')
- self.assertEqual(
- {'cp': u'cp is part of violated unicity constraint',
- 'nom': u'nom is part of violated unicity constraint',
- 'type': u'type is part of violated unicity constraint',
- 'unicity constraint': u'some relations violate a unicity constraint'},
- wraperr.exception.args[1])
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute('INSERT Societe S: S nom "Logilab", S type "SSLL", S cp "75013"')
+ with self.assertRaises(ValidationError) as wraperr:
+ cnx.execute('INSERT Societe S: S nom "Logilab", S type "SSLL", S cp "75013"')
+ self.assertEqual(
+ {'cp': u'cp is part of violated unicity constraint',
+ 'nom': u'nom is part of violated unicity constraint',
+ 'type': u'type is part of violated unicity constraint',
+ 'unicity constraint': u'some relations violate a unicity constraint'},
+ wraperr.exception.args[1])
def test_unique_together_schema(self):
person = self.repo.schema.eschema('Personne')
@@ -70,13 +65,16 @@
('nom', 'prenom', 'inline2'))
def test_all_entities_have_owner(self):
- self.assertFalse(self.execute('Any X WHERE NOT X owned_by U'))
+ with self.admin_access.repo_cnx() as cnx:
+ self.assertFalse(cnx.execute('Any X WHERE NOT X owned_by U'))
def test_all_entities_have_is(self):
- self.assertFalse(self.execute('Any X WHERE NOT X is ET'))
+ with self.admin_access.repo_cnx() as cnx:
+ self.assertFalse(cnx.execute('Any X WHERE NOT X is ET'))
def test_all_entities_have_cw_source(self):
- self.assertFalse(self.execute('Any X WHERE NOT X cw_source S'))
+ with self.admin_access.repo_cnx() as cnx:
+ self.assertFalse(cnx.execute('Any X WHERE NOT X cw_source S'))
def test_connect(self):
cnxid = self.repo.connect(self.admlogin, password=self.admpassword)
@@ -131,15 +129,17 @@
events = ('after_update_entity',)
def __call__(self):
raise ValidationError(self.entity.eid, {})
- with self.temporary_appobjects(ValidationErrorAfterHook):
- self.assertRaises(ValidationError,
- self.execute, 'SET X name "toto" WHERE X is CWGroup, X name "guests"')
- self.assertTrue(self.execute('Any X WHERE X is CWGroup, X name "toto"'))
- with self.assertRaises(QueryError) as cm:
- self.commit()
- self.assertEqual(str(cm.exception), 'transaction must be rolled back')
- self.rollback()
- self.assertFalse(self.execute('Any X WHERE X is CWGroup, X name "toto"'))
+
+ with self.admin_access.repo_cnx() as cnx:
+ with self.temporary_appobjects(ValidationErrorAfterHook):
+ self.assertRaises(ValidationError,
+ cnx.execute, 'SET X name "toto" WHERE X is CWGroup, X name "guests"')
+ self.assertTrue(cnx.execute('Any X WHERE X is CWGroup, X name "toto"'))
+ with self.assertRaises(QueryError) as cm:
+ cnx.commit()
+ self.assertEqual(str(cm.exception), 'transaction must be rolled back')
+ cnx.rollback()
+ self.assertFalse(cnx.execute('Any X WHERE X is CWGroup, X name "toto"'))
def test_rollback_on_execute_unauthorized(self):
class UnauthorizedAfterHook(Hook):
@@ -148,15 +148,17 @@
events = ('after_update_entity',)
def __call__(self):
raise Unauthorized()
- with self.temporary_appobjects(UnauthorizedAfterHook):
- self.assertRaises(Unauthorized,
- self.execute, 'SET X name "toto" WHERE X is CWGroup, X name "guests"')
- self.assertTrue(self.execute('Any X WHERE X is CWGroup, X name "toto"'))
- with self.assertRaises(QueryError) as cm:
- self.commit()
- self.assertEqual(str(cm.exception), 'transaction must be rolled back')
- self.rollback()
- self.assertFalse(self.execute('Any X WHERE X is CWGroup, X name "toto"'))
+
+ with self.admin_access.repo_cnx() as cnx:
+ with self.temporary_appobjects(UnauthorizedAfterHook):
+ self.assertRaises(Unauthorized,
+ cnx.execute, 'SET X name "toto" WHERE X is CWGroup, X name "guests"')
+ self.assertTrue(cnx.execute('Any X WHERE X is CWGroup, X name "toto"'))
+ with self.assertRaises(QueryError) as cm:
+ cnx.commit()
+ self.assertEqual(str(cm.exception), 'transaction must be rolled back')
+ cnx.rollback()
+ self.assertFalse(cnx.execute('Any X WHERE X is CWGroup, X name "toto"'))
def test_close(self):
@@ -234,7 +236,6 @@
cnxid = repo.connect(self.admlogin, password=self.admpassword)
# rollback state change which trigger TrInfo insertion
session = repo._get_session(cnxid)
- session.set_cnxset()
user = session.user
user.cw_adapt_to('IWorkflowable').fire_transition('deactivate')
rset = repo.execute(cnxid, 'TrInfo T WHERE T wf_info_for X, X eid %(x)s', {'x': user.eid})
@@ -244,9 +245,6 @@
self.assertEqual(len(rset), 0)
repo.close(cnxid)
- def test_transaction_interleaved(self):
- self.skipTest('implement me')
-
def test_close_kill_processing_request(self):
repo = self.repo
cnxid = repo.connect(self.admlogin, password=self.admpassword)
@@ -462,8 +460,9 @@
self.assertRaises(BadConnectionId, repo.get_shared_data, cnxid, 'data')
def test_schema_is_relation(self):
- no_is_rset = self.execute('Any X WHERE NOT X is ET')
- self.assertFalse(no_is_rset, no_is_rset.description)
+ with self.admin_access.repo_cnx() as cnx:
+ no_is_rset = cnx.execute('Any X WHERE NOT X is ET')
+ self.assertFalse(no_is_rset, no_is_rset.description)
# def test_perfo(self):
# self.set_debug(True)
@@ -476,28 +475,29 @@
# print 'test time: %.3f (time) %.3f (cpu)' % ((time() - t), clock() - c)
def test_delete_if_singlecard1(self):
- note = self.request().create_entity('Affaire')
- p1 = self.request().create_entity('Personne', nom=u'toto')
- self.execute('SET A todo_by P WHERE A eid %(x)s, P eid %(p)s',
- {'x': note.eid, 'p': p1.eid})
- rset = self.execute('Any P WHERE A todo_by P, A eid %(x)s',
- {'x': note.eid})
- self.assertEqual(len(rset), 1)
- p2 = self.request().create_entity('Personne', nom=u'tutu')
- self.execute('SET A todo_by P WHERE A eid %(x)s, P eid %(p)s',
- {'x': note.eid, 'p': p2.eid})
- rset = self.execute('Any P WHERE A todo_by P, A eid %(x)s',
- {'x': note.eid})
- self.assertEqual(len(rset), 1)
- self.assertEqual(rset.rows[0][0], p2.eid)
+ with self.admin_access.repo_cnx() as cnx:
+ note = cnx.create_entity('Affaire')
+ p1 = cnx.create_entity('Personne', nom=u'toto')
+ cnx.execute('SET A todo_by P WHERE A eid %(x)s, P eid %(p)s',
+ {'x': note.eid, 'p': p1.eid})
+ rset = cnx.execute('Any P WHERE A todo_by P, A eid %(x)s',
+ {'x': note.eid})
+ self.assertEqual(len(rset), 1)
+ p2 = cnx.create_entity('Personne', nom=u'tutu')
+ cnx.execute('SET A todo_by P WHERE A eid %(x)s, P eid %(p)s',
+ {'x': note.eid, 'p': p2.eid})
+ rset = cnx.execute('Any P WHERE A todo_by P, A eid %(x)s',
+ {'x': note.eid})
+ self.assertEqual(len(rset), 1)
+ self.assertEqual(rset.rows[0][0], p2.eid)
def test_delete_if_object_inlined_singlecard(self):
- req = self.request()
- c = req.create_entity('Card', title=u'Carte')
- req.create_entity('Personne', nom=u'Vincent', fiche=c)
- req.create_entity('Personne', nom=u'Florent', fiche=c)
- self.commit()
- self.assertEqual(len(c.reverse_fiche), 1)
+ with self.admin_access.repo_cnx() as cnx:
+ c = cnx.create_entity('Card', title=u'Carte')
+ cnx.create_entity('Personne', nom=u'Vincent', fiche=c)
+ cnx.create_entity('Personne', nom=u'Florent', fiche=c)
+ cnx.commit()
+ self.assertEqual(len(c.reverse_fiche), 1)
def test_cw_set_in_before_update(self):
# local hook
@@ -512,13 +512,14 @@
if self.entity.eid not in pendings:
pendings.add(self.entity.eid)
self.entity.cw_set(alias=u'foo')
- with self.temporary_appobjects(DummyBeforeHook):
- req = self.request()
- addr = req.create_entity('EmailAddress', address=u'a@b.fr')
- addr.cw_set(address=u'a@b.com')
- rset = self.execute('Any A,AA WHERE X eid %(x)s, X address A, X alias AA',
- {'x': addr.eid})
- self.assertEqual(rset.rows, [[u'a@b.com', u'foo']])
+
+ with self.admin_access.repo_cnx() as cnx:
+ with self.temporary_appobjects(DummyBeforeHook):
+ addr = cnx.create_entity('EmailAddress', address=u'a@b.fr')
+ addr.cw_set(address=u'a@b.com')
+ rset = cnx.execute('Any A,AA WHERE X eid %(x)s, X address A, X alias AA',
+ {'x': addr.eid})
+ self.assertEqual(rset.rows, [[u'a@b.com', u'foo']])
def test_cw_set_in_before_add(self):
# local hook
@@ -529,11 +530,12 @@
def __call__(self):
# cw_set is forbidden within before_add_entity()
self.entity.cw_set(alias=u'foo')
- with self.temporary_appobjects(DummyBeforeHook):
- req = self.request()
- # XXX will fail with python -O
- self.assertRaises(AssertionError, req.create_entity,
- 'EmailAddress', address=u'a@b.fr')
+
+ with self.admin_access.repo_cnx() as cnx:
+ with self.temporary_appobjects(DummyBeforeHook):
+ # XXX will fail with python -O
+ self.assertRaises(AssertionError, cnx.create_entity,
+ 'EmailAddress', address=u'a@b.fr')
def test_multiple_edit_cw_set(self):
"""make sure cw_edited doesn't get cluttered
@@ -550,11 +552,12 @@
self._test.assertFalse('invoiced' in self.entity.cw_edited,
'cw_edited cluttered by previous update')
self.entity.cw_edited['invoiced'] = 10
- with self.temporary_appobjects(DummyBeforeHook):
- req = self.request()
- req.create_entity('Affaire', ref=u'AFF01')
- req.create_entity('Affaire', ref=u'AFF02')
- req.execute('SET A duration 10 WHERE A is Affaire')
+
+ with self.admin_access.repo_cnx() as cnx:
+ with self.temporary_appobjects(DummyBeforeHook):
+ cnx.create_entity('Affaire', ref=u'AFF01')
+ cnx.create_entity('Affaire', ref=u'AFF02')
+ cnx.execute('SET A duration 10 WHERE A is Affaire')
def test_user_friendly_error(self):
@@ -564,20 +567,20 @@
def raise_user_exception(self):
raise ValidationError(self.entity.eid, {'hip': 'hop'})
- with self.temporary_appobjects(MyIUserFriendlyUniqueTogether):
- req = self.request()
- s = req.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'75013')
- self.commit()
- with self.assertRaises(ValidationError) as cm:
- req.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'75013')
- self.assertEqual(cm.exception.errors, {'hip': 'hop'})
- self.rollback()
- req.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'31400')
- with self.assertRaises(ValidationError) as cm:
- s.cw_set(cp=u'31400')
- self.assertEqual(cm.exception.entity, s.eid)
- self.assertEqual(cm.exception.errors, {'hip': 'hop'})
- self.rollback()
+ with self.admin_access.repo_cnx() as cnx:
+ with self.temporary_appobjects(MyIUserFriendlyUniqueTogether):
+ s = cnx.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'75013')
+ cnx.commit()
+ with self.assertRaises(ValidationError) as cm:
+ cnx.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'75013')
+ self.assertEqual(cm.exception.errors, {'hip': 'hop'})
+ cnx.rollback()
+ cnx.create_entity('Societe', nom=u'Logilab', type=u'ssll', cp=u'31400')
+ with self.assertRaises(ValidationError) as cm:
+ s.cw_set(cp=u'31400')
+ self.assertEqual(cm.exception.entity, s.eid)
+ self.assertEqual(cm.exception.errors, {'hip': 'hop'})
+ cnx.rollback()
class SchemaDeserialTC(CubicWebTC):
@@ -615,35 +618,39 @@
table = SQL_PREFIX + 'CWEType'
namecol = SQL_PREFIX + 'name'
finalcol = SQL_PREFIX + 'final'
- self.session.set_cnxset()
- cu = self.session.system_sql('SELECT %s FROM %s WHERE %s is NULL' % (
- namecol, table, finalcol))
- self.assertEqual(cu.fetchall(), [])
- cu = self.session.system_sql('SELECT %s FROM %s WHERE %s=%%(final)s ORDER BY %s'
- % (namecol, table, finalcol, namecol), {'final': True})
- self.assertEqual(cu.fetchall(), [(u'BabarTestType',),
- (u'BigInt',), (u'Boolean',), (u'Bytes',),
- (u'Date',), (u'Datetime',),
- (u'Decimal',),(u'Float',),
- (u'Int',),
- (u'Interval',), (u'Password',),
- (u'String',),
- (u'TZDatetime',), (u'TZTime',), (u'Time',)])
- sql = ("SELECT etype.cw_eid, etype.cw_name, cstr.cw_eid, rel.eid_to "
- "FROM cw_CWUniqueTogetherConstraint as cstr, "
- " relations_relation as rel, "
- " cw_CWEType as etype "
- "WHERE cstr.cw_eid = rel.eid_from "
- " AND cstr.cw_constraint_of = etype.cw_eid "
- " AND etype.cw_name = 'Personne' "
- ";")
- cu = self.session.system_sql(sql)
- rows = cu.fetchall()
- self.assertEqual(len(rows), 3)
- person = self.repo.schema.eschema('Personne')
- self.assertEqual(len(person._unique_together), 1)
- self.assertItemsEqual(person._unique_together[0],
- ('nom', 'prenom', 'inline2'))
+ with self.admin_access.repo_cnx() as cnx:
+ with cnx.ensure_cnx_set:
+ cu = cnx.system_sql('SELECT %s FROM %s WHERE %s is NULL'
+ % (namecol, table, finalcol))
+ self.assertEqual(cu.fetchall(), [])
+ cu = cnx.system_sql('SELECT %s FROM %s '
+ 'WHERE %s=%%(final)s ORDER BY %s'
+ % (namecol, table, finalcol, namecol),
+ {'final': True})
+ self.assertEqual(cu.fetchall(),
+ [(u'BabarTestType',),
+ (u'BigInt',), (u'Boolean',), (u'Bytes',),
+ (u'Date',), (u'Datetime',),
+ (u'Decimal',),(u'Float',),
+ (u'Int',),
+ (u'Interval',), (u'Password',),
+ (u'String',),
+ (u'TZDatetime',), (u'TZTime',), (u'Time',)])
+ sql = ("SELECT etype.cw_eid, etype.cw_name, cstr.cw_eid, rel.eid_to "
+ "FROM cw_CWUniqueTogetherConstraint as cstr, "
+ " relations_relation as rel, "
+ " cw_CWEType as etype "
+ "WHERE cstr.cw_eid = rel.eid_from "
+ " AND cstr.cw_constraint_of = etype.cw_eid "
+ " AND etype.cw_name = 'Personne' "
+ ";")
+ cu = cnx.system_sql(sql)
+ rows = cu.fetchall()
+ self.assertEqual(len(rows), 3)
+ person = self.repo.schema.eschema('Personne')
+ self.assertEqual(len(person._unique_together), 1)
+ self.assertItemsEqual(person._unique_together[0],
+ ('nom', 'prenom', 'inline2'))
finally:
self.repo.set_schema(origshema)
@@ -664,80 +671,90 @@
class DataHelpersTC(CubicWebTC):
- def test_create_eid(self):
- self.session.set_cnxset()
- self.assert_(self.repo.system_source.create_eid(self.session))
-
def test_type_from_eid(self):
- self.session.set_cnxset()
- self.assertEqual(self.repo.type_from_eid(2, self.session), 'CWGroup')
+ with self.admin_access.repo_cnx() as cnx:
+ with cnx.ensure_cnx_set:
+ self.assertEqual(self.repo.type_from_eid(2, cnx), 'CWGroup')
def test_type_from_eid_raise(self):
- self.session.set_cnxset()
- self.assertRaises(UnknownEid, self.repo.type_from_eid, -2, self.session)
+ with self.admin_access.repo_cnx() as cnx:
+ with cnx.ensure_cnx_set:
+ self.assertRaises(UnknownEid, self.repo.type_from_eid, -2, cnx)
def test_add_delete_info(self):
- entity = self.repo.vreg['etypes'].etype_class('Personne')(self.session)
- entity.eid = -1
- entity.complete = lambda x: None
- self.session.set_cnxset()
- self.repo.add_info(self.session, entity, self.repo.system_source)
- cu = self.session.system_sql('SELECT * FROM entities WHERE eid = -1')
- data = cu.fetchall()
- self.assertEqual(tuplify(data), [(-1, 'Personne', 'system', None)])
- self.repo.delete_info(self.session, entity, 'system')
- #self.repo.commit()
- cu = self.session.system_sql('SELECT * FROM entities WHERE eid = -1')
- data = cu.fetchall()
- self.assertEqual(data, [])
+ with self.admin_access.repo_cnx() as cnx:
+ with cnx.ensure_cnx_set:
+ cnx.mode = 'write'
+ entity = self.repo.vreg['etypes'].etype_class('Personne')(cnx)
+ entity.eid = -1
+ entity.complete = lambda x: None
+ self.repo.add_info(cnx, entity, self.repo.system_source)
+ cu = cnx.system_sql('SELECT * FROM entities WHERE eid = -1')
+ data = cu.fetchall()
+ self.assertEqual(tuplify(data), [(-1, 'Personne', 'system', None)])
+ self.repo.delete_info(cnx, entity, 'system')
+ #self.repo.commit()
+ cu = cnx.system_sql('SELECT * FROM entities WHERE eid = -1')
+ data = cu.fetchall()
+ self.assertEqual(data, [])
class FTITC(CubicWebTC):
def test_fulltext_container_entity(self):
- assert self.schema.rschema('use_email').fulltext_container == 'subject'
- req = self.request()
- toto = req.create_entity('EmailAddress', address=u'toto@logilab.fr')
- self.commit()
- rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'})
- self.assertEqual(rset.rows, [])
- req.user.cw_set(use_email=toto)
- self.commit()
- rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'})
- self.assertEqual(rset.rows, [[req.user.eid]])
- req.execute('DELETE X use_email Y WHERE X login "admin", Y eid %(y)s',
- {'y': toto.eid})
- self.commit()
- rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'})
- self.assertEqual(rset.rows, [])
- tutu = req.create_entity('EmailAddress', address=u'tutu@logilab.fr')
- req.user.cw_set(use_email=tutu)
- self.commit()
- rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'tutu'})
- self.assertEqual(rset.rows, [[req.user.eid]])
- tutu.cw_set(address=u'hip@logilab.fr')
- self.commit()
- rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'tutu'})
- self.assertEqual(rset.rows, [])
- rset = req.execute('Any X WHERE X has_text %(t)s', {'t': 'hip'})
- self.assertEqual(rset.rows, [[req.user.eid]])
+ with self.admin_access.repo_cnx() as cnx:
+ assert self.schema.rschema('use_email').fulltext_container == 'subject'
+ toto = cnx.create_entity('EmailAddress', address=u'toto@logilab.fr')
+ cnx.commit()
+ rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'})
+ self.assertEqual(rset.rows, [])
+ cnx.user.cw_set(use_email=toto)
+ cnx.commit()
+ rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'})
+ self.assertEqual(rset.rows, [[cnx.user.eid]])
+ cnx.execute('DELETE X use_email Y WHERE X login "admin", Y eid %(y)s',
+ {'y': toto.eid})
+ cnx.commit()
+ rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'toto'})
+ self.assertEqual(rset.rows, [])
+ tutu = cnx.create_entity('EmailAddress', address=u'tutu@logilab.fr')
+ cnx.user.cw_set(use_email=tutu)
+ cnx.commit()
+ rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'tutu'})
+ self.assertEqual(rset.rows, [[cnx.user.eid]])
+ tutu.cw_set(address=u'hip@logilab.fr')
+ cnx.commit()
+ rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'tutu'})
+ self.assertEqual(rset.rows, [])
+ rset = cnx.execute('Any X WHERE X has_text %(t)s', {'t': 'hip'})
+ self.assertEqual(rset.rows, [[cnx.user.eid]])
def test_no_uncessary_ftiindex_op(self):
- req = self.request()
- req.create_entity('Workflow', name=u'dummy workflow', description=u'huuuuu')
- self.assertFalse(any(x for x in self.session.pending_operations
- if isinstance(x, native.FTIndexEntityOp)))
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.create_entity('Workflow',
+ name=u'dummy workflow',
+ description=u'huuuuu')
+ self.assertFalse(any(x for x in cnx.pending_operations
+ if isinstance(x, native.FTIndexEntityOp)))
class DBInitTC(CubicWebTC):
def test_versions_inserted(self):
- inserted = [r[0] for r in self.execute('Any K ORDERBY K WHERE P pkey K, P pkey ~= "system.version.%"')]
- self.assertEqual(inserted,
- [u'system.version.basket', u'system.version.card', u'system.version.comment',
- u'system.version.cubicweb', u'system.version.email',
- u'system.version.file', u'system.version.folder',
- u'system.version.localperms', u'system.version.tag'])
+ with self.admin_access.repo_cnx() as cnx:
+ inserted = [r[0]
+ for r in cnx.execute('Any K ORDERBY K '
+ 'WHERE P pkey K, P pkey ~= "system.version.%"')]
+ self.assertEqual(inserted,
+ [u'system.version.basket',
+ u'system.version.card',
+ u'system.version.comment',
+ u'system.version.cubicweb',
+ u'system.version.email',
+ u'system.version.file',
+ u'system.version.folder',
+ u'system.version.localperms',
+ u'system.version.tag'])
CALLED = []
@@ -748,11 +765,9 @@
CubicWebTC.setUp(self)
CALLED[:] = ()
- def _after_relation_hook(self, cnxset, fromeid, rtype, toeid):
- self.called.append((fromeid, rtype, toeid))
-
def test_inline_relation(self):
"""make sure <event>_relation hooks are called for inlined relation"""
+
class EcritParHook(hook.Hook):
__regid__ = 'inlinedrelhook'
__select__ = hook.Hook.__select__ & hook.match_rtype('ecrit_par')
@@ -762,47 +777,51 @@
CALLED.append((self.event, self.eidfrom, self.rtype, self.eidto))
with self.temporary_appobjects(EcritParHook):
- eidp = self.execute('INSERT Personne X: X nom "toto"')[0][0]
- eidn = self.execute('INSERT Note X: X type "T"')[0][0]
- self.execute('SET N ecrit_par Y WHERE N type "T", Y nom "toto"')
- self.assertEqual(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp),
- ('after_add_relation', eidn, 'ecrit_par', eidp)])
- CALLED[:] = ()
- self.execute('DELETE N ecrit_par Y WHERE N type "T", Y nom "toto"')
- self.assertEqual(CALLED, [('before_delete_relation', eidn, 'ecrit_par', eidp),
- ('after_delete_relation', eidn, 'ecrit_par', eidp)])
- CALLED[:] = ()
- eidn = self.execute('INSERT Note N: N ecrit_par P WHERE P nom "toto"')[0][0]
- self.assertEqual(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp),
- ('after_add_relation', eidn, 'ecrit_par', eidp)])
+ with self.admin_access.repo_cnx() as cnx:
+ eidp = cnx.execute('INSERT Personne X: X nom "toto"')[0][0]
+ eidn = cnx.execute('INSERT Note X: X type "T"')[0][0]
+ cnx.execute('SET N ecrit_par Y WHERE N type "T", Y nom "toto"')
+ self.assertEqual(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp),
+ ('after_add_relation', eidn, 'ecrit_par', eidp)])
+ CALLED[:] = ()
+ cnx.execute('DELETE N ecrit_par Y WHERE N type "T", Y nom "toto"')
+ self.assertEqual(CALLED, [('before_delete_relation', eidn, 'ecrit_par', eidp),
+ ('after_delete_relation', eidn, 'ecrit_par', eidp)])
+ CALLED[:] = ()
+ eidn = cnx.execute('INSERT Note N: N ecrit_par P WHERE P nom "toto"')[0][0]
+ self.assertEqual(CALLED, [('before_add_relation', eidn, 'ecrit_par', eidp),
+ ('after_add_relation', eidn, 'ecrit_par', eidp)])
def test_unique_contraint(self):
- req = self.request()
- toto = req.create_entity('Personne', nom=u'toto')
- a01 = req.create_entity('Affaire', ref=u'A01', todo_by=toto)
- req.cnx.commit()
- req = self.request()
- req.create_entity('Note', type=u'todo', inline1=a01)
- req.cnx.commit()
- req = self.request()
- req.create_entity('Note', type=u'todo', inline1=a01)
- with self.assertRaises(ValidationError) as cm:
- req.cnx.commit()
- self.assertEqual(cm.exception.errors, {'inline1-subject': u'RQLUniqueConstraint S type T, S inline1 A1, A1 todo_by C, Y type T, Y inline1 A2, A2 todo_by C failed'})
+ with self.admin_access.repo_cnx() as cnx:
+ toto = cnx.create_entity('Personne', nom=u'toto')
+ a01 = cnx.create_entity('Affaire', ref=u'A01', todo_by=toto)
+ cnx.commit()
+ cnx.create_entity('Note', type=u'todo', inline1=a01)
+ cnx.commit()
+ cnx.create_entity('Note', type=u'todo', inline1=a01)
+ with self.assertRaises(ValidationError) as cm:
+ cnx.commit()
+ self.assertEqual(cm.exception.errors,
+ {'inline1-subject': u'RQLUniqueConstraint S type T, S inline1 A1, '
+ 'A1 todo_by C, Y type T, Y inline1 A2, A2 todo_by C failed'})
def test_add_relations_at_creation_with_del_existing_rel(self):
- req = self.request()
- person = req.create_entity('Personne', nom=u'Toto', prenom=u'Lanturlu', sexe=u'M')
- users_rql = 'Any U WHERE U is CWGroup, U name "users"'
- users = self.execute(users_rql).get_entity(0, 0)
- req.create_entity('CWUser',
- login=u'Toto',
- upassword=u'firstname',
- firstname=u'firstname',
- surname=u'surname',
- reverse_login_user=person,
- in_group=users)
- self.commit()
+ with self.admin_access.repo_cnx() as cnx:
+ person = cnx.create_entity('Personne',
+ nom=u'Toto',
+ prenom=u'Lanturlu',
+ sexe=u'M')
+ users_rql = 'Any U WHERE U is CWGroup, U name "users"'
+ users = cnx.execute(users_rql).get_entity(0, 0)
+ cnx.create_entity('CWUser',
+ login=u'Toto',
+ upassword=u'firstname',
+ firstname=u'firstname',
+ surname=u'surname',
+ reverse_login_user=person,
+ in_group=users)
+ cnx.commit()
class PerformanceTest(CubicWebTC):
@@ -819,160 +838,161 @@
logger.setLevel(logging.CRITICAL)
def test_composite_deletion(self):
- req = self.request()
- personnes = []
- t0 = time.time()
- for i in xrange(2000):
- p = req.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
- personnes.append(p)
- abraham = req.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
- for j in xrange(0, 2000, 100):
- abraham.cw_set(personne_composite=personnes[j:j+100])
- t1 = time.time()
- self.info('creation: %.2gs', (t1 - t0))
- req.cnx.commit()
- t2 = time.time()
- self.info('commit creation: %.2gs', (t2 - t1))
- self.execute('DELETE Personne P WHERE P eid %(eid)s', {'eid': abraham.eid})
- t3 = time.time()
- self.info('deletion: %.2gs', (t3 - t2))
- req.cnx.commit()
- t4 = time.time()
- self.info("commit deletion: %2gs", (t4 - t3))
+ with self.admin_access.repo_cnx() as cnx:
+ personnes = []
+ t0 = time.time()
+ for i in xrange(2000):
+ p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
+ personnes.append(p)
+ abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
+ for j in xrange(0, 2000, 100):
+ abraham.cw_set(personne_composite=personnes[j:j+100])
+ t1 = time.time()
+ self.info('creation: %.2gs', (t1 - t0))
+ cnx.commit()
+ t2 = time.time()
+ self.info('commit creation: %.2gs', (t2 - t1))
+ cnx.execute('DELETE Personne P WHERE P eid %(eid)s', {'eid': abraham.eid})
+ t3 = time.time()
+ self.info('deletion: %.2gs', (t3 - t2))
+ cnx.commit()
+ t4 = time.time()
+ self.info("commit deletion: %2gs", (t4 - t3))
def test_add_relation_non_inlined(self):
- req = self.request()
- personnes = []
- for i in xrange(2000):
- p = req.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
- personnes.append(p)
- req.cnx.commit()
- t0 = time.time()
- abraham = req.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M',
- personne_composite=personnes[:100])
- t1 = time.time()
- self.info('creation: %.2gs', (t1 - t0))
- for j in xrange(100, 2000, 100):
- abraham.cw_set(personne_composite=personnes[j:j+100])
- t2 = time.time()
- self.info('more relations: %.2gs', (t2-t1))
- req.cnx.commit()
- t3 = time.time()
- self.info('commit creation: %.2gs', (t3 - t2))
+ with self.admin_access.repo_cnx() as cnx:
+ personnes = []
+ for i in xrange(2000):
+ p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
+ personnes.append(p)
+ cnx.commit()
+ t0 = time.time()
+ abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M',
+ personne_composite=personnes[:100])
+ t1 = time.time()
+ self.info('creation: %.2gs', (t1 - t0))
+ for j in xrange(100, 2000, 100):
+ abraham.cw_set(personne_composite=personnes[j:j+100])
+ t2 = time.time()
+ self.info('more relations: %.2gs', (t2-t1))
+ cnx.commit()
+ t3 = time.time()
+ self.info('commit creation: %.2gs', (t3 - t2))
def test_add_relation_inlined(self):
- req = self.request()
- personnes = []
- for i in xrange(2000):
- p = req.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
- personnes.append(p)
- req.cnx.commit()
- t0 = time.time()
- abraham = req.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M',
- personne_inlined=personnes[:100])
- t1 = time.time()
- self.info('creation: %.2gs', (t1 - t0))
- for j in xrange(100, 2000, 100):
- abraham.cw_set(personne_inlined=personnes[j:j+100])
- t2 = time.time()
- self.info('more relations: %.2gs', (t2-t1))
- req.cnx.commit()
- t3 = time.time()
- self.info('commit creation: %.2gs', (t3 - t2))
+ with self.admin_access.repo_cnx() as cnx:
+ personnes = []
+ for i in xrange(2000):
+ p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
+ personnes.append(p)
+ cnx.commit()
+ t0 = time.time()
+ abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M',
+ personne_inlined=personnes[:100])
+ t1 = time.time()
+ self.info('creation: %.2gs', (t1 - t0))
+ for j in xrange(100, 2000, 100):
+ abraham.cw_set(personne_inlined=personnes[j:j+100])
+ t2 = time.time()
+ self.info('more relations: %.2gs', (t2-t1))
+ cnx.commit()
+ t3 = time.time()
+ self.info('commit creation: %.2gs', (t3 - t2))
def test_session_add_relation(self):
""" to be compared with test_session_add_relations"""
- req = self.request()
- personnes = []
- for i in xrange(2000):
- p = req.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
- personnes.append(p)
- abraham = req.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
- req.cnx.commit()
- t0 = time.time()
- add_relation = self.session.add_relation
- for p in personnes:
- add_relation(abraham.eid, 'personne_composite', p.eid)
- req.cnx.commit()
- t1 = time.time()
- self.info('add relation: %.2gs', t1-t0)
+ with self.admin_access.repo_cnx() as cnx:
+ personnes = []
+ for i in xrange(2000):
+ p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
+ personnes.append(p)
+ abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
+ cnx.commit()
+ t0 = time.time()
+ add_relation = cnx.add_relation
+ for p in personnes:
+ add_relation(abraham.eid, 'personne_composite', p.eid)
+ cnx.commit()
+ t1 = time.time()
+ self.info('add relation: %.2gs', t1-t0)
def test_session_add_relations (self):
""" to be compared with test_session_add_relation"""
- req = self.request()
- personnes = []
- for i in xrange(2000):
- p = req.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
- personnes.append(p)
- abraham = req.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
- req.cnx.commit()
- t0 = time.time()
- add_relations = self.session.add_relations
- relations = [('personne_composite', [(abraham.eid, p.eid) for p in personnes])]
- add_relations(relations)
- req.cnx.commit()
- t1 = time.time()
- self.info('add relations: %.2gs', t1-t0)
+ with self.admin_access.repo_cnx() as cnx:
+ personnes = []
+ for i in xrange(2000):
+ p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
+ personnes.append(p)
+ abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
+ cnx.commit()
+ t0 = time.time()
+ add_relations = cnx.add_relations
+ relations = [('personne_composite', [(abraham.eid, p.eid) for p in personnes])]
+ add_relations(relations)
+ cnx.commit()
+ t1 = time.time()
+ self.info('add relations: %.2gs', t1-t0)
def test_session_add_relation_inlined(self):
""" to be compared with test_session_add_relations"""
- req = self.request()
- personnes = []
- for i in xrange(2000):
- p = req.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
- personnes.append(p)
- abraham = req.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
- req.cnx.commit()
- t0 = time.time()
- add_relation = self.session.add_relation
- for p in personnes:
- add_relation(abraham.eid, 'personne_inlined', p.eid)
- req.cnx.commit()
- t1 = time.time()
- self.info('add relation (inlined): %.2gs', t1-t0)
+ with self.admin_access.repo_cnx() as cnx:
+ personnes = []
+ for i in xrange(2000):
+ p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
+ personnes.append(p)
+ abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
+ cnx.commit()
+ t0 = time.time()
+ add_relation = cnx.add_relation
+ for p in personnes:
+ add_relation(abraham.eid, 'personne_inlined', p.eid)
+ cnx.commit()
+ t1 = time.time()
+ self.info('add relation (inlined): %.2gs', t1-t0)
def test_session_add_relations_inlined (self):
""" to be compared with test_session_add_relation"""
- req = self.request()
- personnes = []
- for i in xrange(2000):
- p = req.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
- personnes.append(p)
- abraham = req.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
- req.cnx.commit()
- t0 = time.time()
- add_relations = self.session.add_relations
- relations = [('personne_inlined', [(abraham.eid, p.eid) for p in personnes])]
- add_relations(relations)
- req.cnx.commit()
- t1 = time.time()
- self.info('add relations (inlined): %.2gs', t1-t0)
+ with self.admin_access.repo_cnx() as cnx:
+ personnes = []
+ for i in xrange(2000):
+ p = cnx.create_entity('Personne', nom=u'Doe%03d'%i, prenom=u'John', sexe=u'M')
+ personnes.append(p)
+ abraham = cnx.create_entity('Personne', nom=u'Abraham', prenom=u'John', sexe=u'M')
+ cnx.commit()
+ t0 = time.time()
+ add_relations = cnx.add_relations
+ relations = [('personne_inlined', [(abraham.eid, p.eid) for p in personnes])]
+ add_relations(relations)
+ cnx.commit()
+ t1 = time.time()
+ self.info('add relations (inlined): %.2gs', t1-t0)
def test_optional_relation_reset_1(self):
- req = self.request()
- p1 = req.create_entity('Personne', nom=u'Vincent')
- p2 = req.create_entity('Personne', nom=u'Florent')
- w = req.create_entity('Affaire', ref=u'wc')
- w.cw_set(todo_by=[p1,p2])
- w.cw_clear_all_caches()
- self.commit()
- self.assertEqual(len(w.todo_by), 1)
- self.assertEqual(w.todo_by[0].eid, p2.eid)
+ with self.admin_access.repo_cnx() as cnx:
+ p1 = cnx.create_entity('Personne', nom=u'Vincent')
+ p2 = cnx.create_entity('Personne', nom=u'Florent')
+ w = cnx.create_entity('Affaire', ref=u'wc')
+ w.cw_set(todo_by=[p1,p2])
+ w.cw_clear_all_caches()
+ cnx.commit()
+ self.assertEqual(len(w.todo_by), 1)
+ self.assertEqual(w.todo_by[0].eid, p2.eid)
def test_optional_relation_reset_2(self):
- req = self.request()
- p1 = req.create_entity('Personne', nom=u'Vincent')
- p2 = req.create_entity('Personne', nom=u'Florent')
- w = req.create_entity('Affaire', ref=u'wc')
- w.cw_set(todo_by=p1)
- self.commit()
- w.cw_set(todo_by=p2)
- w.cw_clear_all_caches()
- self.commit()
- self.assertEqual(len(w.todo_by), 1)
- self.assertEqual(w.todo_by[0].eid, p2.eid)
+ with self.admin_access.repo_cnx() as cnx:
+ p1 = cnx.create_entity('Personne', nom=u'Vincent')
+ p2 = cnx.create_entity('Personne', nom=u'Florent')
+ w = cnx.create_entity('Affaire', ref=u'wc')
+ w.cw_set(todo_by=p1)
+ cnx.commit()
+ w.cw_set(todo_by=p2)
+ w.cw_clear_all_caches()
+ cnx.commit()
+ self.assertEqual(len(w.todo_by), 1)
+ self.assertEqual(w.todo_by[0].eid, p2.eid)
if __name__ == '__main__':
+ from logilab.common.testlib import unittest_main
unittest_main()
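The hunks above all apply the same migration: tests stop going through the implicit self.request() / self.session pair and instead open an explicit connection from self.admin_access (or self.new_access(login) for another user). A minimal sketch of a test written directly in the new style, assuming the CubicWebTC base class and the Personne entity type from the test schema used above:

    from cubicweb.devtools.testlib import CubicWebTC

    class ConnectionStyleTC(CubicWebTC):
        def test_create_and_count(self):
            # every repository access happens inside an explicit connection context
            with self.admin_access.repo_cnx() as cnx:
                cnx.create_entity('Personne', nom=u'example')
                cnx.commit()
                # assumes the fixture starts with no pre-existing Personne entity
                rset = cnx.execute('Any COUNT(X) WHERE X is Personne')
                self.assertEqual(rset[0][0], 1)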
--- a/server/test/unittest_rql2sql.py Fri May 23 18:35:13 2014 +0200
+++ b/server/test/unittest_rql2sql.py Fri Jun 27 11:48:26 2014 +0200
@@ -1882,12 +1882,12 @@
for t in self._parse([("Any X WHERE X creation_date TODAY, X is Affaire",
'''SELECT _X.cw_eid
FROM cw_Affaire AS _X
-WHERE DATE(_X.cw_creation_date)=CURRENT_DATE'''),
+WHERE DATE(_X.cw_creation_date)=%s''' % self.dbhelper.sql_current_date()),
("Personne P where not P datenaiss TODAY",
'''SELECT _P.cw_eid
FROM cw_Personne AS _P
-WHERE NOT (DATE(_P.cw_datenaiss)=CURRENT_DATE)'''),
+WHERE NOT (DATE(_P.cw_datenaiss)=%s)''' % self.dbhelper.sql_current_date()),
]):
yield t
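The unittest_rql2sql change above stops hard-coding CURRENT_DATE in the expected SQL and instead asks the database helper for the backend's own spelling, so the expectation holds for whichever backend the test runs against. A small sketch of the same idea, assuming the logilab.database helper API used in that hunk ('postgres' is only an example driver name):

    from logilab.database import get_db_helper

    # build the expected fragment from the helper rather than hard-coding CURRENT_DATE
    dbhelper = get_db_helper('postgres')
    expected = 'WHERE DATE(_X.cw_creation_date)=%s' % dbhelper.sql_current_date()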
--- a/server/test/unittest_schemaserial.py Fri May 23 18:35:13 2014 +0200
+++ b/server/test/unittest_schemaserial.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -26,6 +26,10 @@
from cubicweb.schema import CubicWebSchemaLoader
from cubicweb.devtools import TestServerConfiguration
+from cubicweb.server.schemaserial import (updateeschema2rql, updaterschema2rql, rschema2rql,
+ eschema2rql, rdef2rql, specialize2rql,
+ _erperms2rql as erperms2rql)
+
from logilab.database import get_db_helper
from yams import register_base_type, unregister_base_type
@@ -53,9 +57,6 @@
helper.TYPE_MAPPING.pop('BabarTestType', None)
helper.TYPE_CONVERTERS.pop('BabarTestType', None)
-from cubicweb.server.schemaserial import *
-from cubicweb.server.schemaserial import _erperms2rql as erperms2rql
-
cstrtypemap = {'RQLConstraint': 'RQLConstraint_eid',
'SizeConstraint': 'SizeConstraint_eid',
'StaticVocabularyConstraint': 'StaticVocabularyConstraint_eid',
@@ -67,7 +68,9 @@
def test_eschema2rql1(self):
self.assertListEqual([
('INSERT CWEType X: X description %(description)s,X final %(final)s,X name %(name)s',
- {'description': u'define a final relation: link a final relation type from a non final entity to a final entity type. used to build the instance schema',
+ {'description': u'define a final relation: '
+ 'link a final relation type from a non final entity '
+ 'to a final entity type. used to build the instance schema',
'name': u'CWAttribute', 'final': False})],
list(eschema2rql(schema.eschema('CWAttribute'))))
@@ -86,7 +89,8 @@
sorted(specialize2rql(schema)))
def test_esche2rql_custom_type(self):
- expected = [('INSERT CWEType X: X description %(description)s,X final %(final)s,X name %(name)s',
+ expected = [('INSERT CWEType X: X description %(description)s,X final %(final)s,'
+ 'X name %(name)s',
{'description': u'',
'name': u'BabarTestType', 'final': True},)]
got = list(eschema2rql(schema.eschema('BabarTestType')))
@@ -94,69 +98,180 @@
def test_rschema2rql1(self):
self.assertListEqual([
- ('INSERT CWRType X: X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s',
- {'description': u'link a relation definition to its relation type', 'symmetric': False, 'name': u'relation_type', 'final' : False, 'fulltext_container': None, 'inlined': True}),
+ ('INSERT CWRType X: X description %(description)s,X final %(final)s,'
+ 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,'
+ 'X name %(name)s,X symmetric %(symmetric)s',
+ {'description': u'link a relation definition to its relation type',
+ 'symmetric': False,
+ 'name': u'relation_type',
+ 'final' : False,
+ 'fulltext_container': None,
+ 'inlined': True}),
- ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,'
+ 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,'
+ 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
{'se': None, 'rt': None, 'oe': None,
- 'description': u'', 'composite': u'object', 'cardinality': u'1*',
+ 'description': u'',
+ 'composite': u'object',
+ 'cardinality': u'1*',
'ordernum': 1}),
- ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s',
- {'x': None, 'ct': u'RQLConstraint_eid', 'value': u';O;O final TRUE\n'}),
+ ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X '
+ 'WHERE CT eid %(ct)s, EDEF eid %(x)s',
+ {'x': None, 'ct': u'RQLConstraint_eid',
+ 'value': u';O;O final TRUE\n'}),
- ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,'
+ 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,'
+ 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
{'se': None, 'rt': None, 'oe': None,
- 'description': u'', 'composite': u'object',
+ 'description': u'', 'composite': u'object',
'ordernum': 1, 'cardinality': u'1*'}),
- ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s',
+ ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X '
+ 'WHERE CT eid %(ct)s, EDEF eid %(x)s',
{'x': None, 'ct': u'RQLConstraint_eid', 'value': u';O;O final FALSE\n'}),
],
list(rschema2rql(schema.rschema('relation_type'), cstrtypemap)))
def test_rschema2rql2(self):
self.assertListEqual([
- ('INSERT CWRType X: X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s', {'description': u'', 'symmetric': False, 'name': u'add_permission', 'final': False, 'fulltext_container': None, 'inlined': False}),
+ ('INSERT CWRType X: X description %(description)s,X final %(final)s,'
+ 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,'
+ 'X name %(name)s,X symmetric %(symmetric)s',
+ {'description': u'',
+ 'symmetric': False,
+ 'name': u'add_permission',
+ 'final': False,
+ 'fulltext_container': None,
+ 'inlined': False}),
- ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
- {'se': None, 'rt': None, 'oe': None,
- 'description': u'groups allowed to add entities/relations of this type', 'composite': None, 'ordernum': 9999, 'cardinality': u'**'}),
- ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
- {'se': None, 'rt': None, 'oe': None,
- 'description': u'rql expression allowing to add entities/relations of this type', 'composite': 'subject', 'ordernum': 9999, 'cardinality': u'*?'}),
+ ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,'
+ 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,'
+ 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'se': None,
+ 'rt': None,
+ 'oe': None,
+ 'description': u'groups allowed to add entities/relations of this type',
+ 'composite': None,
+ 'ordernum': 9999,
+ 'cardinality': u'**'}),
+ ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,'
+ 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,'
+ 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'se': None,
+ 'rt': None,
+ 'oe': None,
+ 'description': u'rql expression allowing to add entities/relations of this type',
+ 'composite': 'subject',
+ 'ordernum': 9999,
+ 'cardinality': u'*?'}),
- ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
- {'se': None, 'rt': None, 'oe': None,
- 'description': u'groups allowed to add entities/relations of this type', 'composite': None, 'ordernum': 9999, 'cardinality': u'**'}),
- ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
- {'se': None, 'rt': None, 'oe': None,
- 'description': u'rql expression allowing to add entities/relations of this type', 'composite': 'subject', 'ordernum': 9999, 'cardinality': u'*?'}),
- ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
- {'cardinality': u'**', 'composite': None, 'description': u'groups allowed to add entities/relations of this type',
- 'oe': None, 'ordernum': 9999, 'rt': None, 'se': None}),
- ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
- {'cardinality': u'*?', 'composite': u'subject', 'description': u'rql expression allowing to add entities/relations of this type', 'oe': None, 'ordernum': 9999, 'rt': None, 'se': None})],
+ ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,'
+ 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,'
+ 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'se': None,
+ 'rt': None,
+ 'oe': None,
+ 'description': u'groups allowed to add entities/relations of this type',
+ 'composite': None,
+ 'ordernum': 9999,
+ 'cardinality': u'**'}),
+ ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,'
+ 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,'
+ 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'se': None,
+ 'rt': None,
+ 'oe': None,
+ 'description': u'rql expression allowing to add entities/relations of this type',
+ 'composite': 'subject',
+ 'ordernum': 9999,
+ 'cardinality': u'*?'}),
+ ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,'
+ 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,'
+ 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'cardinality': u'**',
+ 'composite': None,
+ 'description': u'groups allowed to add entities/relations of this type',
+ 'oe': None,
+ 'ordernum': 9999,
+ 'rt': None,
+ 'se': None}),
+ ('INSERT CWRelation X: X cardinality %(cardinality)s,X composite %(composite)s,'
+ 'X description %(description)s,X ordernum %(ordernum)s,X relation_type ER,'
+ 'X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'cardinality': u'*?',
+ 'composite': u'subject',
+ 'description': u'rql expression allowing to add entities/relations of this type',
+ 'oe': None,
+ 'ordernum': 9999,
+ 'rt': None,
+ 'se': None})],
list(rschema2rql(schema.rschema('add_permission'), cstrtypemap)))
def test_rschema2rql3(self):
self.assertListEqual([
- ('INSERT CWRType X: X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s',
- {'description': u'', 'symmetric': False, 'name': u'cardinality', 'final': True, 'fulltext_container': None, 'inlined': False}),
+ ('INSERT CWRType X: X description %(description)s,X final %(final)s,'
+ 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,'
+ 'X name %(name)s,X symmetric %(symmetric)s',
+ {'description': u'',
+ 'symmetric': False,
+ 'name': u'cardinality',
+ 'final': True,
+ 'fulltext_container': None,
+ 'inlined': False}),
- ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,X description %(description)s,X fulltextindexed %(fulltextindexed)s,X indexed %(indexed)s,X internationalizable %(internationalizable)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
- {'se': None, 'rt': None, 'oe': None,
- 'description': u'subject/object cardinality', 'internationalizable': True, 'fulltextindexed': False, 'ordernum': 5, 'defaultval': None, 'indexed': False, 'cardinality': u'?1'}),
- ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s',
- {'x': None, 'ct': u'SizeConstraint_eid', 'value': u'max=2'}),
- ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s',
- {'x': None, 'ct': u'StaticVocabularyConstraint_eid', 'value': u"u'?1', u'11'"}),
+ ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,'
+ 'X description %(description)s,X fulltextindexed %(fulltextindexed)s,'
+ 'X indexed %(indexed)s,X internationalizable %(internationalizable)s,'
+ 'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,'
+ 'X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'se': None,
+ 'rt': None,
+ 'oe': None,
+ 'description': u'subject/object cardinality',
+ 'internationalizable': True,
+ 'fulltextindexed': False,
+ 'ordernum': 5,
+ 'defaultval': None,
+ 'indexed': False,
+ 'cardinality': u'?1'}),
+ ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X '
+ 'WHERE CT eid %(ct)s, EDEF eid %(x)s',
+ {'x': None,
+ 'ct': u'SizeConstraint_eid',
+ 'value': u'max=2'}),
+ ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X '
+ 'WHERE CT eid %(ct)s, EDEF eid %(x)s',
+ {'x': None,
+ 'ct': u'StaticVocabularyConstraint_eid',
+ 'value': u"u'?1', u'11'"}),
- ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,X description %(description)s,X fulltextindexed %(fulltextindexed)s,X indexed %(indexed)s,X internationalizable %(internationalizable)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
- {'se': None, 'rt': None, 'oe': None,
- 'description': u'subject/object cardinality', 'internationalizable': True, 'fulltextindexed': False, 'ordernum': 5, 'defaultval': None, 'indexed': False, 'cardinality': u'?1'}),
- ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s',
- {'x': None, 'ct': u'SizeConstraint_eid', 'value': u'max=2'}),
- ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s',
- {'x': None, 'ct': u'StaticVocabularyConstraint_eid', 'value': u"u'?*', u'1*', u'+*', u'**', u'?+', u'1+', u'++', u'*+', u'?1', u'11', u'+1', u'*1', u'??', u'1?', u'+?', u'*?'"})],
+ ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,'
+ 'X description %(description)s,X fulltextindexed %(fulltextindexed)s,'
+ 'X indexed %(indexed)s,X internationalizable %(internationalizable)s,'
+ 'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE '
+ 'WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'se': None,
+ 'rt': None,
+ 'oe': None,
+ 'description': u'subject/object cardinality',
+ 'internationalizable': True,
+ 'fulltextindexed': False,
+ 'ordernum': 5,
+ 'defaultval': None,
+ 'indexed': False,
+ 'cardinality': u'?1'}),
+ ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X '
+ 'WHERE CT eid %(ct)s, EDEF eid %(x)s',
+ {'x': None,
+ 'ct': u'SizeConstraint_eid',
+ 'value': u'max=2'}),
+ ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X '
+ 'WHERE CT eid %(ct)s, EDEF eid %(x)s',
+ {'x': None,
+ 'ct': u'StaticVocabularyConstraint_eid',
+ 'value': (u"u'?*', u'1*', u'+*', u'**', u'?+', u'1+', u'++', u'*+', u'?1', "
+ "u'11', u'+1', u'*1', u'??', u'1?', u'+?', u'*?'")})],
list(rschema2rql(schema.rschema('cardinality'), cstrtypemap)))
def test_rschema2rql_custom_type(self):
@@ -196,41 +311,74 @@
def test_rdef2rql(self):
self.assertListEqual([
- ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,X description %(description)s,X fulltextindexed %(fulltextindexed)s,X indexed %(indexed)s,X internationalizable %(internationalizable)s,X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
- {'se': None, 'rt': None, 'oe': None,
- 'description': u'', 'internationalizable': True, 'fulltextindexed': False,
- 'ordernum': 3, 'defaultval': Binary('text/plain'), 'indexed': False, 'cardinality': u'?1'}),
- ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s',
- {'x': None, 'value': u'None', 'ct': 'FormatConstraint_eid'}),
- ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s',
- {'x': None, 'value': u'max=50', 'ct': 'SizeConstraint_eid'})],
- list(rdef2rql(schema['description_format'].rdefs[('CWRType', 'String')], cstrtypemap)))
+ ('INSERT CWAttribute X: X cardinality %(cardinality)s,X defaultval %(defaultval)s,'
+ 'X description %(description)s,X fulltextindexed %(fulltextindexed)s,'
+ 'X indexed %(indexed)s,X internationalizable %(internationalizable)s,'
+ 'X ordernum %(ordernum)s,X relation_type ER,X from_entity SE,'
+ 'X to_entity OE WHERE SE eid %(se)s,ER eid %(rt)s,OE eid %(oe)s',
+ {'se': None,
+ 'rt': None,
+ 'oe': None,
+ 'description': u'',
+ 'internationalizable': True,
+ 'fulltextindexed': False,
+ 'ordernum': 3,
+ 'defaultval': Binary('text/plain'),
+ 'indexed': False,
+ 'cardinality': u'?1'}),
+ ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X '
+ 'WHERE CT eid %(ct)s, EDEF eid %(x)s',
+ {'x': None,
+ 'value': u'None',
+ 'ct': 'FormatConstraint_eid'}),
+ ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF constrained_by X '
+ 'WHERE CT eid %(ct)s, EDEF eid %(x)s',
+ {'x': None,
+ 'value': u'max=50',
+ 'ct': 'SizeConstraint_eid'})],
+ list(rdef2rql(schema['description_format'].rdefs[('CWRType', 'String')],
+ cstrtypemap)))
def test_updateeschema2rql1(self):
- self.assertListEqual([('SET X description %(description)s,X final %(final)s,X name %(name)s WHERE X eid %(x)s',
- {'description': u'define a final relation: link a final relation type from a non final entity to a final entity type. used to build the instance schema', 'x': 1, 'final': False, 'name': u'CWAttribute'})],
+ self.assertListEqual([('SET X description %(description)s,X final %(final)s,'
+ 'X name %(name)s WHERE X eid %(x)s',
+ {'description': u'define a final relation: link a final relation type from'
+ ' a non final entity to a final entity type. used to build the instance schema',
+ 'x': 1, 'final': False, 'name': u'CWAttribute'})],
list(updateeschema2rql(schema.eschema('CWAttribute'), 1)))
def test_updateeschema2rql2(self):
- self.assertListEqual([('SET X description %(description)s,X final %(final)s,X name %(name)s WHERE X eid %(x)s',
+ self.assertListEqual([('SET X description %(description)s,X final %(final)s,'
+ 'X name %(name)s WHERE X eid %(x)s',
{'description': u'', 'x': 1, 'final': True, 'name': u'String'})],
list(updateeschema2rql(schema.eschema('String'), 1)))
def test_updaterschema2rql1(self):
self.assertListEqual([
- ('SET X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s WHERE X eid %(x)s',
- {'x': 1, 'symmetric': False,
+ ('SET X description %(description)s,X final %(final)s,'
+ 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,'
+ 'X name %(name)s,X symmetric %(symmetric)s WHERE X eid %(x)s',
+ {'x': 1,
+ 'symmetric': False,
'description': u'link a relation definition to its relation type',
- 'final': False, 'fulltext_container': None, 'inlined': True, 'name': u'relation_type'})],
+ 'final': False, 'fulltext_container': None,
+ 'inlined': True,
+ 'name': u'relation_type'})],
list(updaterschema2rql(schema.rschema('relation_type'), 1)))
def test_updaterschema2rql2(self):
expected = [
- ('SET X description %(description)s,X final %(final)s,X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,X name %(name)s,X symmetric %(symmetric)s WHERE X eid %(x)s',
- {'x': 1, 'symmetric': False,
- 'description': u'', 'final': False, 'fulltext_container': None,
- 'inlined': False, 'name': u'add_permission'})
+ ('SET X description %(description)s,X final %(final)s,'
+ 'X fulltext_container %(fulltext_container)s,X inlined %(inlined)s,'
+ 'X name %(name)s,X symmetric %(symmetric)s WHERE X eid %(x)s',
+ {'x': 1,
+ 'symmetric': False,
+ 'description': u'',
+ 'final': False,
+ 'fulltext_container': None,
+ 'inlined': False,
+ 'name': u'add_permission'})
]
for i, (rql, args) in enumerate(updaterschema2rql(schema.rschema('add_permission'), 1)):
yield self.assertEqual, expected[i], (rql, args)
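The rewrapped expectations in unittest_schemaserial.py rely on Python's implicit concatenation of adjacent string literals, so splitting a long RQL query over several lines never changes the value being compared; the module also switches from a star import to explicit names from cubicweb.server.schemaserial. A trivial check of the concatenation assumption:

    query = ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, '
             'EDEF constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s')
    # same text, wrapped differently -- the split points are irrelevant
    assert query == ('INSERT CWConstraint X: X value %(value)s, X cstrtype CT, EDEF '
                     'constrained_by X WHERE CT eid %(ct)s, EDEF eid %(x)s')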
--- a/server/test/unittest_security.py Fri May 23 18:35:13 2014 +0200
+++ b/server/test/unittest_security.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -17,11 +17,7 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
"""functional tests for server'security"""
-import sys
-
-from logilab.common.testlib import unittest_main, TestCase
-
-from rql import RQLException
+from logilab.common.testlib import unittest_main
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb import Unauthorized, ValidationError, QueryError, Binary
@@ -34,9 +30,10 @@
def setup_database(self):
super(BaseSecurityTC, self).setup_database()
- self.create_user(self.request(), 'iaminusersgrouponly')
- hash = _CRYPTO_CTX.encrypt('oldpassword', scheme='des_crypt')
- self.create_user(self.request(), 'oldpassword', password=Binary(hash))
+ with self.admin_access.client_cnx() as cnx:
+ self.create_user(cnx, 'iaminusersgrouponly')
+ hash = _CRYPTO_CTX.encrypt('oldpassword', scheme='des_crypt')
+ self.create_user(cnx, 'oldpassword', password=Binary(hash))
class LowLevelSecurityFunctionTC(BaseSecurityTC):
@@ -44,34 +41,40 @@
rql = u'Personne U where U nom "managers"'
rqlst = self.repo.vreg.rqlhelper.parse(rql).children[0]
with self.temporary_permissions(Personne={'read': ('users', 'managers')}):
- self.repo.vreg.solutions(self.session, rqlst, None)
- solution = rqlst.solutions[0]
- check_read_access(self.session, rqlst, solution, {})
- with self.login('anon') as cu:
+ with self.admin_access.repo_cnx() as cnx:
+ self.repo.vreg.solutions(cnx, rqlst, None)
+ solution = rqlst.solutions[0]
+ check_read_access(cnx, rqlst, solution, {})
+ with self.new_access('anon').repo_cnx() as cnx:
self.assertRaises(Unauthorized,
check_read_access,
- self.session, rqlst, solution, {})
- self.assertRaises(Unauthorized, cu.execute, rql)
+ cnx, rqlst, solution, {})
+ self.assertRaises(Unauthorized, cnx.execute, rql)
def test_upassword_not_selectable(self):
- self.assertRaises(Unauthorized,
- self.execute, 'Any X,P WHERE X is CWUser, X upassword P')
- self.rollback()
- with self.login('iaminusersgrouponly') as cu:
+ with self.admin_access.repo_cnx() as cnx:
self.assertRaises(Unauthorized,
- cu.execute, 'Any X,P WHERE X is CWUser, X upassword P')
+ cnx.execute, 'Any X,P WHERE X is CWUser, X upassword P')
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ self.assertRaises(Unauthorized,
+ cnx.execute, 'Any X,P WHERE X is CWUser, X upassword P')
def test_update_password(self):
- """Ensure that if a user's password is stored with a deprecated hash, it will be updated on next login"""
- oldhash = str(self.session.system_sql("SELECT cw_upassword FROM cw_CWUser WHERE cw_login = 'oldpassword'").fetchone()[0])
- with self.login('oldpassword') as cu:
- pass
- newhash = str(self.session.system_sql("SELECT cw_upassword FROM cw_CWUser WHERE cw_login = 'oldpassword'").fetchone()[0])
- self.assertNotEqual(oldhash, newhash)
- self.assertTrue(newhash.startswith('$6$'))
- with self.login('oldpassword') as cu:
- pass
- self.assertEqual(newhash, str(self.session.system_sql("SELECT cw_upassword FROM cw_CWUser WHERE cw_login = 'oldpassword'").fetchone()[0]))
+ """Ensure that if a user's password is stored with a deprecated hash,
+ it will be updated on next login
+ """
+ with self.repo.internal_cnx() as cnx:
+ oldhash = str(cnx.system_sql("SELECT cw_upassword FROM cw_CWUser "
+ "WHERE cw_login = 'oldpassword'").fetchone()[0])
+ self.repo.close(self.repo.connect('oldpassword', password='oldpassword'))
+ newhash = str(cnx.system_sql("SELECT cw_upassword FROM cw_CWUser "
+ "WHERE cw_login = 'oldpassword'").fetchone()[0])
+ self.assertNotEqual(oldhash, newhash)
+ self.assertTrue(newhash.startswith('$6$'))
+ self.repo.close(self.repo.connect('oldpassword', password='oldpassword'))
+ self.assertEqual(newhash,
+ str(cnx.system_sql("SELECT cw_upassword FROM cw_CWUser WHERE "
+ "cw_login = 'oldpassword'").fetchone()[0]))
class SecurityRewritingTC(BaseSecurityTC):
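The test_update_password hunk above checks that a password stored with the deprecated des_crypt scheme is transparently re-hashed to SHA-512 crypt ('$6$...') on the next successful login. A hedged sketch of the underlying mechanism, assuming passlib's CryptContext (which the _CRYPTO_CTX used in setup_database appears to wrap):

    from passlib.context import CryptContext

    # passlib 1.6-era API: a context that still verifies des_crypt hashes
    # but flags them for upgrade
    ctx = CryptContext(schemes=['sha512_crypt', 'des_crypt'], deprecated=['des_crypt'])
    old_hash = ctx.encrypt('secret', scheme='des_crypt')
    assert ctx.verify('secret', old_hash)      # old hash still authenticates
    assert ctx.needs_update(old_hash)          # ...but should be re-hashed
    new_hash = ctx.encrypt('secret')           # default scheme: sha512_crypt
    assert new_hash.startswith('$6$')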
@@ -86,84 +89,88 @@
super(SecurityRewritingTC, self).tearDown()
def test_not_relation_read_security(self):
- with self.login('iaminusersgrouponly'):
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
self.hijack_source_execute()
- self.execute('Any U WHERE NOT A todo_by U, A is Affaire')
+ cnx.execute('Any U WHERE NOT A todo_by U, A is Affaire')
self.assertEqual(self.query[0][1].as_string(),
'Any U WHERE NOT EXISTS(A todo_by U), A is Affaire')
- self.execute('Any U WHERE NOT EXISTS(A todo_by U), A is Affaire')
+ cnx.execute('Any U WHERE NOT EXISTS(A todo_by U), A is Affaire')
self.assertEqual(self.query[0][1].as_string(),
'Any U WHERE NOT EXISTS(A todo_by U), A is Affaire')
class SecurityTC(BaseSecurityTC):
def setUp(self):
- BaseSecurityTC.setUp(self)
+ super(SecurityTC, self).setUp()
         # implicitly test that managers can add some entities
- self.execute("INSERT Affaire X: X sujet 'cool'")
- self.execute("INSERT Societe X: X nom 'logilab'")
- self.execute("INSERT Personne X: X nom 'bidule'")
- self.execute('INSERT CWGroup X: X name "staff"')
- self.commit()
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute("INSERT Affaire X: X sujet 'cool'")
+ cnx.execute("INSERT Societe X: X nom 'logilab'")
+ cnx.execute("INSERT Personne X: X nom 'bidule'")
+ cnx.execute('INSERT CWGroup X: X name "staff"')
+ cnx.commit()
def test_insert_security(self):
- with self.login('anon') as cu:
- cu.execute("INSERT Personne X: X nom 'bidule'")
- self.assertRaises(Unauthorized, self.commit)
- self.assertEqual(cu.execute('Personne X').rowcount, 1)
+ with self.new_access('anon').repo_cnx() as cnx:
+ cnx.execute("INSERT Personne X: X nom 'bidule'")
+ self.assertRaises(Unauthorized, cnx.commit)
+ self.assertEqual(cnx.execute('Personne X').rowcount, 1)
def test_insert_rql_permission(self):
         # test user can only add an Affaire related to a Societe he owns
- with self.login('iaminusersgrouponly') as cu:
- cu.execute("INSERT Affaire X: X sujet 'cool'")
- self.assertRaises(Unauthorized, self.commit)
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ cnx.execute("INSERT Affaire X: X sujet 'cool'")
+ self.assertRaises(Unauthorized, cnx.commit)
# test nothing has actually been inserted
- self.assertEqual(self.execute('Affaire X').rowcount, 1)
- with self.login('iaminusersgrouponly') as cu:
- cu.execute("INSERT Affaire X: X sujet 'cool'")
- cu.execute("INSERT Societe X: X nom 'chouette'")
- cu.execute("SET A concerne S WHERE A sujet 'cool', S nom 'chouette'")
- self.commit()
+ with self.admin_access.repo_cnx() as cnx:
+ self.assertEqual(cnx.execute('Affaire X').rowcount, 1)
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ cnx.execute("INSERT Affaire X: X sujet 'cool'")
+ cnx.execute("INSERT Societe X: X nom 'chouette'")
+ cnx.execute("SET A concerne S WHERE A sujet 'cool', S nom 'chouette'")
+ cnx.commit()
def test_update_security_1(self):
- with self.login('anon') as cu:
+ with self.new_access('anon').repo_cnx() as cnx:
# local security check
- cu.execute( "SET X nom 'bidulechouette' WHERE X is Personne")
- self.assertRaises(Unauthorized, self.commit)
- self.assertEqual(self.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0)
+ cnx.execute( "SET X nom 'bidulechouette' WHERE X is Personne")
+ self.assertRaises(Unauthorized, cnx.commit)
+ with self.admin_access.repo_cnx() as cnx:
+ self.assertEqual(cnx.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0)
def test_update_security_2(self):
with self.temporary_permissions(Personne={'read': ('users', 'managers'),
'add': ('guests', 'users', 'managers')}):
- with self.login('anon') as cu:
- self.assertRaises(Unauthorized, cu.execute, "SET X nom 'bidulechouette' WHERE X is Personne")
- self.rollback()
- # self.assertRaises(Unauthorized, cnx.commit)
+ with self.new_access('anon').repo_cnx() as cnx:
+ self.assertRaises(Unauthorized, cnx.execute,
+ "SET X nom 'bidulechouette' WHERE X is Personne")
# test nothing has actually been inserted
- self.assertEqual(self.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0)
+ with self.admin_access.repo_cnx() as cnx:
+ self.assertEqual(cnx.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0)
def test_update_security_3(self):
- with self.login('iaminusersgrouponly') as cu:
- cu.execute("INSERT Personne X: X nom 'biduuule'")
- cu.execute("INSERT Societe X: X nom 'looogilab'")
- cu.execute("SET X travaille S WHERE X nom 'biduuule', S nom 'looogilab'")
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ cnx.execute("INSERT Personne X: X nom 'biduuule'")
+ cnx.execute("INSERT Societe X: X nom 'looogilab'")
+ cnx.execute("SET X travaille S WHERE X nom 'biduuule', S nom 'looogilab'")
def test_update_rql_permission(self):
- self.execute("SET A concerne S WHERE A is Affaire, S is Societe")
- self.commit()
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe")
+ cnx.commit()
         # test user can only update an Affaire related to a Societe he owns
- with self.login('iaminusersgrouponly') as cu:
- cu.execute("SET X sujet 'pascool' WHERE X is Affaire")
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ cnx.execute("SET X sujet 'pascool' WHERE X is Affaire")
# this won't actually do anything since the selection query won't return anything
- self.commit()
+ cnx.commit()
# to actually get Unauthorized exception, try to update an entity we can read
- cu.execute("SET X nom 'toto' WHERE X is Societe")
- self.assertRaises(Unauthorized, self.commit)
- cu.execute("INSERT Affaire X: X sujet 'pascool'")
- cu.execute("INSERT Societe X: X nom 'chouette'")
- cu.execute("SET A concerne S WHERE A sujet 'pascool', S nom 'chouette'")
- cu.execute("SET X sujet 'habahsicestcool' WHERE X sujet 'pascool'")
- self.commit()
+ cnx.execute("SET X nom 'toto' WHERE X is Societe")
+ self.assertRaises(Unauthorized, cnx.commit)
+ cnx.execute("INSERT Affaire X: X sujet 'pascool'")
+ cnx.execute("INSERT Societe X: X nom 'chouette'")
+ cnx.execute("SET A concerne S WHERE A sujet 'pascool', S nom 'chouette'")
+ cnx.execute("SET X sujet 'habahsicestcool' WHERE X sujet 'pascool'")
+ cnx.commit()
def test_delete_security(self):
# FIXME: sample below fails because we don't detect "owner" can't delete
@@ -173,199 +180,205 @@
#self.assertRaises(Unauthorized,
# self.o.execute, user, "DELETE CWUser X WHERE X login 'bidule'")
# check local security
- with self.login('iaminusersgrouponly') as cu:
- self.assertRaises(Unauthorized, cu.execute, "DELETE CWGroup Y WHERE Y name 'staff'")
- self.rollback()
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ self.assertRaises(Unauthorized, cnx.execute, "DELETE CWGroup Y WHERE Y name 'staff'")
def test_delete_rql_permission(self):
- self.execute("SET A concerne S WHERE A is Affaire, S is Societe")
- self.commit()
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe")
+ cnx.commit()
         # test user can only delete an Affaire related to a Societe he owns
- with self.login('iaminusersgrouponly') as cu:
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
# this won't actually do anything since the selection query won't return anything
- cu.execute("DELETE Affaire X")
- self.commit()
+ cnx.execute("DELETE Affaire X")
+ cnx.commit()
# to actually get Unauthorized exception, try to delete an entity we can read
- self.assertRaises(Unauthorized, cu.execute, "DELETE Societe S")
- self.assertRaises(QueryError, self.commit) # can't commit anymore
- self.rollback() # required after Unauthorized
- cu.execute("INSERT Affaire X: X sujet 'pascool'")
- cu.execute("INSERT Societe X: X nom 'chouette'")
- cu.execute("SET A concerne S WHERE A sujet 'pascool', S nom 'chouette'")
- self.commit()
+ self.assertRaises(Unauthorized, cnx.execute, "DELETE Societe S")
+ self.assertRaises(QueryError, cnx.commit) # can't commit anymore
+ cnx.rollback()
+ cnx.execute("INSERT Affaire X: X sujet 'pascool'")
+ cnx.execute("INSERT Societe X: X nom 'chouette'")
+ cnx.execute("SET A concerne S WHERE A sujet 'pascool', S nom 'chouette'")
+ cnx.commit()
## # this one should fail since it will try to delete two affaires, one authorized
## # and the other not
-## self.assertRaises(Unauthorized, cu.execute, "DELETE Affaire X")
- cu.execute("DELETE Affaire X WHERE X sujet 'pascool'")
- self.commit()
-
+## self.assertRaises(Unauthorized, cnx.execute, "DELETE Affaire X")
+ cnx.execute("DELETE Affaire X WHERE X sujet 'pascool'")
+ cnx.commit()
def test_insert_relation_rql_permission(self):
- with self.login('iaminusersgrouponly') as cu:
- cu.execute("SET A concerne S WHERE A is Affaire, S is Societe")
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe")
             # should raise Unauthorized since the user doesn't own S, though this won't
# actually do anything since the selection query won't return
# anything
- self.commit()
+ cnx.commit()
# to actually get Unauthorized exception, try to insert a relation
             # where we can read both entities
- rset = cu.execute('Personne P')
+ rset = cnx.execute('Personne P')
self.assertEqual(len(rset), 1)
ent = rset.get_entity(0, 0)
- self.assertFalse(cu.execute('Any P,S WHERE P travaille S,P is Personne, S is Societe'))
+ self.assertFalse(cnx.execute('Any P,S WHERE P travaille S,P is Personne, S is Societe'))
self.assertRaises(Unauthorized, ent.cw_check_perm, 'update')
self.assertRaises(Unauthorized,
- cu.execute, "SET P travaille S WHERE P is Personne, S is Societe")
- self.assertRaises(QueryError, self.commit) # can't commit anymore
- self.rollback()
+ cnx.execute, "SET P travaille S WHERE P is Personne, S is Societe")
+ self.assertRaises(QueryError, cnx.commit) # can't commit anymore
+ cnx.rollback()
# test nothing has actually been inserted:
- self.assertFalse(cu.execute('Any P,S WHERE P travaille S,P is Personne, S is Societe'))
- cu.execute("INSERT Societe X: X nom 'chouette'")
- cu.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'")
- self.commit()
+ self.assertFalse(cnx.execute('Any P,S WHERE P travaille S,P is Personne, S is Societe'))
+ cnx.execute("INSERT Societe X: X nom 'chouette'")
+ cnx.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'")
+ cnx.commit()
def test_delete_relation_rql_permission(self):
- self.execute("SET A concerne S WHERE A is Affaire, S is Societe")
- self.commit()
- with self.login('iaminusersgrouponly') as cu:
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe")
+ cnx.commit()
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
# this won't actually do anything since the selection query won't return anything
- cu.execute("DELETE A concerne S")
- self.commit()
- # to actually get Unauthorized exception, try to delete a relation we can read
- eid = self.execute("INSERT Affaire X: X sujet 'pascool'")[0][0]
- self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': eid})
- self.execute("SET A concerne S WHERE A sujet 'pascool', S is Societe")
- self.commit()
- with self.login('iaminusersgrouponly') as cu:
- self.assertRaises(Unauthorized, cu.execute, "DELETE A concerne S")
- self.assertRaises(QueryError, self.commit) # can't commit anymore
- self.rollback() # required after Unauthorized
- cu.execute("INSERT Societe X: X nom 'chouette'")
- cu.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'")
- self.commit()
- cu.execute("DELETE A concerne S WHERE S nom 'chouette'")
- self.commit()
+ cnx.execute("DELETE A concerne S")
+ cnx.commit()
+ with self.admin_access.repo_cnx() as cnx:
+ # to actually get Unauthorized exception, try to delete a relation we can read
+ eid = cnx.execute("INSERT Affaire X: X sujet 'pascool'")[0][0]
+ cnx.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"',
+ {'x': eid})
+ cnx.execute("SET A concerne S WHERE A sujet 'pascool', S is Societe")
+ cnx.commit()
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ self.assertRaises(Unauthorized, cnx.execute, "DELETE A concerne S")
+ self.assertRaises(QueryError, cnx.commit) # can't commit anymore
+ cnx.rollback()
+ cnx.execute("INSERT Societe X: X nom 'chouette'")
+ cnx.execute("SET A concerne S WHERE A is Affaire, S nom 'chouette'")
+ cnx.commit()
+ cnx.execute("DELETE A concerne S WHERE S nom 'chouette'")
+ cnx.commit()
def test_user_can_change_its_upassword(self):
- req = self.request()
- ueid = self.create_user(req, 'user').eid
- with self.login('user') as cu:
- cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s',
+ with self.admin_access.repo_cnx() as cnx:
+ ueid = self.create_user(cnx, 'user').eid
+ with self.new_access('user').repo_cnx() as cnx:
+ cnx.execute('SET X upassword %(passwd)s WHERE X eid %(x)s',
{'x': ueid, 'passwd': 'newpwd'})
- self.commit()
- cnx = self.login('user', password='newpwd')
- cnx.close()
+ cnx.commit()
+ self.repo.close(self.repo.connect('user', password='newpwd'))
def test_user_cant_change_other_upassword(self):
- req = self.request()
- ueid = self.create_user(req, 'otheruser').eid
- with self.login('iaminusersgrouponly') as cu:
- cu.execute('SET X upassword %(passwd)s WHERE X eid %(x)s',
+ with self.admin_access.repo_cnx() as cnx:
+ ueid = self.create_user(cnx, 'otheruser').eid
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ cnx.execute('SET X upassword %(passwd)s WHERE X eid %(x)s',
{'x': ueid, 'passwd': 'newpwd'})
- self.assertRaises(Unauthorized, self.commit)
+ self.assertRaises(Unauthorized, cnx.commit)
# read security test
def test_read_base(self):
with self.temporary_permissions(Personne={'read': ('users', 'managers')}):
- with self.login('anon') as cu:
+ with self.new_access('anon').repo_cnx() as cnx:
self.assertRaises(Unauthorized,
- cu.execute, 'Personne U where U nom "managers"')
- self.rollback()
+ cnx.execute, 'Personne U where U nom "managers"')
def test_read_erqlexpr_base(self):
- eid = self.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
- self.commit()
- with self.login('iaminusersgrouponly') as cu:
- rset = cu.execute('Affaire X')
+ with self.admin_access.repo_cnx() as cnx:
+ eid = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
+ cnx.commit()
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ rset = cnx.execute('Affaire X')
self.assertEqual(rset.rows, [])
- self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid})
+ self.assertRaises(Unauthorized, cnx.execute, 'Any X WHERE X eid %(x)s', {'x': eid})
# cache test
- self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x': eid})
- aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
- soc1 = cu.execute("INSERT Societe X: X nom 'chouette'")[0][0]
- cu.execute("SET A concerne S WHERE A is Affaire, S is Societe")
- self.commit()
- rset = cu.execute('Any X WHERE X eid %(x)s', {'x': aff2})
+ self.assertRaises(Unauthorized, cnx.execute, 'Any X WHERE X eid %(x)s', {'x': eid})
+ aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
+ soc1 = cnx.execute("INSERT Societe X: X nom 'chouette'")[0][0]
+ cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe")
+ cnx.commit()
+ rset = cnx.execute('Any X WHERE X eid %(x)s', {'x': aff2})
self.assertEqual(rset.rows, [[aff2]])
# more cache test w/ NOT eid
- rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': eid})
+ rset = cnx.execute('Affaire X WHERE NOT X eid %(x)s', {'x': eid})
self.assertEqual(rset.rows, [[aff2]])
- rset = cu.execute('Affaire X WHERE NOT X eid %(x)s', {'x': aff2})
+ rset = cnx.execute('Affaire X WHERE NOT X eid %(x)s', {'x': aff2})
self.assertEqual(rset.rows, [])
             # test that we can't update an attribute of an entity that can't be read
- self.assertRaises(Unauthorized, cu.execute, 'SET X sujet "hacked" WHERE X eid %(x)s', {'x': eid})
- self.rollback()
+ self.assertRaises(Unauthorized, cnx.execute,
+ 'SET X sujet "hacked" WHERE X eid %(x)s', {'x': eid})
def test_entity_created_in_transaction(self):
affschema = self.schema['Affaire']
with self.temporary_permissions(Affaire={'read': affschema.permissions['add']}):
- with self.login('iaminusersgrouponly') as cu:
- aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
                 # entities created in the transaction are readable *by eid*
- self.assertTrue(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2}))
+ self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x':aff2}))
# XXX would be nice if it worked
- rset = cu.execute("Affaire X WHERE X sujet 'cool'")
+ rset = cnx.execute("Affaire X WHERE X sujet 'cool'")
self.assertEqual(len(rset), 0)
- self.assertRaises(Unauthorized, self.commit)
+ self.assertRaises(Unauthorized, cnx.commit)
def test_read_erqlexpr_has_text1(self):
- aff1 = self.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
- card1 = self.execute("INSERT Card X: X title 'cool'")[0][0]
- self.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"', {'x': card1})
- self.commit()
- with self.login('iaminusersgrouponly') as cu:
- aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
- soc1 = cu.execute("INSERT Societe X: X nom 'chouette'")[0][0]
- cu.execute("SET A concerne S WHERE A eid %(a)s, S eid %(s)s", {'a': aff2, 's': soc1})
- self.commit()
- self.assertRaises(Unauthorized, cu.execute, 'Any X WHERE X eid %(x)s', {'x':aff1})
- self.assertTrue(cu.execute('Any X WHERE X eid %(x)s', {'x':aff2}))
- self.assertTrue(cu.execute('Any X WHERE X eid %(x)s', {'x':card1}))
- rset = cu.execute("Any X WHERE X has_text 'cool'")
+ with self.admin_access.repo_cnx() as cnx:
+ aff1 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
+ card1 = cnx.execute("INSERT Card X: X title 'cool'")[0][0]
+ cnx.execute('SET X owned_by U WHERE X eid %(x)s, U login "iaminusersgrouponly"',
+ {'x': card1})
+ cnx.commit()
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
+ soc1 = cnx.execute("INSERT Societe X: X nom 'chouette'")[0][0]
+ cnx.execute("SET A concerne S WHERE A eid %(a)s, S eid %(s)s", {'a': aff2, 's': soc1})
+ cnx.commit()
+ self.assertRaises(Unauthorized, cnx.execute, 'Any X WHERE X eid %(x)s', {'x':aff1})
+ self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x':aff2}))
+ self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x':card1}))
+ rset = cnx.execute("Any X WHERE X has_text 'cool'")
self.assertEqual(sorted(eid for eid, in rset.rows),
[card1, aff2])
- self.rollback()
def test_read_erqlexpr_has_text2(self):
- self.execute("INSERT Personne X: X nom 'bidule'")
- self.execute("INSERT Societe X: X nom 'bidule'")
- self.commit()
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute("INSERT Personne X: X nom 'bidule'")
+ cnx.execute("INSERT Societe X: X nom 'bidule'")
+ cnx.commit()
with self.temporary_permissions(Personne={'read': ('managers',)}):
- with self.login('iaminusersgrouponly') as cu:
- rset = cu.execute('Any N WHERE N has_text "bidule"')
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ rset = cnx.execute('Any N WHERE N has_text "bidule"')
self.assertEqual(len(rset.rows), 1, rset.rows)
- rset = cu.execute('Any N WITH N BEING (Any N WHERE N has_text "bidule")')
+ rset = cnx.execute('Any N WITH N BEING (Any N WHERE N has_text "bidule")')
self.assertEqual(len(rset.rows), 1, rset.rows)
def test_read_erqlexpr_optional_rel(self):
- self.execute("INSERT Personne X: X nom 'bidule'")
- self.execute("INSERT Societe X: X nom 'bidule'")
- self.commit()
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute("INSERT Personne X: X nom 'bidule'")
+ cnx.execute("INSERT Societe X: X nom 'bidule'")
+ cnx.commit()
with self.temporary_permissions(Personne={'read': ('managers',)}):
- with self.login('anon') as cu:
- rset = cu.execute('Any N,U WHERE N has_text "bidule", N owned_by U?')
+ with self.new_access('anon').repo_cnx() as cnx:
+ rset = cnx.execute('Any N,U WHERE N has_text "bidule", N owned_by U?')
self.assertEqual(len(rset.rows), 1, rset.rows)
def test_read_erqlexpr_aggregat(self):
- self.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
- self.commit()
- with self.login('iaminusersgrouponly') as cu:
- rset = cu.execute('Any COUNT(X) WHERE X is Affaire')
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
+ cnx.commit()
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ rset = cnx.execute('Any COUNT(X) WHERE X is Affaire')
self.assertEqual(rset.rows, [[0]])
- aff2 = cu.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
- soc1 = cu.execute("INSERT Societe X: X nom 'chouette'")[0][0]
- cu.execute("SET A concerne S WHERE A is Affaire, S is Societe")
- self.commit()
- rset = cu.execute('Any COUNT(X) WHERE X is Affaire')
+ aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0]
+ soc1 = cnx.execute("INSERT Societe X: X nom 'chouette'")[0][0]
+ cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe")
+ cnx.commit()
+ rset = cnx.execute('Any COUNT(X) WHERE X is Affaire')
self.assertEqual(rset.rows, [[1]])
- rset = cu.execute('Any ETN, COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN')
+ rset = cnx.execute('Any ETN, COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN')
values = dict(rset)
self.assertEqual(values['Affaire'], 1)
self.assertEqual(values['Societe'], 2)
- rset = cu.execute('Any ETN, COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN WITH X BEING ((Affaire X) UNION (Societe X))')
+ rset = cnx.execute('Any ETN, COUNT(X) GROUPBY ETN WHERE X is ET, ET name ETN '
+ 'WITH X BEING ((Affaire X) UNION (Societe X))')
self.assertEqual(len(rset), 2)
values = dict(rset)
self.assertEqual(values['Affaire'], 1)
@@ -373,64 +386,71 @@
def test_attribute_security(self):
- # only managers should be able to edit the 'test' attribute of Personne entities
- eid = self.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org', X test TRUE")[0][0]
- self.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid})
- self.commit()
- with self.login('iaminusersgrouponly') as cu:
- cu.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org', X test TRUE")
- self.assertRaises(Unauthorized, self.commit)
- cu.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org', X test FALSE")
- self.assertRaises(Unauthorized, self.commit)
- eid = cu.execute("INSERT Personne X: X nom 'bidule', X web 'http://www.debian.org'")[0][0]
- self.commit()
- cu.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid})
- self.assertRaises(Unauthorized, self.commit)
- cu.execute('SET X test TRUE WHERE X eid %(x)s', {'x': eid})
- self.assertRaises(Unauthorized, self.commit)
- cu.execute('SET X web "http://www.logilab.org" WHERE X eid %(x)s', {'x': eid})
- self.commit()
+ with self.admin_access.repo_cnx() as cnx:
+ # only managers should be able to edit the 'test' attribute of Personne entities
+ eid = cnx.execute("INSERT Personne X: X nom 'bidule', "
+ "X web 'http://www.debian.org', X test TRUE")[0][0]
+ cnx.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid})
+ cnx.commit()
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ cnx.execute("INSERT Personne X: X nom 'bidule', "
+ "X web 'http://www.debian.org', X test TRUE")
+ self.assertRaises(Unauthorized, cnx.commit)
+ cnx.execute("INSERT Personne X: X nom 'bidule', "
+ "X web 'http://www.debian.org', X test FALSE")
+ self.assertRaises(Unauthorized, cnx.commit)
+ eid = cnx.execute("INSERT Personne X: X nom 'bidule', "
+ "X web 'http://www.debian.org'")[0][0]
+ cnx.commit()
+ cnx.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid})
+ self.assertRaises(Unauthorized, cnx.commit)
+ cnx.execute('SET X test TRUE WHERE X eid %(x)s', {'x': eid})
+ self.assertRaises(Unauthorized, cnx.commit)
+ cnx.execute('SET X web "http://www.logilab.org" WHERE X eid %(x)s', {'x': eid})
+ cnx.commit()
def test_attribute_security_rqlexpr(self):
- # Note.para attribute editable by managers or if the note is in "todo" state
- note = self.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0)
- self.commit()
- note.cw_adapt_to('IWorkflowable').fire_transition('markasdone')
- self.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid})
- self.commit()
- with self.login('iaminusersgrouponly') as cu:
- cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note.eid})
- self.assertRaises(Unauthorized, self.commit)
- note2 = cu.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0)
- self.commit()
+ with self.admin_access.repo_cnx() as cnx:
+ # Note.para attribute editable by managers or if the note is in "todo" state
+ note = cnx.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0)
+ cnx.commit()
+ note.cw_adapt_to('IWorkflowable').fire_transition('markasdone')
+ cnx.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid})
+ cnx.commit()
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ cnx.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note.eid})
+ self.assertRaises(Unauthorized, cnx.commit)
+ note2 = cnx.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0)
+ cnx.commit()
note2.cw_adapt_to('IWorkflowable').fire_transition('markasdone')
- self.commit()
- self.assertEqual(len(cu.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s', {'x': note2.eid})),
+ cnx.commit()
+ self.assertEqual(len(cnx.execute('Any X WHERE X in_state S, S name "todo", X eid %(x)s',
+ {'x': note2.eid})),
0)
- cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid})
- self.assertRaises(Unauthorized, self.commit)
+ cnx.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid})
+ self.assertRaises(Unauthorized, cnx.commit)
note2.cw_adapt_to('IWorkflowable').fire_transition('redoit')
- self.commit()
- cu.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid})
- self.commit()
- cu.execute("INSERT Note X: X something 'A'")
- self.assertRaises(Unauthorized, self.commit)
- cu.execute("INSERT Note X: X para 'zogzog', X something 'A'")
- self.commit()
- note = cu.execute("INSERT Note X").get_entity(0,0)
- self.commit()
+ cnx.commit()
+ cnx.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note2.eid})
+ cnx.commit()
+ cnx.execute("INSERT Note X: X something 'A'")
+ self.assertRaises(Unauthorized, cnx.commit)
+ cnx.execute("INSERT Note X: X para 'zogzog', X something 'A'")
+ cnx.commit()
+ note = cnx.execute("INSERT Note X").get_entity(0,0)
+ cnx.commit()
note.cw_set(something=u'B')
- self.commit()
+ cnx.commit()
note.cw_set(something=None, para=u'zogzog')
- self.commit()
+ cnx.commit()
def test_attribute_read_security(self):
         # anon not allowed to see users' login, but they can see users
login_rdef = self.repo.schema['CWUser'].rdef('login')
with self.temporary_permissions((login_rdef, {'read': ('users', 'managers')}),
CWUser={'read': ('guests', 'users', 'managers')}):
- with self.login('anon') as cu:
- rset = cu.execute('CWUser X')
+ with self.new_access('anon').repo_cnx() as cnx:
+ rset = cnx.execute('CWUser X')
self.assertTrue(rset)
x = rset.get_entity(0, 0)
self.assertEqual(x.login, None)
@@ -441,17 +461,19 @@
self.assertTrue(x.creation_date)
def test_yams_inheritance_and_security_bug(self):
- with self.temporary_permissions(Division={'read': ('managers', ERQLExpression('X owned_by U'))}):
- with self.login('iaminusersgrouponly'):
- querier = self.repo.querier
+ with self.temporary_permissions(Division={'read': ('managers',
+ ERQLExpression('X owned_by U'))}):
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ querier = cnx.repo.querier
rqlst = querier.parse('Any X WHERE X is_instance_of Societe')
- querier.solutions(self.session, rqlst, {})
+ querier.solutions(cnx, rqlst, {})
querier._annotate(rqlst)
- plan = querier.plan_factory(rqlst, {}, self.session)
+ plan = querier.plan_factory(rqlst, {}, cnx)
plan.preprocess(rqlst)
self.assertEqual(
rqlst.as_string(),
- '(Any X WHERE X is IN(SubDivision, Societe)) UNION (Any X WHERE X is Division, EXISTS(X owned_by %(B)s))')
+ '(Any X WHERE X is IN(SubDivision, Societe)) UNION '
+ '(Any X WHERE X is Division, EXISTS(X owned_by %(B)s))')
class BaseSchemaSecurityTC(BaseSecurityTC):
@@ -459,159 +481,155 @@
def test_user_can_delete_object_he_created(self):
# even if some other user have changed object'state
- with self.login('iaminusersgrouponly') as cu:
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
# due to security test, affaire has to concerne a societe the user owns
- cu.execute('INSERT Societe X: X nom "ARCTIA"')
- cu.execute('INSERT Affaire X: X ref "ARCT01", X concerne S WHERE S nom "ARCTIA"')
- self.commit()
- affaire = self.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0)
- affaire.cw_adapt_to('IWorkflowable').fire_transition('abort')
- self.commit()
- self.assertEqual(len(self.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01"')),
- 1)
- self.assertEqual(len(self.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01",'
- 'X owned_by U, U login "admin"')),
- 1) # TrInfo at the above state change
- with self.login('iaminusersgrouponly') as cu:
- cu.execute('DELETE Affaire X WHERE X ref "ARCT01"')
- self.commit()
- self.assertFalse(cu.execute('Affaire X'))
+ cnx.execute('INSERT Societe X: X nom "ARCTIA"')
+ cnx.execute('INSERT Affaire X: X ref "ARCT01", X concerne S WHERE S nom "ARCTIA"')
+ cnx.commit()
+ with self.admin_access.repo_cnx() as cnx:
+ affaire = cnx.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0)
+ affaire.cw_adapt_to('IWorkflowable').fire_transition('abort')
+ cnx.commit()
+ self.assertEqual(len(cnx.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01"')),
+ 1)
+ self.assertEqual(len(cnx.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01",'
+ 'X owned_by U, U login "admin"')),
+ 1) # TrInfo at the above state change
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
+ cnx.execute('DELETE Affaire X WHERE X ref "ARCT01"')
+ cnx.commit()
+ self.assertFalse(cnx.execute('Affaire X'))
def test_users_and_groups_non_readable_by_guests(self):
- with self.login('anon') as cu:
- anon = cu.connection.user(self.session)
+ with self.repo.internal_cnx() as cnx:
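+            # fetch the admin eid through an internal (security-free) connection,
+            # since the anonymous connection below is not allowed to read other users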
+ admineid = cnx.execute('CWUser U WHERE U login "admin"').rows[0][0]
+ with self.new_access('anon').repo_cnx() as cnx:
+ anon = cnx.user
# anonymous user can only read itself
- rset = cu.execute('Any L WHERE X owned_by U, U login L')
+ rset = cnx.execute('Any L WHERE X owned_by U, U login L')
self.assertEqual([['anon']], rset.rows)
- rset = cu.execute('CWUser X')
+ rset = cnx.execute('CWUser X')
self.assertEqual([[anon.eid]], rset.rows)
# anonymous user can read groups (necessary to check allowed transitions for instance)
- self.assert_(cu.execute('CWGroup X'))
+ self.assert_(cnx.execute('CWGroup X'))
# should only be able to read the anonymous user, not another one
- origuser = self.adminsession.user
self.assertRaises(Unauthorized,
- cu.execute, 'CWUser X WHERE X eid %(x)s', {'x': origuser.eid})
- # nothing selected, nothing updated, no exception raised
- #self.assertRaises(Unauthorized,
- # cu.execute, 'SET X login "toto" WHERE X eid %(x)s',
- # {'x': self.user.eid})
-
- rset = cu.execute('CWUser X WHERE X eid %(x)s', {'x': anon.eid})
+ cnx.execute, 'CWUser X WHERE X eid %(x)s', {'x': admineid})
+ rset = cnx.execute('CWUser X WHERE X eid %(x)s', {'x': anon.eid})
self.assertEqual([[anon.eid]], rset.rows)
# but can't modify it
- cu.execute('SET X login "toto" WHERE X eid %(x)s', {'x': anon.eid})
- self.assertRaises(Unauthorized, self.commit)
+ cnx.execute('SET X login "toto" WHERE X eid %(x)s', {'x': anon.eid})
+ self.assertRaises(Unauthorized, cnx.commit)
def test_in_group_relation(self):
- with self.login('iaminusersgrouponly') as cu:
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
rql = u"DELETE U in_group G WHERE U login 'admin'"
- self.assertRaises(Unauthorized, cu.execute, rql)
+ self.assertRaises(Unauthorized, cnx.execute, rql)
rql = u"SET U in_group G WHERE U login 'admin', G name 'users'"
- self.assertRaises(Unauthorized, cu.execute, rql)
- self.rollback()
+ self.assertRaises(Unauthorized, cnx.execute, rql)
def test_owned_by(self):
- self.execute("INSERT Personne X: X nom 'bidule'")
- self.commit()
- with self.login('iaminusersgrouponly') as cu:
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.execute("INSERT Personne X: X nom 'bidule'")
+ cnx.commit()
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
rql = u"SET X owned_by U WHERE U login 'iaminusersgrouponly', X is Personne"
- self.assertRaises(Unauthorized, cu.execute, rql)
- self.rollback()
+ self.assertRaises(Unauthorized, cnx.execute, rql)
def test_bookmarked_by_guests_security(self):
- beid1 = self.execute('INSERT Bookmark B: B path "?vid=manage", B title "manage"')[0][0]
- beid2 = self.execute('INSERT Bookmark B: B path "?vid=index", B title "index", B bookmarked_by U WHERE U login "anon"')[0][0]
- self.commit()
- with self.login('anon') as cu:
- anoneid = self.session.user.eid
- self.assertEqual(cu.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,'
+ with self.admin_access.repo_cnx() as cnx:
+ beid1 = cnx.execute('INSERT Bookmark B: B path "?vid=manage", B title "manage"')[0][0]
+ beid2 = cnx.execute('INSERT Bookmark B: B path "?vid=index", B title "index", '
+ 'B bookmarked_by U WHERE U login "anon"')[0][0]
+ cnx.commit()
+ with self.new_access('anon').repo_cnx() as cnx:
+ anoneid = cnx.user.eid
+ self.assertEqual(cnx.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,'
'B bookmarked_by U, U eid %s' % anoneid).rows,
[['index', '?vid=index']])
- self.assertEqual(cu.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,'
+ self.assertEqual(cnx.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,'
'B bookmarked_by U, U eid %(x)s', {'x': anoneid}).rows,
[['index', '?vid=index']])
             # can read others' bookmarks as well
- self.assertEqual(cu.execute('Any B where B is Bookmark, NOT B bookmarked_by U').rows,
+ self.assertEqual(cnx.execute('Any B where B is Bookmark, NOT B bookmarked_by U').rows,
[[beid1]])
- self.assertRaises(Unauthorized, cu.execute,'DELETE B bookmarked_by U')
+ self.assertRaises(Unauthorized, cnx.execute,'DELETE B bookmarked_by U')
self.assertRaises(Unauthorized,
- cu.execute, 'SET B bookmarked_by U WHERE U eid %(x)s, B eid %(b)s',
+ cnx.execute, 'SET B bookmarked_by U WHERE U eid %(x)s, B eid %(b)s',
{'x': anoneid, 'b': beid1})
- self.rollback()
def test_ambigous_ordered(self):
- with self.login('anon') as cu:
- names = [t for t, in cu.execute('Any N ORDERBY lower(N) WHERE X name N')]
+ with self.new_access('anon').repo_cnx() as cnx:
+ names = [t for t, in cnx.execute('Any N ORDERBY lower(N) WHERE X name N')]
self.assertEqual(names, sorted(names, key=lambda x: x.lower()))
def test_in_state_without_update_perm(self):
"""check a user change in_state without having update permission on the
subject
"""
- eid = self.execute('INSERT Affaire X: X ref "ARCT01"')[0][0]
- self.commit()
- with self.login('iaminusersgrouponly') as cu:
- session = self.session
- # needed to avoid check_perm error
- session.set_cnxset()
+ with self.admin_access.repo_cnx() as cnx:
+ eid = cnx.execute('INSERT Affaire X: X ref "ARCT01"')[0][0]
+ cnx.commit()
+ with self.new_access('iaminusersgrouponly').repo_cnx() as cnx:
# needed to remove rql expr granting update perm to the user
affschema = self.schema['Affaire']
with self.temporary_permissions(Affaire={'update': affschema.get_groups('update'),
'read': ('users',)}):
self.assertRaises(Unauthorized,
- affschema.check_perm, session, 'update', eid=eid)
- aff = cu.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0)
+ affschema.check_perm, cnx, 'update', eid=eid)
+ aff = cnx.execute('Any X WHERE X ref "ARCT01"').get_entity(0, 0)
aff.cw_adapt_to('IWorkflowable').fire_transition('abort')
- self.commit()
+ cnx.commit()
# though changing a user state (even logged user) is reserved to managers
- user = self.user(session)
- session.set_cnxset()
+ user = cnx.user
             # XXX whether it should raise Unauthorized or ValidationError is not clear
             # the best would probably be ValidationError if the transition doesn't exist
# from the current state but Unauthorized if it exists but user can't pass it
self.assertRaises(ValidationError,
user.cw_adapt_to('IWorkflowable').fire_transition, 'deactivate')
- self.rollback() # else will fail on login cm exit
def test_trinfo_security(self):
- aff = self.execute('INSERT Affaire X: X ref "ARCT01"').get_entity(0, 0)
- iworkflowable = aff.cw_adapt_to('IWorkflowable')
- self.commit()
- iworkflowable.fire_transition('abort')
- self.commit()
- # can change tr info comment
- self.execute('SET TI comment %(c)s WHERE TI wf_info_for X, X ref "ARCT01"',
- {'c': u'bouh!'})
- self.commit()
- aff.cw_clear_relation_cache('wf_info_for', 'object')
- trinfo = iworkflowable.latest_trinfo()
- self.assertEqual(trinfo.comment, 'bouh!')
- # but not from_state/to_state
- aff.cw_clear_relation_cache('wf_info_for', role='object')
- self.assertRaises(Unauthorized,
- self.execute, 'SET TI from_state S WHERE TI eid %(ti)s, S name "ben non"',
- {'ti': trinfo.eid})
- self.assertRaises(Unauthorized,
- self.execute, 'SET TI to_state S WHERE TI eid %(ti)s, S name "pitetre"',
- {'ti': trinfo.eid})
+ with self.admin_access.repo_cnx() as cnx:
+ aff = cnx.execute('INSERT Affaire X: X ref "ARCT01"').get_entity(0, 0)
+ iworkflowable = aff.cw_adapt_to('IWorkflowable')
+ cnx.commit()
+ iworkflowable.fire_transition('abort')
+ cnx.commit()
+ # can change tr info comment
+ cnx.execute('SET TI comment %(c)s WHERE TI wf_info_for X, X ref "ARCT01"',
+ {'c': u'bouh!'})
+ cnx.commit()
+ aff.cw_clear_relation_cache('wf_info_for', 'object')
+ trinfo = iworkflowable.latest_trinfo()
+ self.assertEqual(trinfo.comment, 'bouh!')
+ # but not from_state/to_state
+ aff.cw_clear_relation_cache('wf_info_for', role='object')
+ self.assertRaises(Unauthorized, cnx.execute,
+ 'SET TI from_state S WHERE TI eid %(ti)s, S name "ben non"',
+ {'ti': trinfo.eid})
+ self.assertRaises(Unauthorized, cnx.execute,
+ 'SET TI to_state S WHERE TI eid %(ti)s, S name "pitetre"',
+ {'ti': trinfo.eid})
def test_emailaddress_security(self):
         # check for preexisting email addresses
- if self.execute('Any X WHERE X is EmailAddress'):
- rset = self.execute('Any X, U WHERE X is EmailAddress, U use_email X')
- msg = ['Preexisting email readable by anon found!']
- tmpl = ' - "%s" used by user "%s"'
- for i in xrange(len(rset)):
- email, user = rset.get_entity(i, 0), rset.get_entity(i, 1)
- msg.append(tmpl % (email.dc_title(), user.dc_title()))
- raise RuntimeError('\n'.join(msg))
- # actual test
- self.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0)
- self.execute('INSERT EmailAddress X: X address "anon", U use_email X WHERE U login "anon"').get_entity(0, 0)
- self.commit()
- self.assertEqual(len(self.execute('Any X WHERE X is EmailAddress')), 2)
- self.login('anon')
- self.assertEqual(len(self.execute('Any X WHERE X is EmailAddress')), 1)
+ with self.admin_access.repo_cnx() as cnx:
+ if cnx.execute('Any X WHERE X is EmailAddress'):
+ rset = cnx.execute('Any X, U WHERE X is EmailAddress, U use_email X')
+ msg = ['Preexisting email readable by anon found!']
+ tmpl = ' - "%s" used by user "%s"'
+ for i in xrange(len(rset)):
+ email, user = rset.get_entity(i, 0), rset.get_entity(i, 1)
+ msg.append(tmpl % (email.dc_title(), user.dc_title()))
+ raise RuntimeError('\n'.join(msg))
+ # actual test
+ cnx.execute('INSERT EmailAddress X: X address "hop"').get_entity(0, 0)
+ cnx.execute('INSERT EmailAddress X: X address "anon", '
+ 'U use_email X WHERE U login "anon"').get_entity(0, 0)
+ cnx.commit()
+ self.assertEqual(len(cnx.execute('Any X WHERE X is EmailAddress')), 2)
+ with self.new_access('anon').repo_cnx() as cnx:
+ self.assertEqual(len(cnx.execute('Any X WHERE X is EmailAddress')), 1)
if __name__ == '__main__':
unittest_main()
--- a/server/test/unittest_session.py Fri May 23 18:35:13 2014 +0200
+++ b/server/test/unittest_session.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -17,7 +17,7 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
from cubicweb.devtools.testlib import CubicWebTC
-from cubicweb.server.session import HOOKS_ALLOW_ALL, HOOKS_DENY_ALL, Connection
+from cubicweb.server.session import HOOKS_ALLOW_ALL, HOOKS_DENY_ALL
class InternalSessionTC(CubicWebTC):
def test_dbapi_query(self):
--- a/server/test/unittest_sqlutils.py Fri May 23 18:35:13 2014 +0200
+++ b/server/test/unittest_sqlutils.py Fri Jun 27 11:48:26 2014 +0200
@@ -51,18 +51,18 @@
class SQLUtilsTC(CubicWebTC):
def test_group_concat(self):
- req = self.request()
- g = req.create_entity('CWGroup', name=u'héhé')
- u = req.create_entity('CWUser', login=u'toto', upassword=u'',
- in_group=g.eid)
- rset = self.execute(u'Any L,GROUP_CONCAT(G) GROUPBY L WHERE X login L,'
- u'X in_group G, G name GN, NOT G name IN ("users", "héhé")')
- self.assertEqual([[u'admin', u'3'], [u'anon', u'2']],
- rset.rows)
- rset = self.execute('Any L,GROUP_CONCAT(GN) GROUPBY L WHERE X login L,'
- 'X in_group G, G name GN, NOT G name "users"')
- self.assertEqual([[u'admin', u'managers'], [u'anon', u'guests'], [u'toto', u'héhé']],
- rset.rows)
+ with self.admin_access.repo_cnx() as cnx:
+ g = cnx.create_entity('CWGroup', name=u'héhé')
+ u = cnx.create_entity('CWUser', login=u'toto', upassword=u'',
+ in_group=g.eid)
+ rset = cnx.execute(u'Any L,GROUP_CONCAT(G) GROUPBY L WHERE X login L,'
+ u'X in_group G, G name GN, NOT G name IN ("users", "héhé")')
+ self.assertEqual([[u'admin', u'3'], [u'anon', u'2']],
+ rset.rows)
+ rset = cnx.execute('Any L,GROUP_CONCAT(GN) GROUPBY L WHERE X login L,'
+ 'X in_group G, G name GN, NOT G name "users"')
+ self.assertEqual([[u'admin', u'managers'], [u'anon', u'guests'], [u'toto', u'héhé']],
+ rset.rows)
if __name__ == '__main__':
unittest_main()
--- a/server/test/unittest_storage.py Fri May 23 18:35:13 2014 +0200
+++ b/server/test/unittest_storage.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -28,7 +28,7 @@
from cubicweb import Binary, QueryError
from cubicweb.predicates import is_instance
from cubicweb.server.sources import storages
-from cubicweb.server.hook import Hook, Operation
+from cubicweb.server.hook import Hook
class DummyBeforeHook(Hook):
__regid__ = 'dummy-before-hook'
@@ -50,7 +50,7 @@
assert oldvalue == self.entity.data.getvalue()
class StorageTC(CubicWebTC):
-
+ tempdir = None
tags = CubicWebTC.tags | Tags('Storage', 'BFSS')
def setup_database(self):
@@ -65,255 +65,273 @@
shutil.rmtree(self.tempdir)
- def create_file(self, content='the-data'):
- req = self.request()
- return req.create_entity('File', data=Binary(content),
- data_format=u'text/plain', data_name=u'foo.pdf')
+ def create_file(self, cnx, content='the-data'):
+ return cnx.create_entity('File', data=Binary(content),
+ data_format=u'text/plain',
+ data_name=u'foo.pdf')
- def fspath(self, entity):
- fspath = self.execute('Any fspath(D) WHERE F eid %(f)s, F data D',
- {'f': entity.eid})[0][0]
+ def fspath(self, cnx, entity):
+ fspath = cnx.execute('Any fspath(D) WHERE F eid %(f)s, F data D',
+ {'f': entity.eid})[0][0]
return fspath.getvalue()
def test_bfss_wrong_fspath_usage(self):
- f1 = self.create_file()
- self.execute('Any fspath(D) WHERE F eid %(f)s, F data D', {'f': f1.eid})
- with self.assertRaises(NotImplementedError) as cm:
- self.execute('Any fspath(F) WHERE F eid %(f)s', {'f': f1.eid})
- self.assertEqual(str(cm.exception),
- 'This callback is only available for BytesFileSystemStorage '
- 'managed attribute. Is FSPATH() argument BFSS managed?')
+ with self.admin_access.repo_cnx() as cnx:
+ f1 = self.create_file(cnx)
+ cnx.execute('Any fspath(D) WHERE F eid %(f)s, F data D', {'f': f1.eid})
+ with self.assertRaises(NotImplementedError) as cm:
+ cnx.execute('Any fspath(F) WHERE F eid %(f)s', {'f': f1.eid})
+ self.assertEqual(str(cm.exception),
+ 'This callback is only available for BytesFileSystemStorage '
+ 'managed attribute. Is FSPATH() argument BFSS managed?')
def test_bfss_storage(self):
- f1 = self.create_file()
- expected_filepath = osp.join(self.tempdir, '%s_data_%s' %
- (f1.eid, f1.data_name))
- self.assertTrue(osp.isfile(expected_filepath))
- # file should be read only
- self.assertFalse(os.access(expected_filepath, os.W_OK))
- self.assertEqual(file(expected_filepath).read(), 'the-data')
- self.rollback()
- self.assertFalse(osp.isfile(expected_filepath))
- f1 = self.create_file()
- self.commit()
- self.assertEqual(file(expected_filepath).read(), 'the-data')
- f1.cw_set(data=Binary('the new data'))
- self.rollback()
- self.assertEqual(file(expected_filepath).read(), 'the-data')
- f1.cw_delete()
- self.assertTrue(osp.isfile(expected_filepath))
- self.rollback()
- self.assertTrue(osp.isfile(expected_filepath))
- f1.cw_delete()
- self.commit()
- self.assertFalse(osp.isfile(expected_filepath))
+ with self.admin_access.repo_cnx() as cnx:
+ f1 = self.create_file(cnx)
+ expected_filepath = osp.join(self.tempdir, '%s_data_%s' %
+ (f1.eid, f1.data_name))
+ self.assertTrue(osp.isfile(expected_filepath))
+ # file should be read only
+ self.assertFalse(os.access(expected_filepath, os.W_OK))
+ self.assertEqual(file(expected_filepath).read(), 'the-data')
+ cnx.rollback()
+ self.assertFalse(osp.isfile(expected_filepath))
+ f1 = self.create_file(cnx)
+ cnx.commit()
+ self.assertEqual(file(expected_filepath).read(), 'the-data')
+ f1.cw_set(data=Binary('the new data'))
+ cnx.rollback()
+ self.assertEqual(file(expected_filepath).read(), 'the-data')
+ f1.cw_delete()
+ self.assertTrue(osp.isfile(expected_filepath))
+ cnx.rollback()
+ self.assertTrue(osp.isfile(expected_filepath))
+ f1.cw_delete()
+ cnx.commit()
+ self.assertFalse(osp.isfile(expected_filepath))
def test_bfss_sqlite_fspath(self):
- f1 = self.create_file()
- expected_filepath = osp.join(self.tempdir, '%s_data_%s' % (f1.eid, f1.data_name))
- self.assertEqual(self.fspath(f1), expected_filepath)
+ with self.admin_access.repo_cnx() as cnx:
+ f1 = self.create_file(cnx)
+ expected_filepath = osp.join(self.tempdir, '%s_data_%s' % (f1.eid, f1.data_name))
+ self.assertEqual(self.fspath(cnx, f1), expected_filepath)
def test_bfss_fs_importing_doesnt_touch_path(self):
- self.session.transaction_data['fs_importing'] = True
- filepath = osp.abspath(__file__)
- f1 = self.request().create_entity('File', data=Binary(filepath),
- data_format=u'text/plain', data_name=u'foo')
- self.assertEqual(self.fspath(f1), filepath)
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.transaction_data['fs_importing'] = True
+ filepath = osp.abspath(__file__)
+ f1 = cnx.create_entity('File', data=Binary(filepath),
+ data_format=u'text/plain', data_name=u'foo')
+ self.assertEqual(self.fspath(cnx, f1), filepath)
def test_source_storage_transparency(self):
- with self.temporary_appobjects(DummyBeforeHook, DummyAfterHook):
- self.create_file()
+ with self.admin_access.repo_cnx() as cnx:
+ with self.temporary_appobjects(DummyBeforeHook, DummyAfterHook):
+ self.create_file(cnx)
def test_source_mapped_attribute_error_cases(self):
- with self.assertRaises(QueryError) as cm:
- self.execute('Any X WHERE X data ~= "hop", X is File')
- self.assertEqual(str(cm.exception), 'can\'t use File.data (X data ILIKE "hop") in restriction')
- with self.assertRaises(QueryError) as cm:
- self.execute('Any X, Y WHERE X data D, Y data D, '
- 'NOT X identity Y, X is File, Y is File')
- self.assertEqual(str(cm.exception), "can't use D as a restriction variable")
- # query returning mix of mapped / regular attributes (only file.data
- # mapped, not image.data for instance)
- with self.assertRaises(QueryError) as cm:
- self.execute('Any X WITH X BEING ('
- ' (Any NULL)'
- ' UNION '
- ' (Any D WHERE X data D, X is File)'
- ')')
- self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not')
- with self.assertRaises(QueryError) as cm:
- self.execute('(Any D WHERE X data D, X is File)'
- ' UNION '
- '(Any D WHERE X title D, X is Bookmark)')
- self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not')
+ with self.admin_access.repo_cnx() as cnx:
+ with self.assertRaises(QueryError) as cm:
+ cnx.execute('Any X WHERE X data ~= "hop", X is File')
+ self.assertEqual(str(cm.exception), 'can\'t use File.data (X data ILIKE "hop") in restriction')
+ with self.assertRaises(QueryError) as cm:
+ cnx.execute('Any X, Y WHERE X data D, Y data D, '
+ 'NOT X identity Y, X is File, Y is File')
+ self.assertEqual(str(cm.exception), "can't use D as a restriction variable")
+ # query returning mix of mapped / regular attributes (only file.data
+ # mapped, not image.data for instance)
+ with self.assertRaises(QueryError) as cm:
+ cnx.execute('Any X WITH X BEING ('
+ ' (Any NULL)'
+ ' UNION '
+ ' (Any D WHERE X data D, X is File)'
+ ')')
+ self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not')
+ with self.assertRaises(QueryError) as cm:
+ cnx.execute('(Any D WHERE X data D, X is File)'
+ ' UNION '
+ '(Any D WHERE X title D, X is Bookmark)')
+ self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not')
- storages.set_attribute_storage(self.repo, 'State', 'name',
- storages.BytesFileSystemStorage(self.tempdir))
- try:
- with self.assertRaises(QueryError) as cm:
- self.execute('Any D WHERE X name D, X is IN (State, Transition)')
- self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not')
- finally:
- storages.unset_attribute_storage(self.repo, 'State', 'name')
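+            # temporarily store State.name through BFSS so the query below mixes a
+            # source-mapped attribute (State.name) with a regular one (Transition.name)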
+ storages.set_attribute_storage(self.repo, 'State', 'name',
+ storages.BytesFileSystemStorage(self.tempdir))
+ try:
+ with self.assertRaises(QueryError) as cm:
+ cnx.execute('Any D WHERE X name D, X is IN (State, Transition)')
+ self.assertEqual(str(cm.exception), 'query fetch some source mapped attribute, some not')
+ finally:
+ storages.unset_attribute_storage(self.repo, 'State', 'name')
def test_source_mapped_attribute_advanced(self):
- f1 = self.create_file()
- rset = self.execute('Any X,D WITH D,X BEING ('
- ' (Any D, X WHERE X eid %(x)s, X data D)'
- ' UNION '
- ' (Any D, X WHERE X eid %(x)s, X data D)'
- ')', {'x': f1.eid})
- self.assertEqual(len(rset), 2)
- self.assertEqual(rset[0][0], f1.eid)
- self.assertEqual(rset[1][0], f1.eid)
- self.assertEqual(rset[0][1].getvalue(), 'the-data')
- self.assertEqual(rset[1][1].getvalue(), 'the-data')
- rset = self.execute('Any X,LENGTH(D) WHERE X eid %(x)s, X data D',
- {'x': f1.eid})
- self.assertEqual(len(rset), 1)
- self.assertEqual(rset[0][0], f1.eid)
- self.assertEqual(rset[0][1], len('the-data'))
- rset = self.execute('Any X,LENGTH(D) WITH D,X BEING ('
- ' (Any D, X WHERE X eid %(x)s, X data D)'
- ' UNION '
- ' (Any D, X WHERE X eid %(x)s, X data D)'
- ')', {'x': f1.eid})
- self.assertEqual(len(rset), 2)
- self.assertEqual(rset[0][0], f1.eid)
- self.assertEqual(rset[1][0], f1.eid)
- self.assertEqual(rset[0][1], len('the-data'))
- self.assertEqual(rset[1][1], len('the-data'))
- with self.assertRaises(QueryError) as cm:
- self.execute('Any X,UPPER(D) WHERE X eid %(x)s, X data D',
- {'x': f1.eid})
- self.assertEqual(str(cm.exception), 'UPPER can not be called on mapped attribute')
+ with self.admin_access.repo_cnx() as cnx:
+ f1 = self.create_file(cnx)
+ rset = cnx.execute('Any X,D WITH D,X BEING ('
+ ' (Any D, X WHERE X eid %(x)s, X data D)'
+ ' UNION '
+ ' (Any D, X WHERE X eid %(x)s, X data D)'
+ ')', {'x': f1.eid})
+ self.assertEqual(len(rset), 2)
+ self.assertEqual(rset[0][0], f1.eid)
+ self.assertEqual(rset[1][0], f1.eid)
+ self.assertEqual(rset[0][1].getvalue(), 'the-data')
+ self.assertEqual(rset[1][1].getvalue(), 'the-data')
+ rset = cnx.execute('Any X,LENGTH(D) WHERE X eid %(x)s, X data D',
+ {'x': f1.eid})
+ self.assertEqual(len(rset), 1)
+ self.assertEqual(rset[0][0], f1.eid)
+ self.assertEqual(rset[0][1], len('the-data'))
+ rset = cnx.execute('Any X,LENGTH(D) WITH D,X BEING ('
+ ' (Any D, X WHERE X eid %(x)s, X data D)'
+ ' UNION '
+ ' (Any D, X WHERE X eid %(x)s, X data D)'
+ ')', {'x': f1.eid})
+ self.assertEqual(len(rset), 2)
+ self.assertEqual(rset[0][0], f1.eid)
+ self.assertEqual(rset[1][0], f1.eid)
+ self.assertEqual(rset[0][1], len('the-data'))
+ self.assertEqual(rset[1][1], len('the-data'))
+ with self.assertRaises(QueryError) as cm:
+ cnx.execute('Any X,UPPER(D) WHERE X eid %(x)s, X data D',
+ {'x': f1.eid})
+ self.assertEqual(str(cm.exception), 'UPPER can not be called on mapped attribute')
def test_bfss_fs_importing_transparency(self):
- self.session.transaction_data['fs_importing'] = True
- filepath = osp.abspath(__file__)
- f1 = self.session.create_entity('File', data=Binary(filepath),
- data_format=u'text/plain', data_name=u'foo')
- cw_value = f1.data.getvalue()
- fs_value = file(filepath).read()
- if cw_value != fs_value:
- self.fail('cw value %r is different from file content' % cw_value)
-
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.transaction_data['fs_importing'] = True
+ filepath = osp.abspath(__file__)
+ f1 = cnx.create_entity('File', data=Binary(filepath),
+ data_format=u'text/plain', data_name=u'foo')
+ cw_value = f1.data.getvalue()
+ fs_value = file(filepath).read()
+ if cw_value != fs_value:
+ self.fail('cw value %r is different from file content' % cw_value)
@tag('update')
def test_bfss_update_with_existing_data(self):
- # use self.session to use server-side cache
- f1 = self.session.create_entity('File', data=Binary('some data'),
- data_format=u'text/plain', data_name=u'foo')
- # NOTE: do not use cw_set() which would automatically
- # update f1's local dict. We want the pure rql version to work
- self.execute('SET F data %(d)s WHERE F eid %(f)s',
- {'d': Binary('some other data'), 'f': f1.eid})
- self.assertEqual(f1.data.getvalue(), 'some other data')
- self.commit()
- f2 = self.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0)
- self.assertEqual(f2.data.getvalue(), 'some other data')
+ with self.admin_access.repo_cnx() as cnx:
+ f1 = cnx.create_entity('File', data=Binary('some data'),
+ data_format=u'text/plain', data_name=u'foo')
+ # NOTE: do not use cw_set() which would automatically
+ # update f1's local dict. We want the pure rql version to work
+ cnx.execute('SET F data %(d)s WHERE F eid %(f)s',
+ {'d': Binary('some other data'), 'f': f1.eid})
+ self.assertEqual(f1.data.getvalue(), 'some other data')
+ cnx.commit()
+ f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0)
+ self.assertEqual(f2.data.getvalue(), 'some other data')
@tag('update', 'extension', 'commit')
def test_bfss_update_with_different_extension_commited(self):
- # use self.session to use server-side cache
- f1 = self.session.create_entity('File', data=Binary('some data'),
- data_format=u'text/plain', data_name=u'foo.txt')
- # NOTE: do not use cw_set() which would automatically
- # update f1's local dict. We want the pure rql version to work
- self.commit()
- old_path = self.fspath(f1)
- self.assertTrue(osp.isfile(old_path))
- self.assertEqual(osp.splitext(old_path)[1], '.txt')
- self.execute('SET F data %(d)s, F data_name %(dn)s, F data_format %(df)s WHERE F eid %(f)s',
- {'d': Binary('some other data'), 'f': f1.eid, 'dn': u'bar.jpg', 'df': u'image/jpeg'})
- self.commit()
- # the new file exists with correct extension
- # the old file is dead
- f2 = self.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0)
- new_path = self.fspath(f2)
- self.assertFalse(osp.isfile(old_path))
- self.assertTrue(osp.isfile(new_path))
- self.assertEqual(osp.splitext(new_path)[1], '.jpg')
+ with self.admin_access.repo_cnx() as cnx:
+ f1 = cnx.create_entity('File', data=Binary('some data'),
+ data_format=u'text/plain', data_name=u'foo.txt')
+ # NOTE: do not use cw_set() which would automatically
+ # update f1's local dict. We want the pure rql version to work
+ cnx.commit()
+ old_path = self.fspath(cnx, f1)
+ self.assertTrue(osp.isfile(old_path))
+ self.assertEqual(osp.splitext(old_path)[1], '.txt')
+ cnx.execute('SET F data %(d)s, F data_name %(dn)s, '
+ 'F data_format %(df)s WHERE F eid %(f)s',
+ {'d': Binary('some other data'), 'f': f1.eid,
+ 'dn': u'bar.jpg', 'df': u'image/jpeg'})
+ cnx.commit()
+ # the new file exists with correct extension
+ # the old file is dead
+ f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0)
+ new_path = self.fspath(cnx, f2)
+ self.assertFalse(osp.isfile(old_path))
+ self.assertTrue(osp.isfile(new_path))
+ self.assertEqual(osp.splitext(new_path)[1], '.jpg')
@tag('update', 'extension', 'rollback')
def test_bfss_update_with_different_extension_rolled_back(self):
- # use self.session to use server-side cache
- f1 = self.session.create_entity('File', data=Binary('some data'),
- data_format=u'text/plain', data_name=u'foo.txt')
- # NOTE: do not use cw_set() which would automatically
- # update f1's local dict. We want the pure rql version to work
- self.commit()
- old_path = self.fspath(f1)
- old_data = f1.data.getvalue()
- self.assertTrue(osp.isfile(old_path))
- self.assertEqual(osp.splitext(old_path)[1], '.txt')
- self.execute('SET F data %(d)s, F data_name %(dn)s, F data_format %(df)s WHERE F eid %(f)s',
- {'d': Binary('some other data'), 'f': f1.eid, 'dn': u'bar.jpg', 'df': u'image/jpeg'})
- self.rollback()
- # the new file exists with correct extension
- # the old file is dead
- f2 = self.execute('Any F WHERE F eid %(f)s, F is File', {'f': f1.eid}).get_entity(0, 0)
- new_path = self.fspath(f2)
- new_data = f2.data.getvalue()
- self.assertTrue(osp.isfile(new_path))
- self.assertEqual(osp.splitext(new_path)[1], '.txt')
- self.assertEqual(old_path, new_path)
- self.assertEqual(old_data, new_data)
+ with self.admin_access.repo_cnx() as cnx:
+ f1 = cnx.create_entity('File', data=Binary('some data'),
+ data_format=u'text/plain', data_name=u'foo.txt')
+ # NOTE: do not use cw_set() which would automatically
+ # update f1's local dict. We want the pure rql version to work
+ cnx.commit()
+ old_path = self.fspath(cnx, f1)
+ old_data = f1.data.getvalue()
+ self.assertTrue(osp.isfile(old_path))
+ self.assertEqual(osp.splitext(old_path)[1], '.txt')
+ cnx.execute('SET F data %(d)s, F data_name %(dn)s, '
+ 'F data_format %(df)s WHERE F eid %(f)s',
+ {'d': Binary('some other data'),
+ 'f': f1.eid,
+ 'dn': u'bar.jpg',
+ 'df': u'image/jpeg'})
+ cnx.rollback()
+            # after the rollback nothing has changed: same path, same extension,
+            # same data
+ f2 = cnx.execute('Any F WHERE F eid %(f)s, F is File',
+ {'f': f1.eid}).get_entity(0, 0)
+ new_path = self.fspath(cnx, f2)
+ new_data = f2.data.getvalue()
+ self.assertTrue(osp.isfile(new_path))
+ self.assertEqual(osp.splitext(new_path)[1], '.txt')
+ self.assertEqual(old_path, new_path)
+ self.assertEqual(old_data, new_data)
@tag('update', 'NULL')
def test_bfss_update_to_None(self):
- f = self.session.create_entity('Affaire', opt_attr=Binary('toto'))
- self.session.commit()
- self.session.set_cnxset()
- f.cw_set(opt_attr=None)
- self.session.commit()
+ with self.admin_access.repo_cnx() as cnx:
+ f = cnx.create_entity('Affaire', opt_attr=Binary('toto'))
+ cnx.commit()
+ f.cw_set(opt_attr=None)
+ cnx.commit()
@tag('fs_importing', 'update')
def test_bfss_update_with_fs_importing(self):
- # use self.session to use server-side cache
- f1 = self.session.create_entity('File', data=Binary('some data'),
- data_format=u'text/plain', data_name=u'foo')
- old_fspath = self.fspath(f1)
- self.session.transaction_data['fs_importing'] = True
- new_fspath = osp.join(self.tempdir, 'newfile.txt')
- file(new_fspath, 'w').write('the new data')
- self.execute('SET F data %(d)s WHERE F eid %(f)s',
- {'d': Binary(new_fspath), 'f': f1.eid})
- self.commit()
- self.assertEqual(f1.data.getvalue(), 'the new data')
- self.assertEqual(self.fspath(f1), new_fspath)
- self.assertFalse(osp.isfile(old_fspath))
+ with self.admin_access.repo_cnx() as cnx:
+ f1 = cnx.create_entity('File', data=Binary('some data'),
+ data_format=u'text/plain',
+ data_name=u'foo')
+ old_fspath = self.fspath(cnx, f1)
+ cnx.transaction_data['fs_importing'] = True
+ new_fspath = osp.join(self.tempdir, 'newfile.txt')
+ file(new_fspath, 'w').write('the new data')
+ cnx.execute('SET F data %(d)s WHERE F eid %(f)s',
+ {'d': Binary(new_fspath), 'f': f1.eid})
+ cnx.commit()
+ self.assertEqual(f1.data.getvalue(), 'the new data')
+ self.assertEqual(self.fspath(cnx, f1), new_fspath)
+ self.assertFalse(osp.isfile(old_fspath))
@tag('fsimport')
def test_clean(self):
- fsimport = storages.fsimport
- td = self.session.transaction_data
- self.assertNotIn('fs_importing', td)
- with fsimport(self.session):
- self.assertIn('fs_importing', td)
- self.assertTrue(td['fs_importing'])
- self.assertNotIn('fs_importing', td)
+ with self.admin_access.repo_cnx() as cnx:
+ fsimport = storages.fsimport
+ td = cnx.transaction_data
+ self.assertNotIn('fs_importing', td)
+ with fsimport(cnx):
+ self.assertIn('fs_importing', td)
+ self.assertTrue(td['fs_importing'])
+ self.assertNotIn('fs_importing', td)
@tag('fsimport')
def test_true(self):
- fsimport = storages.fsimport
- td = self.session.transaction_data
- td['fs_importing'] = True
- with fsimport(self.session):
- self.assertIn('fs_importing', td)
+ with self.admin_access.repo_cnx() as cnx:
+ fsimport = storages.fsimport
+ td = cnx.transaction_data
+ td['fs_importing'] = True
+ with fsimport(cnx):
+ self.assertIn('fs_importing', td)
+ self.assertTrue(td['fs_importing'])
self.assertTrue(td['fs_importing'])
- self.assertTrue(td['fs_importing'])
@tag('fsimport')
def test_False(self):
- fsimport = storages.fsimport
- td = self.session.transaction_data
- td['fs_importing'] = False
- with fsimport(self.session):
- self.assertIn('fs_importing', td)
- self.assertTrue(td['fs_importing'])
- self.assertFalse(td['fs_importing'])
+ with self.admin_access.repo_cnx() as cnx:
+ fsimport = storages.fsimport
+ td = cnx.transaction_data
+ td['fs_importing'] = False
+ with fsimport(cnx):
+ self.assertIn('fs_importing', td)
+ self.assertTrue(td['fs_importing'])
+ self.assertFalse(td['fs_importing'])
if __name__ == '__main__':
unittest_main()
--- a/server/test/unittest_undo.py Fri May 23 18:35:13 2014 +0200
+++ b/server/test/unittest_undo.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -21,18 +21,23 @@
from cubicweb.devtools.testlib import CubicWebTC
import cubicweb.server.session
from cubicweb.server.session import Connection as OldConnection
-from cubicweb.transaction import *
from cubicweb.server.sources.native import UndoTransactionException, _UndoException
+from cubicweb.transaction import NoSuchTransaction
class UndoableTransactionTC(CubicWebTC):
def setup_database(self):
- req = self.request()
- self.toto = self.create_user(req, 'toto', password='toto', groups=('users',),
- commit=False)
- self.txuuid = self.commit()
+ with self.admin_access.repo_cnx() as cnx:
+ self.totoeid = self.create_user(cnx, 'toto',
+ password='toto',
+ groups=('users',),
+ commit=False).eid
+ self.txuuid = cnx.commit()
+
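+    # entities are bound to the connection they were loaded from, so tests keep
+    # only the eid and re-fetch the 'toto' user from each fresh connection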
+ def toto(self, cnx):
+ return cnx.entity_from_eid(self.totoeid)
def setUp(self):
class Connection(OldConnection):
@@ -44,23 +49,22 @@
def tearDown(self):
cubicweb.server.session.Connection = OldConnection
self.restore_connection()
- self.session.undo_support = set()
super(UndoableTransactionTC, self).tearDown()
- def check_transaction_deleted(self, txuuid):
+ def check_transaction_deleted(self, cnx, txuuid):
# also check transaction actions have been properly deleted
- cu = self.session.system_sql(
+ cu = cnx.system_sql(
"SELECT * from tx_entity_actions WHERE tx_uuid='%s'" % txuuid)
self.assertFalse(cu.fetchall())
- cu = self.session.system_sql(
+ cu = cnx.system_sql(
"SELECT * from tx_relation_actions WHERE tx_uuid='%s'" % txuuid)
self.assertFalse(cu.fetchall())
- def assertUndoTransaction(self, txuuid, expected_errors=None):
+ def assertUndoTransaction(self, cnx, txuuid, expected_errors=None):
if expected_errors is None :
expected_errors = []
try:
- self.cnx.undo_transaction(txuuid)
+ cnx.undo_transaction(txuuid)
except UndoTransactionException as exn:
errors = exn.errors
else:
@@ -70,238 +74,243 @@
def test_undo_api(self):
self.assertTrue(self.txuuid)
# test transaction api
- self.assertRaises(NoSuchTransaction,
- self.cnx.transaction_info, 'hop')
- self.assertRaises(NoSuchTransaction,
- self.cnx.transaction_actions, 'hop')
- self.assertRaises(NoSuchTransaction,
- self.cnx.undo_transaction, 'hop')
- txinfo = self.cnx.transaction_info(self.txuuid)
- self.assertTrue(txinfo.datetime)
- self.assertEqual(txinfo.user_eid, self.session.user.eid)
- self.assertEqual(txinfo.user().login, 'admin')
- actions = txinfo.actions_list()
- self.assertEqual(len(actions), 2)
- actions = txinfo.actions_list(public=False)
- self.assertEqual(len(actions), 6)
- a1 = actions[0]
- self.assertEqual(a1.action, 'C')
- self.assertEqual(a1.eid, self.toto.eid)
- self.assertEqual(a1.etype,'CWUser')
- self.assertEqual(a1.ertype, 'CWUser')
- self.assertEqual(a1.changes, None)
- self.assertEqual(a1.public, True)
- self.assertEqual(a1.order, 1)
- a4 = actions[3]
- self.assertEqual(a4.action, 'A')
- self.assertEqual(a4.rtype, 'in_group')
- self.assertEqual(a4.ertype, 'in_group')
- self.assertEqual(a4.eid_from, self.toto.eid)
- self.assertEqual(a4.eid_to, self.toto.in_group[0].eid)
- self.assertEqual(a4.order, 4)
- for i, rtype in ((1, 'owned_by'), (2, 'owned_by'),
- (4, 'in_state'), (5, 'created_by')):
- a = actions[i]
- self.assertEqual(a.action, 'A')
- self.assertEqual(a.eid_from, self.toto.eid)
- self.assertEqual(a.rtype, rtype)
- self.assertEqual(a.order, i+1)
- # test undoable_transactions
- txs = self.cnx.undoable_transactions()
- self.assertEqual(len(txs), 1)
- self.assertEqual(txs[0].uuid, self.txuuid)
- # test transaction_info / undoable_transactions security
- cnx = self.login('anon')
- self.assertRaises(NoSuchTransaction,
- cnx.transaction_info, self.txuuid)
- self.assertRaises(NoSuchTransaction,
- cnx.transaction_actions, self.txuuid)
- self.assertRaises(NoSuchTransaction,
- cnx.undo_transaction, self.txuuid)
- txs = cnx.undoable_transactions()
- self.assertEqual(len(txs), 0)
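+        # client_cnx() exposes the dbapi-like connection API (transaction_info,
+        # undoable_transactions, undo_transaction) exercised by these tests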
+ with self.admin_access.client_cnx() as cnx:
+ self.assertRaises(NoSuchTransaction,
+ cnx.transaction_info, 'hop')
+ self.assertRaises(NoSuchTransaction,
+ cnx.transaction_actions, 'hop')
+ self.assertRaises(NoSuchTransaction,
+ cnx.undo_transaction, 'hop')
+ txinfo = cnx.transaction_info(self.txuuid)
+ self.assertTrue(txinfo.datetime)
+ self.assertEqual(txinfo.user_eid, cnx.user.eid)
+ self.assertEqual(txinfo.user().login, 'admin')
+ actions = txinfo.actions_list()
+ self.assertEqual(len(actions), 2)
+ actions = txinfo.actions_list(public=False)
+ self.assertEqual(len(actions), 6)
+ a1 = actions[0]
+ self.assertEqual(a1.action, 'C')
+ self.assertEqual(a1.eid, self.totoeid)
+ self.assertEqual(a1.etype,'CWUser')
+ self.assertEqual(a1.ertype, 'CWUser')
+ self.assertEqual(a1.changes, None)
+ self.assertEqual(a1.public, True)
+ self.assertEqual(a1.order, 1)
+ a4 = actions[3]
+ self.assertEqual(a4.action, 'A')
+ self.assertEqual(a4.rtype, 'in_group')
+ self.assertEqual(a4.ertype, 'in_group')
+ self.assertEqual(a4.eid_from, self.totoeid)
+ self.assertEqual(a4.eid_to, self.toto(cnx).in_group[0].eid)
+ self.assertEqual(a4.order, 4)
+ for i, rtype in ((1, 'owned_by'), (2, 'owned_by'),
+ (4, 'in_state'), (5, 'created_by')):
+ a = actions[i]
+ self.assertEqual(a.action, 'A')
+ self.assertEqual(a.eid_from, self.totoeid)
+ self.assertEqual(a.rtype, rtype)
+ self.assertEqual(a.order, i+1)
+ # test undoable_transactions
+ txs = cnx.undoable_transactions()
+ self.assertEqual(len(txs), 1)
+ self.assertEqual(txs[0].uuid, self.txuuid)
+ # test transaction_info / undoable_transactions security
+ with self.new_access('anon').client_cnx() as cnx:
+ self.assertRaises(NoSuchTransaction,
+ cnx.transaction_info, self.txuuid)
+ self.assertRaises(NoSuchTransaction,
+ cnx.transaction_actions, self.txuuid)
+ self.assertRaises(NoSuchTransaction,
+ cnx.undo_transaction, self.txuuid)
+ txs = cnx.undoable_transactions()
+ self.assertEqual(len(txs), 0)
def test_undoable_transactions(self):
- toto = self.toto
- e = self.session.create_entity('EmailAddress',
- address=u'toto@logilab.org',
- reverse_use_email=toto)
- txuuid1 = self.commit()
- toto.cw_delete()
- txuuid2 = self.commit()
- undoable_transactions = self.cnx.undoable_transactions
- txs = undoable_transactions(action='D')
- self.assertEqual(len(txs), 1, txs)
- self.assertEqual(txs[0].uuid, txuuid2)
- txs = undoable_transactions(action='C')
- self.assertEqual(len(txs), 2, txs)
- self.assertEqual(txs[0].uuid, txuuid1)
- self.assertEqual(txs[1].uuid, self.txuuid)
- txs = undoable_transactions(eid=toto.eid)
- self.assertEqual(len(txs), 3)
- self.assertEqual(txs[0].uuid, txuuid2)
- self.assertEqual(txs[1].uuid, txuuid1)
- self.assertEqual(txs[2].uuid, self.txuuid)
- txs = undoable_transactions(etype='CWUser')
- self.assertEqual(len(txs), 2)
- txs = undoable_transactions(etype='CWUser', action='C')
- self.assertEqual(len(txs), 1)
- self.assertEqual(txs[0].uuid, self.txuuid)
- txs = undoable_transactions(etype='EmailAddress', action='D')
- self.assertEqual(len(txs), 0)
- txs = undoable_transactions(etype='EmailAddress', action='D',
- public=False)
- self.assertEqual(len(txs), 1)
- self.assertEqual(txs[0].uuid, txuuid2)
- txs = undoable_transactions(eid=toto.eid, action='R', public=False)
- self.assertEqual(len(txs), 1)
- self.assertEqual(txs[0].uuid, txuuid2)
+ with self.admin_access.client_cnx() as cnx:
+ toto = self.toto(cnx)
+ e = cnx.create_entity('EmailAddress',
+ address=u'toto@logilab.org',
+ reverse_use_email=toto)
+ txuuid1 = cnx.commit()
+ toto.cw_delete()
+ txuuid2 = cnx.commit()
+ undoable_transactions = cnx.undoable_transactions
+ txs = undoable_transactions(action='D')
+ self.assertEqual(len(txs), 1, txs)
+ self.assertEqual(txs[0].uuid, txuuid2)
+ txs = undoable_transactions(action='C')
+ self.assertEqual(len(txs), 2, txs)
+ self.assertEqual(txs[0].uuid, txuuid1)
+ self.assertEqual(txs[1].uuid, self.txuuid)
+ txs = undoable_transactions(eid=toto.eid)
+ self.assertEqual(len(txs), 3)
+ self.assertEqual(txs[0].uuid, txuuid2)
+ self.assertEqual(txs[1].uuid, txuuid1)
+ self.assertEqual(txs[2].uuid, self.txuuid)
+ txs = undoable_transactions(etype='CWUser')
+ self.assertEqual(len(txs), 2)
+ txs = undoable_transactions(etype='CWUser', action='C')
+ self.assertEqual(len(txs), 1)
+ self.assertEqual(txs[0].uuid, self.txuuid)
+ txs = undoable_transactions(etype='EmailAddress', action='D')
+ self.assertEqual(len(txs), 0)
+ txs = undoable_transactions(etype='EmailAddress', action='D',
+ public=False)
+ self.assertEqual(len(txs), 1)
+ self.assertEqual(txs[0].uuid, txuuid2)
+ txs = undoable_transactions(eid=toto.eid, action='R', public=False)
+ self.assertEqual(len(txs), 1)
+ self.assertEqual(txs[0].uuid, txuuid2)
def test_undo_deletion_base(self):
- toto = self.toto
- e = self.session.create_entity('EmailAddress',
- address=u'toto@logilab.org',
- reverse_use_email=toto)
- # entity with inlined relation
- p = self.session.create_entity('CWProperty',
- pkey=u'ui.default-text-format',
- value=u'text/rest',
- for_user=toto)
- self.commit()
- txs = self.cnx.undoable_transactions()
- self.assertEqual(len(txs), 2)
- toto.cw_delete()
- txuuid = self.commit()
- actions = self.cnx.transaction_info(txuuid).actions_list()
- self.assertEqual(len(actions), 1)
- toto.cw_clear_all_caches()
- e.cw_clear_all_caches()
- self.assertUndoTransaction(txuuid)
- undotxuuid = self.commit()
- self.assertEqual(undotxuuid, None) # undo not undoable
- self.assertTrue(self.execute('Any X WHERE X eid %(x)s', {'x': toto.eid}))
- self.assertTrue(self.execute('Any X WHERE X eid %(x)s', {'x': e.eid}))
- self.assertTrue(self.execute('Any X WHERE X has_text "toto@logilab"'))
- self.assertEqual(toto.cw_adapt_to('IWorkflowable').state, 'activated')
- self.assertEqual(toto.cw_adapt_to('IEmailable').get_email(), 'toto@logilab.org')
- self.assertEqual([(p.pkey, p.value) for p in toto.reverse_for_user],
- [('ui.default-text-format', 'text/rest')])
- self.assertEqual([g.name for g in toto.in_group],
- ['users'])
- self.assertEqual([et.name for et in toto.related('is', entities=True)],
- ['CWUser'])
- self.assertEqual([et.name for et in toto.is_instance_of],
- ['CWUser'])
- # undoing shouldn't be visble in undoable transaction, and the undone
- # transaction should be removed
- txs = self.cnx.undoable_transactions()
- self.assertEqual(len(txs), 2)
- self.assertRaises(NoSuchTransaction,
- self.cnx.transaction_info, txuuid)
- self.check_transaction_deleted(txuuid)
- # the final test: check we can login with the previously deleted user
- self.login('toto')
+ with self.admin_access.client_cnx() as cnx:
+ toto = self.toto(cnx)
+ e = cnx.create_entity('EmailAddress',
+ address=u'toto@logilab.org',
+ reverse_use_email=toto)
+ # entity with inlined relation
+ p = cnx.create_entity('CWProperty',
+ pkey=u'ui.default-text-format',
+ value=u'text/rest',
+ for_user=toto)
+ cnx.commit()
+ txs = cnx.undoable_transactions()
+ self.assertEqual(len(txs), 2)
+ toto.cw_delete()
+ txuuid = cnx.commit()
+ actions = cnx.transaction_info(txuuid).actions_list()
+ self.assertEqual(len(actions), 1)
+ toto.cw_clear_all_caches()
+ e.cw_clear_all_caches()
+ self.assertUndoTransaction(cnx, txuuid)
+ undotxuuid = cnx.commit()
+ self.assertEqual(undotxuuid, None) # undo not undoable
+ self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x': toto.eid}))
+ self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x': e.eid}))
+ self.assertTrue(cnx.execute('Any X WHERE X has_text "toto@logilab"'))
+ self.assertEqual(toto.cw_adapt_to('IWorkflowable').state, 'activated')
+ self.assertEqual(toto.cw_adapt_to('IEmailable').get_email(), 'toto@logilab.org')
+ self.assertEqual([(p.pkey, p.value) for p in toto.reverse_for_user],
+ [('ui.default-text-format', 'text/rest')])
+ self.assertEqual([g.name for g in toto.in_group],
+ ['users'])
+ self.assertEqual([et.name for et in toto.related('is', entities=True)],
+ ['CWUser'])
+ self.assertEqual([et.name for et in toto.is_instance_of],
+ ['CWUser'])
+            # undoing shouldn't be visible in undoable transactions, and the undone
+ # transaction should be removed
+            txs = cnx.undoable_transactions()
+            self.assertEqual(len(txs), 2)
+            self.assertRaises(NoSuchTransaction,
+                              cnx.transaction_info, txuuid)
+ with self.admin_access.repo_cnx() as cnx:
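+                # check_transaction_deleted() runs raw system_sql queries, hence the
+                # repository connection with ensure_cnx_set binding a database cnxset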
+ with cnx.ensure_cnx_set:
+ self.check_transaction_deleted(cnx, txuuid)
+ # the final test: check we can login with the previously deleted user
+ with self.new_access('toto').client_cnx():
+ pass
def test_undo_deletion_integrity_1(self):
- session = self.session
- # 'Personne fiche Card with' '??' cardinality
- c = session.create_entity('Card', title=u'hop', content=u'hop')
- p = session.create_entity('Personne', nom=u'louis', fiche=c)
- self.commit()
- c.cw_delete()
- txuuid = self.commit()
- c2 = session.create_entity('Card', title=u'hip', content=u'hip')
- p.cw_set(fiche=c2)
- self.commit()
- self.assertUndoTransaction(txuuid, [
- "Can't restore object relation fiche to entity "
- "%s which is already linked using this relation." % p.eid])
- self.commit()
- p.cw_clear_all_caches()
- self.assertEqual(p.fiche[0].eid, c2.eid)
+ with self.admin_access.client_cnx() as cnx:
+ # 'Personne fiche Card with' '??' cardinality
+ c = cnx.create_entity('Card', title=u'hop', content=u'hop')
+ p = cnx.create_entity('Personne', nom=u'louis', fiche=c)
+ cnx.commit()
+ c.cw_delete()
+ txuuid = cnx.commit()
+ c2 = cnx.create_entity('Card', title=u'hip', content=u'hip')
+ p.cw_set(fiche=c2)
+ cnx.commit()
+ self.assertUndoTransaction(cnx, txuuid, [
+ "Can't restore object relation fiche to entity "
+ "%s which is already linked using this relation." % p.eid])
+ cnx.commit()
+ p.cw_clear_all_caches()
+ self.assertEqual(p.fiche[0].eid, c2.eid)
def test_undo_deletion_integrity_2(self):
- # test validation error raised if we can't restore a required relation
- session = self.session
- g = session.create_entity('CWGroup', name=u'staff')
- session.execute('DELETE U in_group G WHERE U eid %(x)s', {'x': self.toto.eid})
- self.toto.cw_set(in_group=g)
- self.commit()
- self.toto.cw_delete()
- txuuid = self.commit()
- g.cw_delete()
- self.commit()
- self.assertUndoTransaction(txuuid, [
- u"Can't restore relation in_group, object entity "
- "%s doesn't exist anymore." % g.eid])
- with self.assertRaises(ValidationError) as cm:
- self.commit()
- cm.exception.translate(unicode)
- self.assertEqual(cm.exception.entity, self.toto.eid)
- self.assertEqual(cm.exception.errors,
- {'in_group-subject': u'at least one relation in_group is '
- 'required on CWUser (%s)' % self.toto.eid})
+ with self.admin_access.client_cnx() as cnx:
+ # test validation error raised if we can't restore a required relation
+ g = cnx.create_entity('CWGroup', name=u'staff')
+ cnx.execute('DELETE U in_group G WHERE U eid %(x)s', {'x': self.totoeid})
+ self.toto(cnx).cw_set(in_group=g)
+ cnx.commit()
+ self.toto(cnx).cw_delete()
+ txuuid = cnx.commit()
+ g.cw_delete()
+ cnx.commit()
+ self.assertUndoTransaction(cnx, txuuid, [
+ u"Can't restore relation in_group, object entity "
+ "%s doesn't exist anymore." % g.eid])
+ with self.assertRaises(ValidationError) as cm:
+ cnx.commit()
+ cm.exception.translate(unicode)
+ self.assertEqual(cm.exception.entity, self.totoeid)
+ self.assertEqual(cm.exception.errors,
+ {'in_group-subject': u'at least one relation in_group is '
+ 'required on CWUser (%s)' % self.totoeid})
def test_undo_creation_1(self):
- session = self.session
- c = session.create_entity('Card', title=u'hop', content=u'hop')
- p = session.create_entity('Personne', nom=u'louis', fiche=c)
- txuuid = self.commit()
- self.assertUndoTransaction(txuuid)
- self.commit()
- self.assertFalse(self.execute('Any X WHERE X eid %(x)s', {'x': c.eid}))
- self.assertFalse(self.execute('Any X WHERE X eid %(x)s', {'x': p.eid}))
- self.assertFalse(self.execute('Any X,Y WHERE X fiche Y'))
- self.session.set_cnxset()
- for eid in (p.eid, c.eid):
- self.assertFalse(session.system_sql(
- 'SELECT * FROM entities WHERE eid=%s' % eid).fetchall())
- self.assertFalse(session.system_sql(
- 'SELECT 1 FROM owned_by_relation WHERE eid_from=%s' % eid).fetchall())
- # added by sql in hooks (except when using dataimport)
- self.assertFalse(session.system_sql(
- 'SELECT 1 FROM is_relation WHERE eid_from=%s' % eid).fetchall())
- self.assertFalse(session.system_sql(
- 'SELECT 1 FROM is_instance_of_relation WHERE eid_from=%s' % eid).fetchall())
- self.check_transaction_deleted(txuuid)
-
+ with self.admin_access.client_cnx() as cnx:
+ c = cnx.create_entity('Card', title=u'hop', content=u'hop')
+ p = cnx.create_entity('Personne', nom=u'louis', fiche=c)
+ txuuid = cnx.commit()
+ self.assertUndoTransaction(cnx, txuuid)
+ cnx.commit()
+ self.assertFalse(cnx.execute('Any X WHERE X eid %(x)s', {'x': c.eid}))
+ self.assertFalse(cnx.execute('Any X WHERE X eid %(x)s', {'x': p.eid}))
+ self.assertFalse(cnx.execute('Any X,Y WHERE X fiche Y'))
+ with self.admin_access.repo_cnx() as cnx:
+ with cnx.ensure_cnx_set:
+ for eid in (p.eid, c.eid):
+ self.assertFalse(cnx.system_sql(
+ 'SELECT * FROM entities WHERE eid=%s' % eid).fetchall())
+ self.assertFalse(cnx.system_sql(
+ 'SELECT 1 FROM owned_by_relation WHERE eid_from=%s' % eid).fetchall())
+ # added by sql in hooks (except when using dataimport)
+ self.assertFalse(cnx.system_sql(
+ 'SELECT 1 FROM is_relation WHERE eid_from=%s' % eid).fetchall())
+ self.assertFalse(cnx.system_sql(
+ 'SELECT 1 FROM is_instance_of_relation WHERE eid_from=%s' % eid).fetchall())
+ self.check_transaction_deleted(cnx, txuuid)
def test_undo_creation_integrity_1(self):
- session = self.session
- req = self.request()
- tutu = self.create_user(req, 'tutu', commit=False)
- txuuid = self.commit()
- email = self.request().create_entity('EmailAddress', address=u'tutu@cubicweb.org')
- prop = self.request().create_entity('CWProperty', pkey=u'ui.default-text-format',
- value=u'text/html')
- tutu.cw_set(use_email=email, reverse_for_user=prop)
- self.commit()
- with self.assertRaises(ValidationError) as cm:
- self.cnx.undo_transaction(txuuid)
- self.assertEqual(cm.exception.entity, tutu.eid)
- self.assertEqual(cm.exception.errors,
- {None: 'some later transaction(s) touch entity, undo them first'})
+ with self.admin_access.client_cnx() as cnx:
+ tutu = self.create_user(cnx, 'tutu', commit=False)
+ txuuid = cnx.commit()
+ email = cnx.create_entity('EmailAddress', address=u'tutu@cubicweb.org')
+ prop = cnx.create_entity('CWProperty', pkey=u'ui.default-text-format',
+ value=u'text/html')
+ tutu.cw_set(use_email=email, reverse_for_user=prop)
+ cnx.commit()
+ with self.assertRaises(ValidationError) as cm:
+ cnx.undo_transaction(txuuid)
+ self.assertEqual(cm.exception.entity, tutu.eid)
+ self.assertEqual(cm.exception.errors,
+ {None: 'some later transaction(s) touch entity, undo them first'})
def test_undo_creation_integrity_2(self):
- session = self.session
- g = session.create_entity('CWGroup', name=u'staff')
- txuuid = self.commit()
- session.execute('DELETE U in_group G WHERE U eid %(x)s', {'x': self.toto.eid})
- self.toto.cw_set(in_group=g)
- self.commit()
- with self.assertRaises(ValidationError) as cm:
- self.cnx.undo_transaction(txuuid)
- self.assertEqual(cm.exception.entity, g.eid)
- self.assertEqual(cm.exception.errors,
- {None: 'some later transaction(s) touch entity, undo them first'})
+ with self.admin_access.client_cnx() as cnx:
+ g = cnx.create_entity('CWGroup', name=u'staff')
+ txuuid = cnx.commit()
+ cnx.execute('DELETE U in_group G WHERE U eid %(x)s', {'x': self.totoeid})
+ self.toto(cnx).cw_set(in_group=g)
+ cnx.commit()
+ with self.assertRaises(ValidationError) as cm:
+ cnx.undo_transaction(txuuid)
+ self.assertEqual(cm.exception.entity, g.eid)
+ self.assertEqual(cm.exception.errors,
+ {None: 'some later transaction(s) touch entity, undo them first'})
# self.assertEqual(errors,
# [u"Can't restore relation in_group, object entity "
# "%s doesn't exist anymore." % g.eid])
- # with self.assertRaises(ValidationError) as cm: self.commit()
- # self.assertEqual(cm.exception.entity, self.toto.eid)
+ # with self.assertRaises(ValidationError) as cm: cnx.commit()
+ # self.assertEqual(cm.exception.entity, self.totoeid)
# self.assertEqual(cm.exception.errors,
# {'in_group-subject': u'at least one relation in_group is '
- # 'required on CWUser (%s)' % self.toto.eid})
+ # 'required on CWUser (%s)' % self.totoeid})
# test implicit 'replacement' of an inlined relation
@@ -309,124 +318,124 @@
"""Undo remove relation Personne (?) fiche (?) Card
NB: processed by `_undo_r` as expected"""
- session = self.session
- c = session.create_entity('Card', title=u'hop', content=u'hop')
- p = session.create_entity('Personne', nom=u'louis', fiche=c)
- self.commit()
- p.cw_set(fiche=None)
- txuuid = self.commit()
- self.assertUndoTransaction(txuuid)
- self.commit()
- p.cw_clear_all_caches()
- self.assertEqual(p.fiche[0].eid, c.eid)
+ with self.admin_access.client_cnx() as cnx:
+ c = cnx.create_entity('Card', title=u'hop', content=u'hop')
+ p = cnx.create_entity('Personne', nom=u'louis', fiche=c)
+ cnx.commit()
+ p.cw_set(fiche=None)
+ txuuid = cnx.commit()
+ self.assertUndoTransaction(cnx, txuuid)
+ cnx.commit()
+ p.cw_clear_all_caches()
+ self.assertEqual(p.fiche[0].eid, c.eid)
def test_undo_inline_rel_remove_ko(self):
"""Restore an inlined relation to a deleted entity, with an error.
NB: processed by `_undo_r` as expected"""
- session = self.session
- c = session.create_entity('Card', title=u'hop', content=u'hop')
- p = session.create_entity('Personne', nom=u'louis', fiche=c)
- self.commit()
- p.cw_set(fiche=None)
- txuuid = self.commit()
- c.cw_delete()
- self.commit()
- self.assertUndoTransaction(txuuid, [
- "Can't restore relation fiche, object entity %d doesn't exist anymore." % c.eid])
- self.commit()
- p.cw_clear_all_caches()
- self.assertFalse(p.fiche)
- self.assertIsNone(session.system_sql(
- 'SELECT cw_fiche FROM cw_Personne WHERE cw_eid=%s' % p.eid).fetchall()[0][0])
+ with self.admin_access.client_cnx() as cnx:
+ c = cnx.create_entity('Card', title=u'hop', content=u'hop')
+ p = cnx.create_entity('Personne', nom=u'louis', fiche=c)
+ cnx.commit()
+ p.cw_set(fiche=None)
+ txuuid = cnx.commit()
+ c.cw_delete()
+ cnx.commit()
+ self.assertUndoTransaction(cnx, txuuid, [
+ "Can't restore relation fiche, object entity %d doesn't exist anymore." % c.eid])
+ cnx.commit()
+ p.cw_clear_all_caches()
+ self.assertFalse(p.fiche)
+ with self.admin_access.repo_cnx() as cnx:
+ with cnx.ensure_cnx_set:
+ self.assertIsNone(cnx.system_sql(
+ 'SELECT cw_fiche FROM cw_Personne WHERE cw_eid=%s' % p.eid).fetchall()[0][0])
def test_undo_inline_rel_add_ok(self):
"""Undo add relation Personne (?) fiche (?) Card
Caution processed by `_undo_u`, not `_undo_a` !"""
- session = self.session
- c = session.create_entity('Card', title=u'hop', content=u'hop')
- p = session.create_entity('Personne', nom=u'louis')
- self.commit()
- p.cw_set(fiche=c)
- txuuid = self.commit()
- self.assertUndoTransaction(txuuid)
- self.commit()
- p.cw_clear_all_caches()
- self.assertFalse(p.fiche)
+ with self.admin_access.client_cnx() as cnx:
+ c = cnx.create_entity('Card', title=u'hop', content=u'hop')
+ p = cnx.create_entity('Personne', nom=u'louis')
+ cnx.commit()
+ p.cw_set(fiche=c)
+ txuuid = cnx.commit()
+ self.assertUndoTransaction(cnx, txuuid)
+ cnx.commit()
+ p.cw_clear_all_caches()
+ self.assertFalse(p.fiche)
def test_undo_inline_rel_add_ko(self):
"""Undo add relation Personne (?) fiche (?) Card
Caution processed by `_undo_u`, not `_undo_a` !"""
- session = self.session
- c = session.create_entity('Card', title=u'hop', content=u'hop')
- p = session.create_entity('Personne', nom=u'louis')
- self.commit()
- p.cw_set(fiche=c)
- txuuid = self.commit()
- c.cw_delete()
- self.commit()
- self.assertUndoTransaction(txuuid)
+ with self.admin_access.client_cnx() as cnx:
+ c = cnx.create_entity('Card', title=u'hop', content=u'hop')
+ p = cnx.create_entity('Personne', nom=u'louis')
+ cnx.commit()
+ p.cw_set(fiche=c)
+ txuuid = cnx.commit()
+ c.cw_delete()
+ cnx.commit()
+ self.assertUndoTransaction(cnx, txuuid)
def test_undo_inline_rel_replace_ok(self):
"""Undo changing relation Personne (?) fiche (?) Card
Caution processed by `_undo_u` """
- session = self.session
- c1 = session.create_entity('Card', title=u'hop', content=u'hop')
- c2 = session.create_entity('Card', title=u'hip', content=u'hip')
- p = session.create_entity('Personne', nom=u'louis', fiche=c1)
- self.commit()
- p.cw_set(fiche=c2)
- txuuid = self.commit()
- self.assertUndoTransaction(txuuid)
- self.commit()
- p.cw_clear_all_caches()
- self.assertEqual(p.fiche[0].eid, c1.eid)
+ with self.admin_access.client_cnx() as cnx:
+ c1 = cnx.create_entity('Card', title=u'hop', content=u'hop')
+ c2 = cnx.create_entity('Card', title=u'hip', content=u'hip')
+ p = cnx.create_entity('Personne', nom=u'louis', fiche=c1)
+ cnx.commit()
+ p.cw_set(fiche=c2)
+ txuuid = cnx.commit()
+ self.assertUndoTransaction(cnx, txuuid)
+ cnx.commit()
+ p.cw_clear_all_caches()
+ self.assertEqual(p.fiche[0].eid, c1.eid)
def test_undo_inline_rel_replace_ko(self):
"""Undo changing relation Personne (?) fiche (?) Card, with an error
Caution processed by `_undo_u` """
- session = self.session
- c1 = session.create_entity('Card', title=u'hop', content=u'hop')
- c2 = session.create_entity('Card', title=u'hip', content=u'hip')
- p = session.create_entity('Personne', nom=u'louis', fiche=c1)
- self.commit()
- p.cw_set(fiche=c2)
- txuuid = self.commit()
- c1.cw_delete()
- self.commit()
- self.assertUndoTransaction(txuuid, [
- "can't restore entity %s of type Personne, target of fiche (eid %s)"
- " does not exist any longer" % (p.eid, c1.eid)])
- self.commit()
- p.cw_clear_all_caches()
- self.assertFalse(p.fiche)
+ with self.admin_access.client_cnx() as cnx:
+ c1 = cnx.create_entity('Card', title=u'hop', content=u'hop')
+ c2 = cnx.create_entity('Card', title=u'hip', content=u'hip')
+ p = cnx.create_entity('Personne', nom=u'louis', fiche=c1)
+ cnx.commit()
+ p.cw_set(fiche=c2)
+ txuuid = cnx.commit()
+ c1.cw_delete()
+ cnx.commit()
+ self.assertUndoTransaction(cnx, txuuid, [
+ "can't restore entity %s of type Personne, target of fiche (eid %s)"
+ " does not exist any longer" % (p.eid, c1.eid)])
+ cnx.commit()
+ p.cw_clear_all_caches()
+ self.assertFalse(p.fiche)
def test_undo_attr_update_ok(self):
- session = self.session
- p = session.create_entity('Personne', nom=u'toto')
- session.commit()
- self.session.set_cnxset()
- p.cw_set(nom=u'titi')
- txuuid = self.commit()
- self.assertUndoTransaction(txuuid)
- p.cw_clear_all_caches()
- self.assertEqual(p.nom, u'toto')
+ with self.admin_access.client_cnx() as cnx:
+ p = cnx.create_entity('Personne', nom=u'toto')
+ cnx.commit()
+ p.cw_set(nom=u'titi')
+ txuuid = cnx.commit()
+ self.assertUndoTransaction(cnx, txuuid)
+ p.cw_clear_all_caches()
+ self.assertEqual(p.nom, u'toto')
def test_undo_attr_update_ko(self):
- session = self.session
- p = session.create_entity('Personne', nom=u'toto')
- session.commit()
- self.session.set_cnxset()
- p.cw_set(nom=u'titi')
- txuuid = self.commit()
- p.cw_delete()
- self.commit()
- self.assertUndoTransaction(txuuid, [
- u"can't restore state of entity %s, it has been deleted inbetween" % p.eid])
+ with self.admin_access.client_cnx() as cnx:
+ p = cnx.create_entity('Personne', nom=u'toto')
+ cnx.commit()
+ p.cw_set(nom=u'titi')
+ txuuid = cnx.commit()
+ p.cw_delete()
+ cnx.commit()
+ self.assertUndoTransaction(cnx, txuuid, [
+ u"can't restore state of entity %s, it has been deleted inbetween" % p.eid])
class UndoExceptionInUnicode(CubicWebTC):
--- a/sobjects/cwxmlparser.py Fri May 23 18:35:13 2014 +0200
+++ b/sobjects/cwxmlparser.py Fri Jun 27 11:48:26 2014 +0200
@@ -467,7 +467,7 @@
self._clear_relation((ttype,))
def _find_entities(self, item, kwargs):
- return tuple(self._cw.find_entities(item['cwtype'], **kwargs))
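+        # find() supersedes the deprecated find_entities() in 3.19; calling
+        # .entities() on the returned result set keeps the tuple-of-entities behaviour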
+ return tuple(self._cw.find(item['cwtype'], **kwargs).entities())
class CWEntityXMLActionLinkInState(CWEntityXMLActionLink):
--- a/sobjects/notification.py Fri May 23 18:35:13 2014 +0200
+++ b/sobjects/notification.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -123,32 +123,37 @@
emailaddr = something.cw_adapt_to('IEmailable').get_email()
user = something
# hi-jack self._cw to get a session for the returned user
- self._cw = Session(user, self._cw.repo)
- try:
- self._cw.set_cnxset()
- # since the same view (eg self) may be called multiple time and we
- # need a fresh stream at each iteration, reset it explicitly
- self.w = None
- # XXX call render before subject to set .row/.col attributes on the
- # view
+ session = Session(user, self._cw.repo)
+ with session.new_cnx() as cnx:
+ self._cw = cnx
try:
- content = self.render(row=0, col=0, **kwargs)
- subject = self.subject()
- except SkipEmail:
- continue
- except Exception as ex:
- # shouldn't make the whole transaction fail because of rendering
- # error (unauthorized or such) XXX check it doesn't actually
- # occurs due to rollback on such error
- self.exception(str(ex))
- continue
- msg = format_mail(self.user_data, [emailaddr], content, subject,
- config=self._cw.vreg.config, msgid=msgid, references=refs)
- yield [emailaddr], msg
- finally:
- self._cw.commit()
- self._cw.close()
- self._cw = req
+                # since the same view (eg self) may be called multiple times and we
+ # need a fresh stream at each iteration, reset it explicitly
+ self.w = None
+ try:
+ # XXX forcing the row & col here may make the content and
+ # subject inconsistent because subject will depend on
+ # self.cw_row & self.cw_col if they are set.
+ content = self.render(row=0, col=0, **kwargs)
+ subject = self.subject()
+ except SkipEmail:
+ continue
+ except Exception as ex:
+ # shouldn't make the whole transaction fail because of rendering
+ # error (unauthorized or such) XXX check it doesn't actually
+                    # occur due to rollback on such error
+ self.exception(str(ex))
+ continue
+ msg = format_mail(self.user_data, [emailaddr], content, subject,
+ config=self._cw.vreg.config, msgid=msgid, references=refs)
+ yield [emailaddr], msg
+ finally:
+                # ensure we have a cnxset since commit will fail if there are
+                # pending operations but no cnxset. This may occur in this very
+                # specific case (eg SendMailOp)
+ with cnx.ensure_cnx_set:
+ cnx.commit()
+ self._cw = req
# recipients / email sending ###############################################
--- a/sobjects/services.py Fri May 23 18:35:13 2014 +0200
+++ b/sobjects/services.py Fri Jun 27 11:48:26 2014 +0200
@@ -154,3 +154,5 @@
cnx.execute('INSERT EmailAddress X: X address %(email)s, '
'U primary_email X, U use_email X '
'WHERE U login %(login)s', d, build_descr=False)
+
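+    # presumably returned so that callers of the service can use the freshly
+    # created user entity directly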
+ return user
--- a/sobjects/supervising.py Fri May 23 18:35:13 2014 +0200
+++ b/sobjects/supervising.py Fri Jun 27 11:48:26 2014 +0200
@@ -142,16 +142,16 @@
self.w(u' %s' % entity.absolute_url())
def _relation_context(self, changedescr):
- session = self._cw
+ cnx = self._cw
def describe(eid):
try:
- return session._(session.entity_metas(eid)['type']).lower()
+ return cnx._(cnx.entity_metas(eid)['type']).lower()
except UnknownEid:
                # may occur when an entity has been deleted from an external
# source and we're cleaning its relation
- return session._('unknown external entity')
+ return cnx._('unknown external entity')
eidfrom, rtype, eidto = changedescr.eidfrom, changedescr.rtype, changedescr.eidto
- return {'rtype': session._(rtype),
+ return {'rtype': cnx._(rtype),
'eidfrom': eidfrom,
'frometype': describe(eidfrom),
'eidto': eidto,
@@ -171,16 +171,15 @@
of changes
"""
def _get_view(self):
- return self.session.vreg['components'].select('supervision_notif',
- self.session)
+ return self.cnx.vreg['components'].select('supervision_notif', self.cnx)
def _prepare_email(self):
- session = self.session
- config = session.vreg.config
+ cnx = self.cnx
+ config = cnx.vreg.config
uinfo = {'email': config['sender-addr'],
'name': config['sender-name']}
view = self._get_view()
- content = view.render(changes=session.transaction_data.get('pendingchanges'))
+ content = view.render(changes=cnx.transaction_data.get('pendingchanges'))
recipients = view.recipients()
msg = format_mail(uinfo, recipients, content, view.subject(), config=config)
self.to_send = [(msg, recipients)]
--- a/sobjects/test/unittest_cwxmlparser.py Fri May 23 18:35:13 2014 +0200
+++ b/sobjects/test/unittest_cwxmlparser.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2011-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2011-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -157,17 +157,18 @@
def test_complete_url(self):
dfsource = self.repo.sources_by_uri['myfeed']
- parser = dfsource._get_parser(self.session)
- self.assertEqual(parser.complete_url('http://www.cubicweb.org/CWUser'),
- 'http://www.cubicweb.org/CWUser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject')
- self.assertEqual(parser.complete_url('http://www.cubicweb.org/cwuser'),
- 'http://www.cubicweb.org/cwuser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject')
- self.assertEqual(parser.complete_url('http://www.cubicweb.org/cwuser?vid=rdf&relation=hop'),
- 'http://www.cubicweb.org/cwuser?relation=hop&relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject&vid=rdf')
- self.assertEqual(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&vid=rdf&relation=hop'),
- 'http://www.cubicweb.org/?rql=cwuser&relation=hop&vid=rdf')
- self.assertEqual(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&relation=hop'),
- 'http://www.cubicweb.org/?rql=cwuser&relation=hop')
+ with self.admin_access.repo_cnx() as cnx:
+ parser = dfsource._get_parser(cnx)
+ self.assertEqual(parser.complete_url('http://www.cubicweb.org/CWUser'),
+ 'http://www.cubicweb.org/CWUser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject')
+ self.assertEqual(parser.complete_url('http://www.cubicweb.org/cwuser'),
+ 'http://www.cubicweb.org/cwuser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject')
+ self.assertEqual(parser.complete_url('http://www.cubicweb.org/cwuser?vid=rdf&relation=hop'),
+ 'http://www.cubicweb.org/cwuser?relation=hop&relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject&vid=rdf')
+ self.assertEqual(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&vid=rdf&relation=hop'),
+ 'http://www.cubicweb.org/?rql=cwuser&relation=hop&vid=rdf')
+ self.assertEqual(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&relation=hop'),
+ 'http://www.cubicweb.org/?rql=cwuser&relation=hop')
def test_actions(self):
@@ -192,113 +193,105 @@
(u'Tag', {u'linkattr': u'name'})],
},
})
- session = self.repo.internal_session(safe=True)
- stats = dfsource.pull_data(session, force=True, raise_on_error=True)
- self.assertEqual(sorted(stats), ['checked', 'created', 'updated'])
- self.assertEqual(len(stats['created']), 2)
- self.assertEqual(stats['updated'], set())
+ with self.repo.internal_cnx() as cnx:
+ stats = dfsource.pull_data(cnx, force=True, raise_on_error=True)
+ self.assertEqual(sorted(stats), ['checked', 'created', 'updated'])
+ self.assertEqual(len(stats['created']), 2)
+ self.assertEqual(stats['updated'], set())
- user = self.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0)
- self.assertEqual(user.creation_date, datetime(2010, 01, 22, 10, 27, 59))
- self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06))
- self.assertEqual(user.cwuri, 'http://pouet.org/5')
- self.assertEqual(user.cw_source[0].name, 'myfeed')
- self.assertEqual(user.absolute_url(), 'http://pouet.org/5')
- self.assertEqual(len(user.use_email), 1)
- # copy action
- email = user.use_email[0]
- self.assertEqual(email.address, 'syt@logilab.fr')
- self.assertEqual(email.cwuri, 'http://pouet.org/6')
- self.assertEqual(email.absolute_url(), 'http://pouet.org/6')
- self.assertEqual(email.cw_source[0].name, 'myfeed')
- self.assertEqual(len(email.reverse_tags), 1)
- self.assertEqual(email.reverse_tags[0].name, 'hop')
- # link action
- self.assertFalse(self.execute('CWGroup X WHERE X name "unknown"'))
- groups = sorted([g.name for g in user.in_group])
- self.assertEqual(groups, ['users'])
- group = user.in_group[0]
- self.assertEqual(len(group.reverse_tags), 1)
- self.assertEqual(group.reverse_tags[0].name, 'hop')
- # link or create action
- tags = set([(t.name, t.cwuri.replace(str(t.eid), ''), t.cw_source[0].name)
- for t in user.reverse_tags])
- self.assertEqual(tags, set((('hop', 'http://testing.fr/cubicweb/', 'system'),
- ('unknown', 'http://testing.fr/cubicweb/', 'system')))
- )
- session.set_cnxset()
- with session.security_enabled(read=False): # avoid Unauthorized due to password selection
- stats = dfsource.pull_data(session, force=True, raise_on_error=True)
- self.assertEqual(stats['created'], set())
- self.assertEqual(len(stats['updated']), 0)
- self.assertEqual(len(stats['checked']), 2)
- self.repo._type_source_cache.clear()
- self.repo._extid_cache.clear()
- session.set_cnxset()
- with session.security_enabled(read=False): # avoid Unauthorized due to password selection
- stats = dfsource.pull_data(session, force=True, raise_on_error=True)
- self.assertEqual(stats['created'], set())
- self.assertEqual(len(stats['updated']), 0)
- self.assertEqual(len(stats['checked']), 2)
- session.commit()
+ with self.admin_access.web_request() as req:
+ user = req.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0)
+ self.assertEqual(user.creation_date, datetime(2010, 01, 22, 10, 27, 59))
+ self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06))
+ self.assertEqual(user.cwuri, 'http://pouet.org/5')
+ self.assertEqual(user.cw_source[0].name, 'myfeed')
+ self.assertEqual(user.absolute_url(), 'http://pouet.org/5')
+ self.assertEqual(len(user.use_email), 1)
+ # copy action
+ email = user.use_email[0]
+ self.assertEqual(email.address, 'syt@logilab.fr')
+ self.assertEqual(email.cwuri, 'http://pouet.org/6')
+ self.assertEqual(email.absolute_url(), 'http://pouet.org/6')
+ self.assertEqual(email.cw_source[0].name, 'myfeed')
+ self.assertEqual(len(email.reverse_tags), 1)
+ self.assertEqual(email.reverse_tags[0].name, 'hop')
+ # link action
+ self.assertFalse(req.execute('CWGroup X WHERE X name "unknown"'))
+ groups = sorted([g.name for g in user.in_group])
+ self.assertEqual(groups, ['users'])
+ group = user.in_group[0]
+ self.assertEqual(len(group.reverse_tags), 1)
+ self.assertEqual(group.reverse_tags[0].name, 'hop')
+ # link or create action
+ tags = set([(t.name, t.cwuri.replace(str(t.eid), ''), t.cw_source[0].name)
+ for t in user.reverse_tags])
+ self.assertEqual(tags, set((('hop', 'http://testing.fr/cubicweb/', 'system'),
+ ('unknown', 'http://testing.fr/cubicweb/', 'system')))
+ )
+ with self.repo.internal_cnx() as cnx:
+ stats = dfsource.pull_data(cnx, force=True, raise_on_error=True)
+ self.assertEqual(stats['created'], set())
+ self.assertEqual(len(stats['updated']), 0)
+ self.assertEqual(len(stats['checked']), 2)
+ self.repo._type_source_cache.clear()
+ self.repo._extid_cache.clear()
+ stats = dfsource.pull_data(cnx, force=True, raise_on_error=True)
+ self.assertEqual(stats['created'], set())
+ self.assertEqual(len(stats['updated']), 0)
+ self.assertEqual(len(stats['checked']), 2)
- # test move to system source
- self.sexecute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': email.eid})
- self.commit()
- rset = self.sexecute('EmailAddress X WHERE X address "syt@logilab.fr"')
- self.assertEqual(len(rset), 1)
- e = rset.get_entity(0, 0)
- self.assertEqual(e.eid, email.eid)
- self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system',
- 'use-cwuri-as-url': False},
- 'type': 'EmailAddress',
- 'extid': None})
- self.assertEqual(e.cw_source[0].name, 'system')
- self.assertEqual(e.reverse_use_email[0].login, 'sthenault')
- self.commit()
- # test everything is still fine after source synchronization
- session.set_cnxset()
- with session.security_enabled(read=False): # avoid Unauthorized due to password selection
- stats = dfsource.pull_data(session, force=True, raise_on_error=True)
- rset = self.sexecute('EmailAddress X WHERE X address "syt@logilab.fr"')
- self.assertEqual(len(rset), 1)
- e = rset.get_entity(0, 0)
- self.assertEqual(e.eid, email.eid)
- self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system',
- 'use-cwuri-as-url': False},
- 'type': 'EmailAddress',
- 'extid': None})
- self.assertEqual(e.cw_source[0].name, 'system')
- self.assertEqual(e.reverse_use_email[0].login, 'sthenault')
- session.commit()
+ # test move to system source
+ cnx.execute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': email.eid})
+ cnx.commit()
+ rset = cnx.execute('EmailAddress X WHERE X address "syt@logilab.fr"')
+ self.assertEqual(len(rset), 1)
+ e = rset.get_entity(0, 0)
+ self.assertEqual(e.eid, email.eid)
+ self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system',
+ 'use-cwuri-as-url': False},
+ 'type': 'EmailAddress',
+ 'extid': None})
+ self.assertEqual(e.cw_source[0].name, 'system')
+ self.assertEqual(e.reverse_use_email[0].login, 'sthenault')
+ # test everything is still fine after source synchronization
+ stats = dfsource.pull_data(cnx, force=True, raise_on_error=True)
+ rset = cnx.execute('EmailAddress X WHERE X address "syt@logilab.fr"')
+ self.assertEqual(len(rset), 1)
+ e = rset.get_entity(0, 0)
+ self.assertEqual(e.eid, email.eid)
+ self.assertEqual(e.cw_metainformation(), {'source': {'type': u'native', 'uri': u'system',
+ 'use-cwuri-as-url': False},
+ 'type': 'EmailAddress',
+ 'extid': None})
+ self.assertEqual(e.cw_source[0].name, 'system')
+ self.assertEqual(e.reverse_use_email[0].login, 'sthenault')
+ cnx.commit()
- # test delete entity
- e.cw_delete()
- self.commit()
- # test everything is still fine after source synchronization
- session.set_cnxset()
- with session.security_enabled(read=False): # avoid Unauthorized due to password selection
- stats = dfsource.pull_data(session, force=True, raise_on_error=True)
- rset = self.sexecute('EmailAddress X WHERE X address "syt@logilab.fr"')
- self.assertEqual(len(rset), 0)
- rset = self.sexecute('Any X WHERE X use_email E, X login "sthenault"')
- self.assertEqual(len(rset), 0)
+ # test delete entity
+ e.cw_delete()
+ cnx.commit()
+ # test everything is still fine after source synchronization
+ stats = dfsource.pull_data(cnx, force=True, raise_on_error=True)
+ rset = cnx.execute('EmailAddress X WHERE X address "syt@logilab.fr"')
+ self.assertEqual(len(rset), 0)
+ rset = cnx.execute('Any X WHERE X use_email E, X login "sthenault"')
+ self.assertEqual(len(rset), 0)
def test_external_entity(self):
dfsource = self.repo.sources_by_uri['myotherfeed']
- session = self.repo.internal_session(safe=True)
- stats = dfsource.pull_data(session, force=True, raise_on_error=True)
- user = self.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0)
- self.assertEqual(user.creation_date, datetime(2010, 01, 22, 10, 27, 59))
- self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06))
- self.assertEqual(user.cwuri, 'http://pouet.org/5')
- self.assertEqual(user.cw_source[0].name, 'myfeed')
+ with self.repo.internal_cnx() as cnx:
+ stats = dfsource.pull_data(cnx, force=True, raise_on_error=True)
+ user = cnx.execute('CWUser X WHERE X login "sthenault"').get_entity(0, 0)
+ self.assertEqual(user.creation_date, datetime(2010, 01, 22, 10, 27, 59))
+ self.assertEqual(user.modification_date, datetime(2011, 01, 25, 14, 14, 06))
+ self.assertEqual(user.cwuri, 'http://pouet.org/5')
+ self.assertEqual(user.cw_source[0].name, 'myfeed')
def test_noerror_missing_fti_attribute(self):
dfsource = self.repo.sources_by_uri['myfeed']
- session = self.repo.internal_session(safe=True)
- parser = dfsource._get_parser(session)
- dfsource.process_urls(parser, ['''
+ with self.repo.internal_cnx() as cnx:
+ parser = dfsource._get_parser(cnx)
+ dfsource.process_urls(parser, ['''
<rset size="1">
<Card eid="50" cwuri="http://pouet.org/50" cwsource="system">
<title>how-to</title>
@@ -308,9 +301,9 @@
def test_noerror_unspecified_date(self):
dfsource = self.repo.sources_by_uri['myfeed']
- session = self.repo.internal_session(safe=True)
- parser = dfsource._get_parser(session)
- dfsource.process_urls(parser, ['''
+ with self.repo.internal_cnx() as cnx:
+ parser = dfsource._get_parser(cnx)
+ dfsource.process_urls(parser, ['''
<rset size="1">
<Card eid="50" cwuri="http://pouet.org/50" cwsource="system">
<title>how-to</title>
--- a/sobjects/test/unittest_email.py Fri May 23 18:35:13 2014 +0200
+++ b/sobjects/test/unittest_email.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -15,9 +15,6 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-"""
from cubicweb import Unauthorized
from cubicweb.devtools.testlib import CubicWebTC
@@ -25,45 +22,46 @@
class EmailAddressHooksTC(CubicWebTC):
def test_use_email_set_primary_email(self):
- self.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U use_email X WHERE U login "admin"')
- self.assertEqual(self.execute('Any A WHERE U primary_email X, U login "admin", X address A').rows,
- [])
- self.commit()
- self.assertEqual(self.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0],
- 'admin@logilab.fr')
- # having another email should'nt change anything
- self.execute('INSERT EmailAddress X: X address "a@logilab.fr", U use_email X WHERE U login "admin"')
- self.commit()
- self.assertEqual(self.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0],
- 'admin@logilab.fr')
+ with self.admin_access.client_cnx() as cnx:
+ cnx.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U use_email X WHERE U login "admin"')
+ self.assertEqual(cnx.execute('Any A WHERE U primary_email X, U login "admin", X address A').rows,
+ [])
+ cnx.commit()
+ self.assertEqual(cnx.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0],
+ 'admin@logilab.fr')
+            # having another email shouldn't change anything
+ cnx.execute('INSERT EmailAddress X: X address "a@logilab.fr", U use_email X WHERE U login "admin"')
+ cnx.commit()
+ self.assertEqual(cnx.execute('Any A WHERE U primary_email X, U login "admin", X address A')[0][0],
+ 'admin@logilab.fr')
def test_primary_email_set_use_email(self):
- self.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U primary_email X WHERE U login "admin"')
- self.assertEqual(self.execute('Any A WHERE U use_email X, U login "admin", X address A').rows,
- [])
- self.commit()
- self.assertEqual(self.execute('Any A WHERE U use_email X, U login "admin", X address A')[0][0],
- 'admin@logilab.fr')
+ with self.admin_access.client_cnx() as cnx:
+ cnx.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U primary_email X WHERE U login "admin"')
+ self.assertEqual(cnx.execute('Any A WHERE U use_email X, U login "admin", X address A').rows,
+ [])
+ cnx.commit()
+ self.assertEqual(cnx.execute('Any A WHERE U use_email X, U login "admin", X address A')[0][0],
+ 'admin@logilab.fr')
def test_cardinality_check(self):
- email1 = self.execute('INSERT EmailAddress E: E address "client@client.com", U use_email E WHERE U login "admin"')[0][0]
- self.commit()
- self.execute('SET U primary_email E WHERE U login "anon", E address "client@client.com"')
- self.commit()
- rset = self.execute('Any X WHERE X use_email E, E eid %(e)s', {'e': email1})
- self.assertFalse(rset.rowcount != 1, rset)
+ with self.admin_access.client_cnx() as cnx:
+ email1 = cnx.execute('INSERT EmailAddress E: E address "client@client.com", U use_email E WHERE U login "admin"')[0][0]
+ cnx.commit()
+ cnx.execute('SET U primary_email E WHERE U login "anon", E address "client@client.com"')
+ cnx.commit()
+ rset = cnx.execute('Any X WHERE X use_email E, E eid %(e)s', {'e': email1})
+ self.assertFalse(rset.rowcount != 1, rset)
def test_security_check(self):
- req = self.request()
- self.create_user(req, 'toto')
- email1 = self.execute('INSERT EmailAddress E: E address "client@client.com", U use_email E WHERE U login "admin"')[0][0]
- self.commit()
- cnx = self.login('toto')
- cu = cnx.cursor()
- self.assertRaises(Unauthorized,
- cu.execute, 'SET U primary_email E WHERE E eid %(e)s, U login "toto"',
- {'e': email1})
- cnx.close()
+ with self.admin_access.client_cnx() as cnx:
+ self.create_user(cnx, 'toto')
+ email1 = cnx.execute('INSERT EmailAddress E: E address "client@client.com", U use_email E WHERE U login "admin"')[0][0]
+ cnx.commit()
+ with self.new_access('toto').client_cnx() as cnx:
+ self.assertRaises(Unauthorized,
+ cnx.execute, 'SET U primary_email E WHERE E eid %(e)s, U login "toto"',
+ {'e': email1})
if __name__ == '__main__':
from logilab.common.testlib import unittest_main
--- a/sobjects/test/unittest_notification.py Fri May 23 18:35:13 2014 +0200
+++ b/sobjects/test/unittest_notification.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,5 +1,5 @@
# -*- coding: iso-8859-1 -*-
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -16,9 +16,7 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""
-"""
from socket import gethostname
from logilab.common.testlib import unittest_main, TestCase
@@ -63,33 +61,34 @@
class NotificationTC(CubicWebTC):
def test_recipients_finder(self):
- urset = self.execute('CWUser X WHERE X login "admin"')
- self.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U primary_email X '
- 'WHERE U eid %(x)s', {'x': urset[0][0]})
- self.execute('INSERT CWProperty X: X pkey "ui.language", X value "fr", X for_user U '
- 'WHERE U eid %(x)s', {'x': urset[0][0]})
- self.commit() # commit so that admin get its properties updated
- finder = self.vreg['components'].select('recipients_finder',
- self.request(), rset=urset)
- self.set_option('default-recipients-mode', 'none')
- self.assertEqual(finder.recipients(), [])
- self.set_option('default-recipients-mode', 'users')
- self.assertEqual(finder.recipients(), [(u'admin@logilab.fr', 'fr')])
- self.set_option('default-recipients-mode', 'default-dest-addrs')
- self.set_option('default-dest-addrs', 'abcd@logilab.fr, efgh@logilab.fr')
- self.assertEqual(finder.recipients(), [('abcd@logilab.fr', 'en'), ('efgh@logilab.fr', 'en')])
+ with self.admin_access.web_request() as req:
+ urset = req.execute('CWUser X WHERE X login "admin"')
+ req.execute('INSERT EmailAddress X: X address "admin@logilab.fr", U primary_email X '
+ 'WHERE U eid %(x)s', {'x': urset[0][0]})
+ req.execute('INSERT CWProperty X: X pkey "ui.language", X value "fr", X for_user U '
+ 'WHERE U eid %(x)s', {'x': urset[0][0]})
+            req.cnx.commit() # commit so that admin gets its properties updated
+ finder = self.vreg['components'].select('recipients_finder',
+ req, rset=urset)
+ self.set_option('default-recipients-mode', 'none')
+ self.assertEqual(finder.recipients(), [])
+ self.set_option('default-recipients-mode', 'users')
+ self.assertEqual(finder.recipients(), [(u'admin@logilab.fr', 'fr')])
+ self.set_option('default-recipients-mode', 'default-dest-addrs')
+ self.set_option('default-dest-addrs', 'abcd@logilab.fr, efgh@logilab.fr')
+ self.assertEqual(finder.recipients(), [('abcd@logilab.fr', 'en'), ('efgh@logilab.fr', 'en')])
def test_status_change_view(self):
- req = self.request()
- u = self.create_user(req, 'toto')
- iwfable = u.cw_adapt_to('IWorkflowable')
- iwfable.fire_transition('deactivate', comment=u'yeah')
- self.assertFalse(MAILBOX)
- self.commit()
- self.assertEqual(len(MAILBOX), 1)
- email = MAILBOX[0]
- self.assertEqual(email.content,
- '''
+ with self.admin_access.web_request() as req:
+ u = self.create_user(req, 'toto')
+ iwfable = u.cw_adapt_to('IWorkflowable')
+ iwfable.fire_transition('deactivate', comment=u'yeah')
+ self.assertFalse(MAILBOX)
+ req.cnx.commit()
+ self.assertEqual(len(MAILBOX), 1)
+ email = MAILBOX[0]
+ self.assertEqual(email.content,
+ '''
admin changed status from <activated> to <deactivated> for entity
'toto'
@@ -97,8 +96,8 @@
url: http://testing.fr/cubicweb/cwuser/toto
''')
- self.assertEqual(email.subject,
- 'status changed CWUser #%s (admin)' % u.eid)
+ self.assertEqual(email.subject,
+ 'status changed CWUser #%s (admin)' % u.eid)
if __name__ == '__main__':
unittest_main()
--- a/sobjects/test/unittest_supervising.py Fri May 23 18:35:13 2014 +0200
+++ b/sobjects/test/unittest_supervising.py Fri Jun 27 11:48:26 2014 +0200
@@ -16,9 +16,6 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""
-
-"""
import re
from logilab.common.testlib import unittest_main
@@ -30,38 +27,38 @@
class SupervisingTC(CubicWebTC):
def setup_database(self):
- req = self.request()
- req.create_entity('Card', title=u"une news !", content=u"cubicweb c'est beau")
- req.create_entity('Card', title=u"une autre news !", content=u"cubicweb c'est beau")
- req.create_entity('Bookmark', title=u"un signet !", path=u"view?vid=index")
- req.create_entity('Comment', content=u"Yo !")
- self.execute('SET C comments B WHERE B title "une autre news !", C content "Yo !"')
+ with self.admin_access.client_cnx() as cnx:
+ cnx.create_entity('Card', title=u"une news !", content=u"cubicweb c'est beau")
+ card = cnx.create_entity('Card', title=u"une autre news !", content=u"cubicweb c'est beau")
+ cnx.create_entity('Bookmark', title=u"un signet !", path=u"view?vid=index")
+ cnx.create_entity('Comment', content=u"Yo !", comments=card)
+ cnx.commit()
self.vreg.config.global_set_option('supervising-addrs', 'test@logilab.fr')
def test_supervision(self):
# do some modification
- user = self.execute('INSERT CWUser X: X login "toto", X upassword "sosafe", X in_group G '
- 'WHERE G name "users"').get_entity(0, 0)
- self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': user.eid})
- self.execute('DELETE Card B WHERE B title "une news !"')
- self.execute('SET X bookmarked_by U WHERE X is Bookmark, U eid %(x)s', {'x': user.eid})
- self.execute('SET X content "duh?" WHERE X is Comment')
- self.execute('DELETE Comment C WHERE C comments Y, Y is Card, Y title "une autre news !"')
- # check only one supervision email operation
- session = self.session
- sentops = [op for op in session.pending_operations
- if isinstance(op, SupervisionMailOp)]
- self.assertEqual(len(sentops), 1)
- # check view content
- op = sentops[0]
- view = sentops[0]._get_view()
- self.assertEqual(view.recipients(), ['test@logilab.fr'])
- self.assertEqual(view.subject(), '[data supervision] changes summary')
- data = view.render(changes=session.transaction_data.get('pendingchanges')).strip()
- data = re.sub('#\d+', '#EID', data)
- data = re.sub('/\d+', '/EID', data)
- self.assertMultiLineEqual('''user admin has made the following change(s):
+ with self.admin_access.repo_cnx() as cnx:
+ user = cnx.execute('INSERT CWUser X: X login "toto", X upassword "sosafe", X in_group G '
+ 'WHERE G name "users"').get_entity(0, 0)
+ cnx.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': user.eid})
+ cnx.execute('DELETE Card B WHERE B title "une news !"')
+ cnx.execute('SET X bookmarked_by U WHERE X is Bookmark, U eid %(x)s', {'x': user.eid})
+ cnx.execute('SET X content "duh?" WHERE X is Comment')
+ cnx.execute('DELETE Comment C WHERE C comments Y, Y is Card, Y title "une autre news !"')
+ # check only one supervision email operation
+ sentops = [op for op in cnx.pending_operations
+ if isinstance(op, SupervisionMailOp)]
+ self.assertEqual(len(sentops), 1)
+ # check view content
+ op = sentops[0]
+ view = sentops[0]._get_view()
+ self.assertEqual(view.recipients(), ['test@logilab.fr'])
+ self.assertEqual(view.subject(), '[data supervision] changes summary')
+ data = view.render(changes=cnx.transaction_data.get('pendingchanges')).strip()
+ data = re.sub('#\d+', '#EID', data)
+ data = re.sub('/\d+', '/EID', data)
+ self.assertMultiLineEqual('''user admin has made the following change(s):
* added cwuser #EID (toto)
http://testing.fr/cubicweb/cwuser/toto
@@ -77,24 +74,24 @@
* deleted comment #EID (duh?)''',
data)
- # check prepared email
- op._prepare_email()
- self.assertEqual(len(op.to_send), 1)
- self.assert_(op.to_send[0][0])
- self.assertEqual(op.to_send[0][1], ['test@logilab.fr'])
- self.commit()
- # some other changes #######
- user.cw_adapt_to('IWorkflowable').fire_transition('deactivate')
- sentops = [op for op in session.pending_operations
- if isinstance(op, SupervisionMailOp)]
- self.assertEqual(len(sentops), 1)
- # check view content
- op = sentops[0]
- view = sentops[0]._get_view()
- data = view.render(changes=session.transaction_data.get('pendingchanges')).strip()
- data = re.sub('#\d+', '#EID', data)
- data = re.sub('/\d+', '/EID', data)
- self.assertMultiLineEqual('''user admin has made the following change(s):
+ # check prepared email
+ op._prepare_email()
+ self.assertEqual(len(op.to_send), 1)
+ self.assert_(op.to_send[0][0])
+ self.assertEqual(op.to_send[0][1], ['test@logilab.fr'])
+ cnx.commit()
+ # some other changes #######
+ user.cw_adapt_to('IWorkflowable').fire_transition('deactivate')
+ sentops = [op for op in cnx.pending_operations
+ if isinstance(op, SupervisionMailOp)]
+ self.assertEqual(len(sentops), 1)
+ # check view content
+ op = sentops[0]
+ view = sentops[0]._get_view()
+ data = view.render(changes=cnx.transaction_data.get('pendingchanges')).strip()
+ data = re.sub('#\d+', '#EID', data)
+ data = re.sub('/\d+', '/EID', data)
+ self.assertMultiLineEqual('''user admin has made the following change(s):
* changed state of cwuser #EID (toto)
from state activated to state deactivated
@@ -102,10 +99,10 @@
data)
def test_nonregr1(self):
- session = self.session
- # do some unlogged modification
- self.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': session.user.eid})
- self.commit() # no crash
+ with self.admin_access.repo_cnx() as cnx:
+ # do some unlogged modification
+ cnx.execute('SET X last_login_time NOW WHERE X eid %(x)s', {'x': cnx.user.eid})
+ cnx.commit() # no crash
if __name__ == '__main__':
--- a/test/data/schema.py Fri May 23 18:35:13 2014 +0200
+++ b/test/data/schema.py Fri Jun 27 11:48:26 2014 +0200
@@ -60,7 +60,7 @@
class Produit(EntityType):
- fabrique_par = SubjectRelation('Usine', cardinality='1*')
+ fabrique_par = SubjectRelation('Usine', cardinality='1*', inlined=True)
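+    # presumably made inlined so that the new related-cache regression test in
+    # test/unittest_entity.py (test_no_prefill_related_cache_bug) exercises an
+    # inlined relation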
class Usine(EntityType):
--- a/test/unittest_dataimport.py Fri May 23 18:35:13 2014 +0200
+++ b/test/unittest_dataimport.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,6 +1,8 @@
from StringIO import StringIO
from logilab.common.testlib import TestCase, unittest_main
from cubicweb import dataimport
+
+
class UcsvreaderTC(TestCase):
def test_empty_lines_skipped(self):
@@ -21,6 +23,34 @@
],
list(dataimport.ucsvreader(stream, skip_empty=False)))
+ def test_skip_first(self):
+ stream = StringIO('a,b,c,d,\n'
+ '1,2,3,4,\n')
+ reader = dataimport.ucsvreader(stream, skipfirst=True,
+ ignore_errors=True)
+ self.assertEqual(list(reader),
+ [[u'1', u'2', u'3', u'4', u'']])
+
+ stream.seek(0)
+ reader = dataimport.ucsvreader(stream, skipfirst=True,
+ ignore_errors=False)
+ self.assertEqual(list(reader),
+ [[u'1', u'2', u'3', u'4', u'']])
+
+ stream.seek(0)
+ reader = dataimport.ucsvreader(stream, skipfirst=False,
+ ignore_errors=True)
+ self.assertEqual(list(reader),
+ [[u'a', u'b', u'c', u'd', u''],
+ [u'1', u'2', u'3', u'4', u'']])
+
+ stream.seek(0)
+ reader = dataimport.ucsvreader(stream, skipfirst=False,
+ ignore_errors=False)
+ self.assertEqual(list(reader),
+ [[u'a', u'b', u'c', u'd', u''],
+ [u'1', u'2', u'3', u'4', u'']])
+
if __name__ == '__main__':
unittest_main()
--- a/test/unittest_entity.py Fri May 23 18:35:13 2014 +0200
+++ b/test/unittest_entity.py Fri Jun 27 11:48:26 2014 +0200
@@ -44,6 +44,18 @@
for cls in self.vreg['etypes'].iter_classes():
cls.fetch_attrs, cls.cw_fetch_order = self.backup_dict[cls]
+ def test_no_prefill_related_cache_bug(self):
+ session = self.session
+ usine = session.create_entity('Usine', lieu=u'Montbeliard')
+ produit = session.create_entity('Produit')
+ # usine was prefilled in glob_add_entity
+ # let's simulate produit creation without prefill
+ produit._cw_related_cache.clear()
+ # use add_relations
+ session.add_relations([('fabrique_par', [(produit.eid, usine.eid)])])
+ self.assertEqual(1, len(usine.reverse_fabrique_par))
+ self.assertEqual(1, len(produit.fabrique_par))
+
def test_boolean_value(self):
with self.admin_access.web_request() as req:
e = self.vreg['etypes'].etype_class('CWUser')(req)
@@ -354,6 +366,7 @@
self.assertEqual(rql, 'Any S,AA,AB,AC,AD ORDERBY AA '
'WHERE NOT S use_email O, O eid %(x)s, S is_instance_of CWUser, '
'S login AA, S firstname AB, S surname AC, S modification_date AD')
+ req.cnx.commit()
rperms = self.schema['EmailAddress'].permissions['read']
clear_cache(self.schema['EmailAddress'], 'get_groups')
clear_cache(self.schema['EmailAddress'], 'get_rqlexprs')
@@ -688,7 +701,7 @@
e.cw_attr_cache['data_name'] = 'an html file'
e.cw_attr_cache['data_format'] = 'text/html'
e.cw_attr_cache['data_encoding'] = 'ascii'
- e._cw.transaction_data = {} # XXX req should be a session
+ e._cw.transaction_data.clear()
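+        # clearing in place (rather than rebinding the attribute) presumably
+        # keeps the mapping shared with the underlying connection intact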
words = e.cw_adapt_to('IFTIndexable').get_words()
words['C'].sort()
self.assertEqual({'C': sorted(['an', 'html', 'file', 'du', 'html', 'some', 'data'])},
--- a/test/unittest_migration.py Fri May 23 18:35:13 2014 +0200
+++ b/test/unittest_migration.py Fri Jun 27 11:48:26 2014 +0200
@@ -76,7 +76,7 @@
def test_filter_scripts_for_mode(self):
config = CubicWebConfiguration('data')
config.verbosity = 0
- self.assert_(not isinstance(config.migration_handler(), ServerMigrationHelper))
+ self.assertNotIsInstance(config.migration_handler(), ServerMigrationHelper)
self.assertIsInstance(config.migration_handler(), MigrationHelper)
config = self.config
config.__class__.name = 'repository'
--- a/utils.py Fri May 23 18:35:13 2014 +0200
+++ b/utils.py Fri Jun 27 11:48:26 2014 +0200
@@ -169,8 +169,6 @@
id(self), self._item, self._size)
def __len__(self):
return self._size
- def __nonzero__(self):
- return self._size
def __iter__(self):
return repeat(self._item, self._size)
def __getitem__(self, index):
--- a/web/_exceptions.py Fri May 23 18:35:13 2014 +0200
+++ b/web/_exceptions.py Fri Jun 27 11:48:26 2014 +0200
@@ -64,7 +64,6 @@
def __repr__(self):
return '%s(%r, %r)' % (self.__class__.__name__, self.status, self.content)
- self.url = url
# Publish related error
--- a/web/captcha.py Fri May 23 18:35:13 2014 +0200
+++ b/web/captcha.py Fri Jun 27 11:48:26 2014 +0200
@@ -24,7 +24,7 @@
from random import randint, choice
from cStringIO import StringIO
-import Image, ImageFont, ImageDraw, ImageFilter
+from PIL import Image, ImageFont, ImageDraw, ImageFilter
from time import time
--- a/web/component.py Fri May 23 18:35:13 2014 +0200
+++ b/web/component.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -448,7 +448,7 @@
return domid(self.__regid__) + unicode(self.entity.eid)
def lazy_view_holder(self, w, entity, oid, registry='views'):
- """add a holder and return an url that may be used to replace this
+ """add a holder and return a URL that may be used to replace this
        holder by the html generated by the view specified by registry and
identifier. Registry defaults to 'views'.
"""
@@ -508,25 +508,27 @@
class EditRelationMixIn(ReloadableMixIn):
- def box_item(self, entity, etarget, rql, label):
+
+ def box_item(self, entity, etarget, fname, label):
"""builds HTML link to edit relation between `entity` and `etarget`"""
- args = {role(self)[0] : entity.eid, target(self)[0] : etarget.eid}
- url = self._cw.user_rql_callback((rql, args))
+ args = {role(self) : entity.eid, target(self): etarget.eid}
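+        # keys are the full role names ('subject' / 'object') since they are
+        # passed to the javascript call below rather than to an rql query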
# for each target, provide a link to edit the relation
- return u'[<a href="%s" class="action">%s</a>] %s' % (
- xml_escape(url), label, etarget.view('incontext'))
+ jscall = unicode(js.cw.utils.callAddOrDeleteThenReload(fname,
+ self.rtype,
+ args['subject'],
+ args['object']))
+ return u'[<a href="javascript: %s" class="action">%s</a>] %s' % (
+ xml_escape(jscall), label, etarget.view('incontext'))
def related_boxitems(self, entity):
- rql = 'DELETE S %s O WHERE S eid %%(s)s, O eid %%(o)s' % self.rtype
- return [self.box_item(entity, etarget, rql, u'-')
+ return [self.box_item(entity, etarget, 'delete_relation', u'-')
for etarget in self.related_entities(entity)]
def related_entities(self, entity):
return entity.related(self.rtype, role(self), entities=True)
def unrelated_boxitems(self, entity):
- rql = 'SET S %s O WHERE S eid %%(s)s, O eid %%(o)s' % self.rtype
- return [self.box_item(entity, etarget, rql, u'+')
+ return [self.box_item(entity, etarget, 'add_relation', u'+')
for etarget in self.unrelated_entities(entity)]
def unrelated_entities(self, entity):
--- a/web/data/cubicweb.ajax.js Fri May 23 18:35:13 2014 +0200
+++ b/web/data/cubicweb.ajax.js Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-/* copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+/* copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
* contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
*
* This file is part of CubicWeb.
@@ -371,7 +371,7 @@
/**
* .. function:: loadRemote(url, form, reqtype='GET', sync=false)
*
- * Asynchronously (unless `sync` argument is set to true) load an url or path
+ * Asynchronously (unless `sync` argument is set to true) load a URL or path
* and return a deferred whose callbacks args are decoded according to the
* Content-Type response header. `form` should be additional form params
 * dictionary, `reqtype` the HTTP request type (either 'GET' or 'POST').
@@ -515,16 +515,20 @@
});
}
-function userCallback(cbname) {
+userCallback = cw.utils.deprecatedFunction(
+ '[3.19] use a plain ajaxfunc instead of user callbacks',
+ function userCallback(cbname) {
setProgressCursor();
var d = loadRemote(AJAX_BASE_URL, ajaxFuncArgs('user_callback', null, cbname));
d.addCallback(resetCursor);
d.addErrback(resetCursor);
d.addErrback(remoteCallFailed);
return d;
-}
+});
-function userCallbackThenUpdateUI(cbname, compid, rql, msg, registry, nodeid) {
+userCallbackThenUpdateUI = cw.utils.deprecatedFunction(
+ '[3.19] use a plain ajaxfunc instead of user callbacks',
+ function userCallbackThenUpdateUI(cbname, compid, rql, msg, registry, nodeid) {
var d = userCallback(cbname);
d.addCallback(function() {
$('#' + nodeid).loadxhtml(AJAX_BASE_URL, ajaxFuncArgs('render', {'rql': rql},
@@ -533,9 +537,11 @@
updateMessage(msg);
}
});
-}
+});
-function userCallbackThenReloadPage(cbname, msg) {
+userCallbackThenReloadPage = cw.utils.deprecatedFunction(
+ '[3.19] use a plain ajaxfunc instead of user callbacks',
+ function userCallbackThenReloadPage(cbname, msg) {
var d = userCallback(cbname);
d.addCallback(function() {
window.location.reload();
@@ -543,7 +549,7 @@
updateMessage(msg);
}
});
-}
+});
/**
* .. function:: unregisterUserCallback(cbname)
@@ -551,14 +557,17 @@
* unregisters the python function registered on the server's side
* while the page was generated.
*/
-function unregisterUserCallback(cbname) {
+unregisterUserCallback = cw.utils.deprecatedFunction(
+ '[3.19] use a plain ajaxfunc instead of user callbacks',
+ function unregisterUserCallback(cbname) {
setProgressCursor();
var d = loadRemote(AJAX_BASE_URL, ajaxFuncArgs('unregister_user_callback',
null, cbname));
d.addCallback(resetCursor);
d.addErrback(resetCursor);
d.addErrback(remoteCallFailed);
-}
+});
+
//============= XXX move those functions? ====================================//
function openHash() {
--- a/web/data/cubicweb.htmlhelpers.js Fri May 23 18:35:13 2014 +0200
+++ b/web/data/cubicweb.htmlhelpers.js Fri Jun 27 11:48:26 2014 +0200
@@ -57,7 +57,7 @@
/**
* .. function:: asURL(props)
*
- * builds an url from an object (used as a dictionary)
+ * builds a URL from an object (used as a dictionary)
*
* >>> asURL({'rql' : "RQL", 'x': [1, 2], 'itemvid' : "oneline"})
* rql=RQL&vid=list&itemvid=oneline&x=1&x=2
--- a/web/data/cubicweb.js Fri May 23 18:35:13 2014 +0200
+++ b/web/data/cubicweb.js Fri Jun 27 11:48:26 2014 +0200
@@ -100,7 +100,8 @@
return $node.text();
}
return cw.evalJSON(sortvalue);
- }
+ },
+
});
@@ -336,8 +337,16 @@
$.map(cw.utils.sliceList(arguments, 1), JSON.stringify).join(',')
+ ')'
);
+ },
+
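+    /* `add_or_delete` is expected to be the name of an ajax func
+     * ('add_relation' or 'delete_relation', as used by EditRelationMixIn in
+     * web/component.py); the page is reloaded once the remote call completes. */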
+ callAddOrDeleteThenReload: function (add_or_delete, rtype, subjeid, objeid) {
+ var d = asyncRemoteExec(add_or_delete, rtype, subjeid, objeid);
+ d.addCallback(function() {
+ window.location.reload();
+ });
}
+
});
/** DOM factories ************************************************************/
--- a/web/data/cubicweb.old.css Fri May 23 18:35:13 2014 +0200
+++ b/web/data/cubicweb.old.css Fri Jun 27 11:48:26 2014 +0200
@@ -260,32 +260,48 @@
/* header */
table#header {
- background: %(headerBg)s;
+ background-image: linear-gradient(white, #e2e2e2);
width: 100%;
+ border-bottom: 1px solid #bbb;
+ text-shadow: 1px 1px 0 #f5f5f5;
}
table#header td {
vertical-align: middle;
}
-table#header a {
- color: #000;
+table#header, table#header a {
+ color: #444;
}
+
table#header td#headtext {
white-space: nowrap;
+ padding: 0 10px;
+ width: 10%;
+}
+
+#logo{
+ width: 150px;
+ height: 42px;
+ background-image: url(logo-cubicweb.svg);
+ background-repeat: no-repeat;
+ background-position: center center;
+ background-size: contain;
+ float: left;
}
table#header td#header-right {
- padding-top: 1em;
white-space: nowrap;
+ width: 10%;
}
table#header td#header-center{
- width: 100%;
+ border-bottom-left-radius: 10px;
+ border-top-left-radius: 10px;
+ padding-left: 1em;
}
span#appliName {
font-weight: bold;
- color: #000;
white-space: nowrap;
}
@@ -637,6 +653,8 @@
div#userActionsBox {
width: 14em;
text-align: right;
+ display: inline-block;
+ padding-right: 10px;
}
div#userActionsBox a.popupMenu {
Binary file web/data/favicon.ico has changed
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/logo-cubicweb-gray.svg Fri Jun 27 11:48:26 2014 +0200
@@ -0,0 +1,151 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="260.68375"
+ height="40.011749"
+ id="svg4127"
+ version="1.1"
+ inkscape:version="0.48.3.1 r9886"
+ sodipodi:docname="logo-cubicweb-gray.svg">
+ <defs
+ id="defs4129" />
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageopacity="0.0"
+ inkscape:pageshadow="2"
+ inkscape:zoom="1.979899"
+ inkscape:cx="129.03681"
+ inkscape:cy="-31.754963"
+ inkscape:document-units="px"
+ inkscape:current-layer="layer1"
+ showgrid="false"
+ fit-margin-top="0"
+ fit-margin-left="0"
+ fit-margin-right="0"
+ fit-margin-bottom="0"
+ inkscape:window-width="1362"
+ inkscape:window-height="729"
+ inkscape:window-x="0"
+ inkscape:window-y="18"
+ inkscape:window-maximized="0" />
+ <metadata
+ id="metadata4132">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title />
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ inkscape:label="Calque 1"
+ inkscape:groupmode="layer"
+ id="layer1"
+ transform="translate(-327.77712,-550.4231)">
+ <g
+ transform="matrix(1.0580599,0,0,1.0580599,1312.264,209.71605)"
+ id="g3151-5"
+ style="font-size:32.60407639px;font-style:normal;font-weight:bold;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#404042;fill-opacity:1;stroke:none;font-family:orbitron;-inkscape-font-specification:orbitron">
+ <path
+ inkscape:connector-curvature="0"
+ id="path3153-0"
+ style="fill:#404042;fill-opacity:1"
+ d="m -792.00021,355.53459 14.70444,0 0,4.27114 -14.70444,0 c -1.23896,0 -2.30402,-0.43473 -3.1952,-1.30417 -0.86944,-0.89117 -1.30416,-1.95624 -1.30416,-3.1952 l 0,-9.91164 c 0,-1.23894 0.43472,-2.29313 1.30416,-3.16259 0.89118,-0.89116 1.95624,-1.33675 3.1952,-1.33677 l 14.63923,0 0,4.27113 -14.63923,0 c -0.15215,2e-5 -0.22823,0.0761 -0.22822,0.22823 l 0,9.91164 c -10e-6,0.15216 0.0761,0.22824 0.22822,0.22823" />
+ <path
+ inkscape:connector-curvature="0"
+ style="fill:#404042;fill-opacity:1"
+ id="path3155-7"
+ d="m -833.03908,340.89536 4.27114,0 0,14.411 c -2e-5,1.23896 -0.44561,2.30403 -1.33677,3.1952 -0.86946,0.86944 -1.92366,1.30417 -3.1626,1.30417 l -10.20507,0 c -1.2607,0 -2.32576,-0.43473 -3.1952,-1.30417 -0.86945,-0.89117 -1.30417,-1.95624 -1.30416,-3.1952 l 0,-14.411 4.27113,0 0,14.411 c -10e-6,0.15216 0.0761,0.22824 0.22823,0.22823 l 10.20507,0 c 0.15214,1e-5 0.22822,-0.0761 0.22823,-0.22823 l 0,-14.411" />
+ <path
+ inkscape:connector-curvature="0"
+ style="fill:#404042;fill-opacity:1"
+ id="path3157-7"
+ d="m -811.05372,340.89536 c 1.23894,2e-5 2.29313,0.44561 3.16259,1.33677 0.89116,0.86946 1.33675,1.92365 1.33677,3.16259 l 0,9.91164 c -2e-5,1.23896 -0.44561,2.30403 -1.33677,3.1952 -0.86946,0.86944 -1.92365,1.30417 -3.16259,1.30417 l -14.70444,0 0,-25.10514 4.27113,0 0,6.19477 10.43331,0 m 0.22823,14.411 0,-9.91164 c -2e-5,-0.15213 -0.0761,-0.22821 -0.22823,-0.22823 l -10.20508,0 c -0.15216,2e-5 -0.22823,0.0761 -0.22823,0.22823 l 0,9.91164 c 0,0.15216 0.0761,0.22824 0.22823,0.22823 l 10.20508,0 c 0.15213,1e-5 0.22821,-0.0761 0.22823,-0.22823" />
+ <path
+ inkscape:connector-curvature="0"
+ id="path3159-3"
+ style="fill:#404042;fill-opacity:1"
+ d="m -804.04487,359.80573 0,-18.91037 4.27114,0 0,18.91037 -4.27114,0 m 0,-25.10514 4.27114,0 0,4.30373 -4.27114,0 0,-4.30373" />
+ </g>
+ <path
+ style="font-size:51.94805145px;font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#404042;fill-opacity:1;stroke:none;font-family:Orbitron;-inkscape-font-specification:Orbitron Bold"
+ inkscape:connector-curvature="0"
+ d="m 411.51608,557.76053 -32.16667,0 c -0.25926,10e-5 -0.38889,0.12972 -0.38888,0.38895 l 0,24.55552 c -1e-5,0.25923 0.12962,0.38885 0.38888,0.38885 l 32.16667,0 0,7.33339 -32.16667,0 c -2.14815,0 -3.98148,-0.7408 -5.49999,-2.2222 -1.48149,-1.51861 -2.22223,-3.35185 -2.22223,-5.50004 l 0,-24.55552 c 0,-2.14819 0.74074,-3.96293 2.22223,-5.44443 1.51851,-1.51851 3.35184,-2.27781 5.49999,-2.27781 l 32.16667,0 0,7.33329"
+ id="path3161-2" />
+ <path
+ style="font-size:51.94805145px;font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#404042;fill-opacity:1;stroke:none;font-family:Orbitron;-inkscape-font-specification:Orbitron Bold"
+ d="m 541.44864,550.4231 7.77778,0 -14.55555,40 -5.77778,0 -10.3889,-28.38894 -10.38888,28.38894 -5.72222,0 -14.55556,-40 7.77778,0 9.66667,26.38886 9.66666,-26.38886 7.16667,0 9.66667,26.38886 9.66666,-26.38886"
+ id="path3163-5"
+ inkscape:connector-curvature="0" />
+ <g
+ id="g3165-0"
+ style="font-size:32.25769424px;font-style:normal;font-weight:bold;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#404042;fill-opacity:1;stroke:none;font-family:orbitron;-inkscape-font-specification:orbitron"
+ transform="matrix(1.0694444,0,0,1.0694444,1321.9048,205.52534)">
+ <path
+ inkscape:connector-curvature="0"
+ id="path3167-8"
+ style="fill:#404042;fill-opacity:1"
+ d="m -712.66923,341.16755 c 1.22577,2e-5 2.26877,0.44087 3.12899,1.32257 0.88169,0.86022 1.32255,1.90322 1.32257,3.12899 l 0,7.03218 -14.77402,0 0,2.77416 c -1e-5,0.15054 0.0753,0.22581 0.2258,0.22581 l 14.54822,0 0,4.22575 -14.54822,0 c -1.2258,0 -2.27955,-0.4301 -3.16125,-1.2903 -0.86021,-0.88171 -1.29031,-1.93546 -1.29031,-3.16126 l 0,-9.80634 c 0,-1.22577 0.4301,-2.26877 1.29031,-3.12899 0.8817,-0.8817 1.93545,-1.32255 3.16125,-1.32257 l 10.09666,0 m -10.32246,7.22573 10.54826,0 0,-2.77417 c -10e-6,-0.15052 -0.0753,-0.22579 -0.2258,-0.2258 l -10.09666,0 c -0.15054,1e-5 -0.22581,0.0753 -0.2258,0.2258 l 0,2.77417" />
+ <path
+ inkscape:connector-curvature="0"
+ id="path3169-7"
+ style="fill:#404042;fill-opacity:1"
+ d="m -690.26928,341.16755 c 1.22577,2e-5 2.26877,0.44087 3.12899,1.32257 0.88169,0.86022 1.32255,1.90322 1.32257,3.12899 l 0,9.80634 c -2e-5,1.2258 -0.44088,2.27955 -1.32257,3.16126 -0.86022,0.8602 -1.90322,1.2903 -3.12899,1.2903 l -14.54822,0 0,-24.83842 4.22576,0 0,6.12896 10.32246,0 m 0.2258,14.2579 0,-9.80634 c -10e-6,-0.15052 -0.0753,-0.22579 -0.2258,-0.2258 l -10.09666,0 c -0.15054,1e-5 -0.22581,0.0753 -0.2258,0.2258 l 0,9.80634 c -1e-5,0.15054 0.0753,0.22581 0.2258,0.22581 l 10.09666,0 c 0.15052,0 0.22579,-0.0753 0.2258,-0.22581" />
+ </g>
+ <g
+ transform="translate(156.68927,208.07625)"
+ id="g3408"
+ style="fill:#404042;fill-opacity:1">
+ <path
+ sodipodi:nodetypes="ccccc"
+ inkscape:connector-curvature="0"
+ id="path3410"
+ d="m 196.89624,349.49384 -8.06851,4.8684 0,10.62195 8.06851,-4.8684 z"
+ style="fill:#404042;fill-opacity:1;stroke:none" />
+ <path
+ sodipodi:nodetypes="ccccc"
+ inkscape:connector-curvature="0"
+ style="fill:#404042;fill-opacity:1;stroke:none"
+ d="m 188.28608,343.70506 8.00049,4.84516 -8.00049,4.78822 -8.00051,-4.78822 z"
+ id="path3412" />
+ <path
+ style="fill:#404042;fill-opacity:1;stroke:none"
+ d="m 187.69852,366.86825 -8.06851,4.8684 0,10.62195 8.06851,-4.8684 z"
+ id="path3414"
+ inkscape:connector-curvature="0"
+ sodipodi:nodetypes="ccccc" />
+ <path
+ id="path3416"
+ d="m 179.08836,361.07947 8.00049,4.84516 -8.00049,4.78822 -8.00051,-4.78822 z"
+ style="fill:#404042;fill-opacity:1;stroke:none"
+ inkscape:connector-curvature="0"
+ sodipodi:nodetypes="ccccc" />
+ <path
+ sodipodi:nodetypes="ccccc"
+ inkscape:connector-curvature="0"
+ id="path3418"
+ d="m 206.05702,366.86825 -8.06851,4.8684 0,10.62195 8.06851,-4.8684 z"
+ style="fill:#404042;fill-opacity:1;stroke:none" />
+ <path
+ sodipodi:nodetypes="cccccc"
+ inkscape:connector-curvature="0"
+ id="path3420"
+ d="m 197.41294,361.10753 -8.00027,4.84524 8.00686,4.78602 c 0.0406,-0.0455 6.64235,-2.44836 7.44329,-10.21298 -2.81627,1.46 -3.71023,2.32033 -7.44988,0.58175 z"
+ style="fill:#404042;fill-opacity:1;stroke:none" />
+ </g>
+ </g>
+</svg>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/logo-cubicweb-icon.svg Fri Jun 27 11:48:26 2014 +0200
@@ -0,0 +1,100 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="34.96917"
+ height="38.653542"
+ id="svg4127"
+ version="1.1"
+ inkscape:version="0.48.3.1 r9886"
+ sodipodi:docname="logo-cubicweb-icon.svg">
+ <defs
+ id="defs4129" />
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageopacity="0.0"
+ inkscape:pageshadow="2"
+ inkscape:zoom="13.633938"
+ inkscape:cx="17.899925"
+ inkscape:cy="19.290099"
+ inkscape:document-units="px"
+ inkscape:current-layer="layer1"
+ showgrid="false"
+ fit-margin-top="0"
+ fit-margin-left="0"
+ fit-margin-right="0"
+ fit-margin-bottom="0"
+ inkscape:window-width="1362"
+ inkscape:window-height="729"
+ inkscape:window-x="0"
+ inkscape:window-y="18"
+ inkscape:window-maximized="0" />
+ <metadata
+ id="metadata4132">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title />
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ inkscape:label="Calque 1"
+ inkscape:groupmode="layer"
+ id="layer1"
+ transform="translate(-327.28442,-551.94182)">
+ <g
+ transform="translate(156.19657,208.23676)"
+ id="g3408">
+ <path
+ sodipodi:nodetypes="ccccc"
+ inkscape:connector-curvature="0"
+ id="path3410"
+ d="m 196.89624,349.49384 -8.06851,4.8684 0,10.62195 8.06851,-4.8684 z"
+ style="fill:#ff8800;fill-opacity:1;stroke:none" />
+ <path
+ sodipodi:nodetypes="ccccc"
+ inkscape:connector-curvature="0"
+ style="fill:#ff8800;fill-opacity:1;stroke:none"
+ d="m 188.28608,343.70506 8.00049,4.84516 -8.00049,4.78822 -8.00051,-4.78822 z"
+ id="path3412" />
+ <path
+ style="fill:#ff8800;fill-opacity:1;stroke:none"
+ d="m 187.69852,366.86825 -8.06851,4.8684 0,10.62195 8.06851,-4.8684 z"
+ id="path3414"
+ inkscape:connector-curvature="0"
+ sodipodi:nodetypes="ccccc" />
+ <path
+ id="path3416"
+ d="m 179.08836,361.07947 8.00049,4.84516 -8.00049,4.78822 -8.00051,-4.78822 z"
+ style="fill:#ff8800;fill-opacity:1;stroke:none"
+ inkscape:connector-curvature="0"
+ sodipodi:nodetypes="ccccc" />
+ <path
+ sodipodi:nodetypes="ccccc"
+ inkscape:connector-curvature="0"
+ id="path3418"
+ d="m 206.05702,366.86825 -8.06851,4.8684 0,10.62195 8.06851,-4.8684 z"
+ style="fill:#ff8800;fill-opacity:1;stroke:none" />
+ <path
+ sodipodi:nodetypes="cccccc"
+ inkscape:connector-curvature="0"
+ id="path3420"
+ d="m 197.41294,361.10753 -8.00027,4.84524 8.00686,4.78602 c 0.0406,-0.0455 6.64235,-2.44836 7.44329,-10.21298 -2.81627,1.46 -3.71023,2.32033 -7.44988,0.58175 z"
+ style="fill:#404042;fill-opacity:1;stroke:none" />
+ </g>
+ </g>
+</svg>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/logo-cubicweb-text.svg Fri Jun 27 11:48:26 2014 +0200
@@ -0,0 +1,110 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="216.83368"
+ height="40.004139"
+ id="svg4127"
+ version="1.1"
+ inkscape:version="0.48.3.1 r9886"
+ sodipodi:docname="logo-cubicweb-text.svg">
+ <defs
+ id="defs4129" />
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageopacity="0.0"
+ inkscape:pageshadow="2"
+ inkscape:zoom="1.979899"
+ inkscape:cx="184.14583"
+ inkscape:cy="-31.557783"
+ inkscape:document-units="px"
+ inkscape:current-layer="layer1"
+ showgrid="false"
+ fit-margin-top="0"
+ fit-margin-left="0"
+ fit-margin-right="0"
+ fit-margin-bottom="0"
+ inkscape:window-width="1362"
+ inkscape:window-height="1161"
+ inkscape:window-x="1920"
+ inkscape:window-y="18"
+ inkscape:window-maximized="0" />
+ <metadata
+ id="metadata4132">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title></dc:title>
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ inkscape:label="Calque 1"
+ inkscape:groupmode="layer"
+ id="layer1"
+ transform="translate(-370.6529,-550.62789)">
+ <g
+ transform="matrix(1.0580599,0,0,1.0580599,1311.2897,209.92084)"
+ id="g3151"
+ style="font-size:32.60407639px;font-style:normal;font-weight:bold;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#2b0000;fill-opacity:1;stroke:none;font-family:orbitron;-inkscape-font-specification:orbitron">
+ <path
+ inkscape:connector-curvature="0"
+ id="path3153"
+ style="fill:#404042;fill-opacity:1"
+ d="m -792.00021,355.53459 14.70444,0 0,4.27114 -14.70444,0 c -1.23896,0 -2.30402,-0.43473 -3.1952,-1.30417 -0.86944,-0.89117 -1.30416,-1.95624 -1.30416,-3.1952 l 0,-9.91164 c 0,-1.23894 0.43472,-2.29313 1.30416,-3.16259 0.89118,-0.89116 1.95624,-1.33675 3.1952,-1.33677 l 14.63923,0 0,4.27113 -14.63923,0 c -0.15215,2e-5 -0.22823,0.0761 -0.22822,0.22823 l 0,9.91164 c -10e-6,0.15216 0.0761,0.22824 0.22822,0.22823" />
+ <path
+ inkscape:connector-curvature="0"
+ style="fill:#404042;fill-opacity:1"
+ id="path3155"
+ d="m -833.03908,340.89536 4.27114,0 0,14.411 c -2e-5,1.23896 -0.44561,2.30403 -1.33677,3.1952 -0.86946,0.86944 -1.92366,1.30417 -3.1626,1.30417 l -10.20507,0 c -1.2607,0 -2.32576,-0.43473 -3.1952,-1.30417 -0.86945,-0.89117 -1.30417,-1.95624 -1.30416,-3.1952 l 0,-14.411 4.27113,0 0,14.411 c -10e-6,0.15216 0.0761,0.22824 0.22823,0.22823 l 10.20507,0 c 0.15214,1e-5 0.22822,-0.0761 0.22823,-0.22823 l 0,-14.411" />
+ <path
+ inkscape:connector-curvature="0"
+ style="fill:#404042;fill-opacity:1"
+ id="path3157"
+ d="m -811.05372,340.89536 c 1.23894,2e-5 2.29313,0.44561 3.16259,1.33677 0.89116,0.86946 1.33675,1.92365 1.33677,3.16259 l 0,9.91164 c -2e-5,1.23896 -0.44561,2.30403 -1.33677,3.1952 -0.86946,0.86944 -1.92365,1.30417 -3.16259,1.30417 l -14.70444,0 0,-25.10514 4.27113,0 0,6.19477 10.43331,0 m 0.22823,14.411 0,-9.91164 c -2e-5,-0.15213 -0.0761,-0.22821 -0.22823,-0.22823 l -10.20508,0 c -0.15216,2e-5 -0.22823,0.0761 -0.22823,0.22823 l 0,9.91164 c 0,0.15216 0.0761,0.22824 0.22823,0.22823 l 10.20508,0 c 0.15213,1e-5 0.22821,-0.0761 0.22823,-0.22823" />
+ <path
+ inkscape:connector-curvature="0"
+ id="path3159"
+ style="fill:#404042;fill-opacity:1"
+ d="m -804.04487,359.80573 0,-18.91037 4.27114,0 0,18.91037 -4.27114,0 m 0,-25.10514 4.27114,0 0,4.30373 -4.27114,0 0,-4.30373" />
+ </g>
+ <path
+ style="font-size:51.94805145px;font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#404042;fill-opacity:1;stroke:none;font-family:Orbitron;-inkscape-font-specification:Orbitron Bold"
+ inkscape:connector-curvature="0"
+ d="m 410.54179,557.96532 -32.16667,0 c -0.25926,10e-5 -0.38889,0.12972 -0.38888,0.38895 l 0,24.55552 c -1e-5,0.25923 0.12962,0.38885 0.38888,0.38885 l 32.16667,0 0,7.33339 -32.16667,0 c -2.14815,0 -3.98148,-0.7408 -5.49999,-2.2222 -1.48149,-1.51861 -2.22223,-3.35185 -2.22223,-5.50004 l 0,-24.55552 c 0,-2.14819 0.74074,-3.96293 2.22223,-5.44443 1.51851,-1.51851 3.35184,-2.27781 5.49999,-2.27781 l 32.16667,0 0,7.33329"
+ id="path3161" />
+ <path
+ style="font-size:51.94805145px;font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#ff8800;fill-opacity:1;stroke:none;font-family:Orbitron;-inkscape-font-specification:Orbitron Bold"
+ d="m 540.47435,550.62789 7.77778,0 -14.55555,40 -5.77778,0 -10.3889,-28.38894 -10.38888,28.38894 -5.72222,0 -14.55556,-40 7.77778,0 9.66667,26.38886 9.66666,-26.38886 7.16667,0 9.66667,26.38886 9.66666,-26.38886"
+ id="path3163"
+ inkscape:connector-curvature="0" />
+ <g
+ id="g3165"
+ style="font-size:32.25769424px;font-style:normal;font-weight:bold;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#ff8800;fill-opacity:1;stroke:none;font-family:orbitron;-inkscape-font-specification:orbitron"
+ transform="matrix(1.0694444,0,0,1.0694444,1320.9305,205.73013)">
+ <path
+ inkscape:connector-curvature="0"
+ id="path3167"
+ style="fill:#ff8800;fill-opacity:1"
+ d="m -712.66923,341.16755 c 1.22577,2e-5 2.26877,0.44087 3.12899,1.32257 0.88169,0.86022 1.32255,1.90322 1.32257,3.12899 l 0,7.03218 -14.77402,0 0,2.77416 c -1e-5,0.15054 0.0753,0.22581 0.2258,0.22581 l 14.54822,0 0,4.22575 -14.54822,0 c -1.2258,0 -2.27955,-0.4301 -3.16125,-1.2903 -0.86021,-0.88171 -1.29031,-1.93546 -1.29031,-3.16126 l 0,-9.80634 c 0,-1.22577 0.4301,-2.26877 1.29031,-3.12899 0.8817,-0.8817 1.93545,-1.32255 3.16125,-1.32257 l 10.09666,0 m -10.32246,7.22573 10.54826,0 0,-2.77417 c -10e-6,-0.15052 -0.0753,-0.22579 -0.2258,-0.2258 l -10.09666,0 c -0.15054,1e-5 -0.22581,0.0753 -0.2258,0.2258 l 0,2.77417" />
+ <path
+ inkscape:connector-curvature="0"
+ id="path3169"
+ style="fill:#ff8800;fill-opacity:1"
+ d="m -690.26928,341.16755 c 1.22577,2e-5 2.26877,0.44087 3.12899,1.32257 0.88169,0.86022 1.32255,1.90322 1.32257,3.12899 l 0,9.80634 c -2e-5,1.2258 -0.44088,2.27955 -1.32257,3.16126 -0.86022,0.8602 -1.90322,1.2903 -3.12899,1.2903 l -14.54822,0 0,-24.83842 4.22576,0 0,6.12896 10.32246,0 m 0.2258,14.2579 0,-9.80634 c -10e-6,-0.15052 -0.0753,-0.22579 -0.2258,-0.2258 l -10.09666,0 c -0.15054,1e-5 -0.22581,0.0753 -0.2258,0.2258 l 0,9.80634 c -1e-5,0.15054 0.0753,0.22581 0.2258,0.22581 l 10.09666,0 c 0.15052,0 0.22579,-0.0753 0.2258,-0.22581" />
+ </g>
+ </g>
+</svg>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/web/data/logo-cubicweb.svg Fri Jun 27 11:48:26 2014 +0200
@@ -0,0 +1,157 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+
+<svg
+ xmlns:dc="http://purl.org/dc/elements/1.1/"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ width="260.68744"
+ height="40.004143"
+ id="svg4127"
+ version="1.1"
+ inkscape:version="0.48.3.1 r9886"
+ sodipodi:docname="logo-cubicweb.svg">
+ <defs
+ id="defs4129" />
+ <sodipodi:namedview
+ id="base"
+ pagecolor="#ffffff"
+ bordercolor="#666666"
+ borderopacity="1.0"
+ inkscape:pageopacity="0.0"
+ inkscape:pageshadow="2"
+ inkscape:zoom="5.6"
+ inkscape:cx="65.025864"
+ inkscape:cy="3.1272067"
+ inkscape:document-units="px"
+ inkscape:current-layer="layer1"
+ showgrid="false"
+ fit-margin-top="0"
+ fit-margin-left="0"
+ fit-margin-right="0"
+ fit-margin-bottom="0"
+ inkscape:window-width="1916"
+ inkscape:window-height="1161"
+ inkscape:window-x="1366"
+ inkscape:window-y="18"
+ inkscape:window-maximized="0"
+ showguides="true"
+ inkscape:guide-bbox="true">
+ <sodipodi:guide
+ orientation="1,0"
+ position="-144.19927,66.164991"
+ id="guide3458" />
+ </sodipodi:namedview>
+ <metadata
+ id="metadata4132">
+ <rdf:RDF>
+ <cc:Work
+ rdf:about="">
+ <dc:format>image/svg+xml</dc:format>
+ <dc:type
+ rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
+ <dc:title />
+ </cc:Work>
+ </rdf:RDF>
+ </metadata>
+ <g
+ inkscape:label="Calque 1"
+ inkscape:groupmode="layer"
+ id="layer1"
+ transform="translate(-326.79915,-550.62789)">
+ <g
+ transform="matrix(1.0580599,0,0,1.0580599,1311.2897,209.92084)"
+ id="g3151"
+ style="font-size:32.60407639px;font-style:normal;font-weight:bold;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#2b0000;fill-opacity:1;stroke:none;font-family:orbitron;-inkscape-font-specification:orbitron">
+ <path
+ inkscape:connector-curvature="0"
+ id="path3153"
+ style="fill:#404042;fill-opacity:1"
+ d="m -792.00021,355.53459 14.70444,0 0,4.27114 -14.70444,0 c -1.23896,0 -2.30402,-0.43473 -3.1952,-1.30417 -0.86944,-0.89117 -1.30416,-1.95624 -1.30416,-3.1952 l 0,-9.91164 c 0,-1.23894 0.43472,-2.29313 1.30416,-3.16259 0.89118,-0.89116 1.95624,-1.33675 3.1952,-1.33677 l 14.63923,0 0,4.27113 -14.63923,0 c -0.15215,2e-5 -0.22823,0.0761 -0.22822,0.22823 l 0,9.91164 c -10e-6,0.15216 0.0761,0.22824 0.22822,0.22823" />
+ <path
+ inkscape:connector-curvature="0"
+ style="fill:#404042;fill-opacity:1"
+ id="path3155"
+ d="m -833.03908,340.89536 4.27114,0 0,14.411 c -2e-5,1.23896 -0.44561,2.30403 -1.33677,3.1952 -0.86946,0.86944 -1.92366,1.30417 -3.1626,1.30417 l -10.20507,0 c -1.2607,0 -2.32576,-0.43473 -3.1952,-1.30417 -0.86945,-0.89117 -1.30417,-1.95624 -1.30416,-3.1952 l 0,-14.411 4.27113,0 0,14.411 c -10e-6,0.15216 0.0761,0.22824 0.22823,0.22823 l 10.20507,0 c 0.15214,1e-5 0.22822,-0.0761 0.22823,-0.22823 l 0,-14.411" />
+ <path
+ inkscape:connector-curvature="0"
+ style="fill:#404042;fill-opacity:1"
+ id="path3157"
+ d="m -811.05372,340.89536 c 1.23894,2e-5 2.29313,0.44561 3.16259,1.33677 0.89116,0.86946 1.33675,1.92365 1.33677,3.16259 l 0,9.91164 c -2e-5,1.23896 -0.44561,2.30403 -1.33677,3.1952 -0.86946,0.86944 -1.92365,1.30417 -3.16259,1.30417 l -14.70444,0 0,-25.10514 4.27113,0 0,6.19477 10.43331,0 m 0.22823,14.411 0,-9.91164 c -2e-5,-0.15213 -0.0761,-0.22821 -0.22823,-0.22823 l -10.20508,0 c -0.15216,2e-5 -0.22823,0.0761 -0.22823,0.22823 l 0,9.91164 c 0,0.15216 0.0761,0.22824 0.22823,0.22823 l 10.20508,0 c 0.15213,1e-5 0.22821,-0.0761 0.22823,-0.22823" />
+ <path
+ inkscape:connector-curvature="0"
+ id="path3159"
+ style="fill:#404042;fill-opacity:1"
+ d="m -804.04487,359.80573 0,-18.91037 4.27114,0 0,18.91037 -4.27114,0 m 0,-25.10514 4.27114,0 0,4.30373 -4.27114,0 0,-4.30373" />
+ </g>
+ <path
+ style="font-size:51.94805145px;font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#404042;fill-opacity:1;stroke:none;font-family:Orbitron;-inkscape-font-specification:Orbitron Bold"
+ inkscape:connector-curvature="0"
+ d="m 410.54179,557.96532 -32.16667,0 c -0.25926,10e-5 -0.38889,0.12972 -0.38888,0.38895 l 0,24.55552 c -1e-5,0.25923 0.12962,0.38885 0.38888,0.38885 l 32.16667,0 0,7.33339 -32.16667,0 c -2.14815,0 -3.98148,-0.7408 -5.49999,-2.2222 -1.48149,-1.51861 -2.22223,-3.35185 -2.22223,-5.50004 l 0,-24.55552 c 0,-2.14819 0.74074,-3.96293 2.22223,-5.44443 1.51851,-1.51851 3.35184,-2.27781 5.49999,-2.27781 l 32.16667,0 0,7.33329"
+ id="path3161" />
+ <path
+ style="font-size:51.94805145px;font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;text-align:start;line-height:125%;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#ff8800;fill-opacity:1;stroke:none;font-family:Orbitron;-inkscape-font-specification:Orbitron Bold"
+ d="m 540.47435,550.62789 7.77778,0 -14.55555,40 -5.77778,0 -10.3889,-28.38894 -10.38888,28.38894 -5.72222,0 -14.55556,-40 7.77778,0 9.66667,26.38886 9.66666,-26.38886 7.16667,0 9.66667,26.38886 9.66666,-26.38886"
+ id="path3163"
+ inkscape:connector-curvature="0" />
+ <g
+ id="g3165"
+ style="font-size:32.25769424px;font-style:normal;font-weight:bold;line-height:125%;letter-spacing:0px;word-spacing:0px;fill:#ff8800;fill-opacity:1;stroke:none;font-family:orbitron;-inkscape-font-specification:orbitron"
+ transform="matrix(1.0694444,0,0,1.0694444,1320.9305,205.73013)">
+ <path
+ inkscape:connector-curvature="0"
+ id="path3167"
+ style="fill:#ff8800;fill-opacity:1"
+ d="m -712.66923,341.16755 c 1.22577,2e-5 2.26877,0.44087 3.12899,1.32257 0.88169,0.86022 1.32255,1.90322 1.32257,3.12899 l 0,7.03218 -14.77402,0 0,2.77416 c -1e-5,0.15054 0.0753,0.22581 0.2258,0.22581 l 14.54822,0 0,4.22575 -14.54822,0 c -1.2258,0 -2.27955,-0.4301 -3.16125,-1.2903 -0.86021,-0.88171 -1.29031,-1.93546 -1.29031,-3.16126 l 0,-9.80634 c 0,-1.22577 0.4301,-2.26877 1.29031,-3.12899 0.8817,-0.8817 1.93545,-1.32255 3.16125,-1.32257 l 10.09666,0 m -10.32246,7.22573 10.54826,0 0,-2.77417 c -10e-6,-0.15052 -0.0753,-0.22579 -0.2258,-0.2258 l -10.09666,0 c -0.15054,1e-5 -0.22581,0.0753 -0.2258,0.2258 l 0,2.77417" />
+ <path
+ inkscape:connector-curvature="0"
+ id="path3169"
+ style="fill:#ff8800;fill-opacity:1"
+ d="m -690.26928,341.16755 c 1.22577,2e-5 2.26877,0.44087 3.12899,1.32257 0.88169,0.86022 1.32255,1.90322 1.32257,3.12899 l 0,9.80634 c -2e-5,1.2258 -0.44088,2.27955 -1.32257,3.16126 -0.86022,0.8602 -1.90322,1.2903 -3.12899,1.2903 l -14.54822,0 0,-24.83842 4.22576,0 0,6.12896 10.32246,0 m 0.2258,14.2579 0,-9.80634 c -10e-6,-0.15052 -0.0753,-0.22579 -0.2258,-0.2258 l -10.09666,0 c -0.15054,1e-5 -0.22581,0.0753 -0.2258,0.2258 l 0,9.80634 c -1e-5,0.15054 0.0753,0.22581 0.2258,0.22581 l 10.09666,0 c 0.15052,0 0.22579,-0.0753 0.2258,-0.22581" />
+ </g>
+ <g
+ transform="translate(155.71498,208.28104)"
+ id="g3408">
+ <path
+ sodipodi:nodetypes="ccccc"
+ inkscape:connector-curvature="0"
+ id="path3410"
+ d="m 196.89624,349.49384 -8.06851,4.8684 0,10.62195 8.06851,-4.8684 z"
+ style="fill:#ff8800;fill-opacity:1;stroke:none" />
+ <path
+ sodipodi:nodetypes="ccccc"
+ inkscape:connector-curvature="0"
+ style="fill:#ff8800;fill-opacity:1;stroke:none"
+ d="m 188.28608,343.70506 8.00049,4.84516 -8.00049,4.78822 -8.00051,-4.78822 z"
+ id="path3412" />
+ <path
+ style="fill:#ff8800;fill-opacity:1;stroke:none"
+ d="m 187.69852,366.86825 -8.06851,4.8684 0,10.62195 8.06851,-4.8684 z"
+ id="path3414"
+ inkscape:connector-curvature="0"
+ sodipodi:nodetypes="ccccc" />
+ <path
+ id="path3416"
+ d="m 179.08836,361.07947 8.00049,4.84516 -8.00049,4.78822 -8.00051,-4.78822 z"
+ style="fill:#ff8800;fill-opacity:1;stroke:none"
+ inkscape:connector-curvature="0"
+ sodipodi:nodetypes="ccccc" />
+ <path
+ sodipodi:nodetypes="ccccc"
+ inkscape:connector-curvature="0"
+ id="path3418"
+ d="m 206.05702,366.86825 -8.06851,4.8684 0,10.62195 8.06851,-4.8684 z"
+ style="fill:#ff8800;fill-opacity:1;stroke:none" />
+ <path
+ sodipodi:nodetypes="cccccc"
+ inkscape:connector-curvature="0"
+ id="path3420"
+ d="m 197.41294,361.10753 -8.00027,4.84524 8.00686,4.78602 c 0.0406,-0.0455 6.64235,-2.44836 7.44329,-10.21298 -2.81627,1.46 -3.71023,2.32033 -7.44988,0.58175 z"
+ style="fill:#404042;fill-opacity:1;stroke:none" />
+ </g>
+ </g>
+</svg>
--- a/web/formfields.py Fri May 23 18:35:13 2014 +0200
+++ b/web/formfields.py Fri Jun 27 11:48:26 2014 +0200
@@ -382,7 +382,7 @@
called by widgets which requires a vocabulary.
It should return a list of tuple (label, value), where value
- *must be an unicode string*, not a typed value.
+ *must be a unicode string*, not a typed value.
"""
assert self.choices is not None
if callable(self.choices):
--- a/web/formwidgets.py Fri May 23 18:35:13 2014 +0200
+++ b/web/formwidgets.py Fri Jun 27 11:48:26 2014 +0200
@@ -313,7 +313,7 @@
# basic html widgets ###########################################################
class TextInput(Input):
- """Simple <input type='text'>, will return an unicode string."""
+ """Simple <input type='text'>, will return a unicode string."""
type = 'text'
@@ -323,7 +323,7 @@
class PasswordSingleInput(Input):
- """Simple <input type='password'>, will return an utf-8 encoded string.
+ """Simple <input type='password'>, will return a utf-8 encoded string.
You may prefer using the :class:`~cubicweb.web.formwidgets.PasswordInput`
widget which handles password confirmation.
@@ -340,7 +340,7 @@
class PasswordInput(Input):
"""<input type='password'> and a confirmation input. Form processing will
fail if password and confirmation differs, else it will return the password
- as an utf-8 encoded string.
+ as a utf-8 encoded string.
"""
type = 'password'
@@ -381,7 +381,7 @@
class HiddenInput(Input):
- """Simple <input type='hidden'> for hidden value, will return an unicode
+ """Simple <input type='hidden'> for hidden value, will return a unicode
string.
"""
type = 'hidden'
@@ -390,7 +390,7 @@
class ButtonInput(Input):
- """Simple <input type='button'>, will return an unicode string.
+ """Simple <input type='button'>, will return a unicode string.
If you want a global form button, look at the :class:`Button`,
:class:`SubmitButton`, :class:`ResetButton` and :class:`ImgButton` below.
@@ -399,7 +399,7 @@
class TextArea(FieldWidget):
- """Simple <textarea>, will return an unicode string."""
+ """Simple <textarea>, will return a unicode string."""
def _render(self, form, field, renderer):
values, attrs = self.values_and_attributes(form, field)
@@ -421,7 +421,7 @@
class FCKEditor(TextArea):
- """FCKEditor enabled <textarea>, will return an unicode string containing
+ """FCKEditor enabled <textarea>, will return a unicode string containing
HTML formated text.
"""
def __init__(self, *args, **kwargs):
@@ -435,7 +435,7 @@
class Select(FieldWidget):
"""Simple <select>, for field having a specific vocabulary. Will return
- an unicode string, or a list of unicode strings.
+ a unicode string, or a list of unicode strings.
"""
vocabulary_widget = True
default_size = 10
@@ -631,7 +631,7 @@
class DateTimePicker(TextInput):
"""<input type='text'> + javascript date/time picker for date or datetime
- fields. Will return the date or datetime as an unicode string.
+ fields. Will return the date or datetime as a unicode string.
"""
monthnames = ('january', 'february', 'march', 'april',
'may', 'june', 'july', 'august',
@@ -673,7 +673,7 @@
class JQueryDatePicker(FieldWidget):
"""Use jquery.ui.datepicker to define a date picker. Will return the date as
- an unicode string.
+ a unicode string.
"""
needs_js = ('jquery.ui.js', )
needs_css = ('jquery.ui.css',)
@@ -933,7 +933,7 @@
class EditableURLWidget(FieldWidget):
- """Custom widget to edit separatly an url path / query string (used by
+    """Custom widget to edit separately a URL path / query string (used by
default for the `path` attribute of `Bookmark` entities).
It deals with url quoting nicely so that the user edit the unquoted value.
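
These widget docstrings describe what each widget hands back to form processing: unicode strings for most inputs, utf-8 encoded strings for the password widgets. A widget is usually attached to an attribute through uicfg; a minimal sketch, assuming the standard autoform configuration hooks (the entity type and attribute are only examples):

    from cubicweb.web.views import uicfg
    from cubicweb.web import formwidgets as fwdgs

    _affk = uicfg.autoform_field_kwargs
    # render Bookmark.title with a plain text input, which returns a unicode string
    _affk.tag_attribute(('Bookmark', 'title'), {'widget': fwdgs.TextInput()})
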
--- a/web/htmlwidgets.py Fri May 23 18:35:13 2014 +0200
+++ b/web/htmlwidgets.py Fri Jun 27 11:48:26 2014 +0200
@@ -153,8 +153,6 @@
else:
return u'<li class="%s">' % self.liclass
- return self.label
-
def _render(self):
self.w(u'%s%s</li>' % (self._start_li(), self.label))
--- a/web/request.py Fri May 23 18:35:13 2014 +0200
+++ b/web/request.py Fri Jun 27 11:48:26 2014 +0200
@@ -1027,6 +1027,10 @@
self.session = DBAPISession(None)
self.cnx = self.user = _NeedAuthAccessMock()
+ @property
+ def transaction_data(self):
+ return self.cnx.transaction_data
+
def set_cnx(self, cnx):
self.cnx = cnx
self.session = cnx._session
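
The transaction_data property added above simply forwards to the underlying connection, so web-side code can use the same per-transaction scratch dictionary as repository-side code. A minimal sketch of that kind of usage (the key name is illustrative):

    def remember_created(req, eid):
        # transaction_data behaves like a plain dict scoped to the current transaction
        created = req.transaction_data.setdefault('example-created-eids', set())
        created.add(eid)
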
--- a/web/schemaviewer.py Fri May 23 18:35:13 2014 +0200
+++ b/web/schemaviewer.py Fri Jun 27 11:48:26 2014 +0200
@@ -32,7 +32,7 @@
class SchemaViewer(object):
- """return an ureport layout for some part of a schema"""
+ """return a ureport layout for some part of a schema"""
def __init__(self, req=None, encoding=None):
self.req = req
if req is not None:
--- a/web/test/unittest_views_actions.py Fri May 23 18:35:13 2014 +0200
+++ b/web/test/unittest_views_actions.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -23,21 +23,19 @@
class ActionsTC(CubicWebTC):
def test_view_action(self):
- req = self.request(vid='rss', rql='CWUser X')
- rset = self.execute('CWUser X')
- actions = self.vreg['actions'].poss_visible_objects(req, rset=rset)
- vaction = [action for action in actions if action.__regid__ == 'view'][0]
- self.assertEqual(vaction.url(), 'http://testing.fr/cubicweb/view?rql=CWUser%20X')
+ with self.admin_access.web_request(vid='rss', rql='CWUser X') as req:
+ rset = req.execute('CWUser X')
+ actions = self.vreg['actions'].poss_visible_objects(req, rset=rset)
+ vaction = [action for action in actions if action.__regid__ == 'view'][0]
+ self.assertEqual(vaction.url(), 'http://testing.fr/cubicweb/view?rql=CWUser%20X')
def test_has_editable_relations(self):
"""ensure has_editable_relation predicate used by ModifyAction
return positive score if there is only some inlined forms
"""
use_email = self.schema['use_email'].rdefs['CWUser', 'EmailAddress']
- with self.temporary_permissions((use_email, {'add': ('guests',)}),
- ):
- with self.login('anon'):
- req = self.request()
+ with self.temporary_permissions((use_email, {'add': ('guests',)})):
+ with self.new_access('anon').web_request() as req:
predicate = actions.has_editable_relation()
self.assertEqual(predicate(None, req, rset=req.user.as_rset()),
1)
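
This module, like the other test modules updated below, is ported from the old self.request() / self.login() idiom to the 3.19 access objects: requests and connections are obtained from self.admin_access or self.new_access(login) and used as context managers. A minimal sketch of the resulting test shape, with an illustrative query and fixture:

    from cubicweb.devtools.testlib import CubicWebTC

    class ExampleTC(CubicWebTC):
        def setup_database(self):
            # repository-side connection for fixture data
            with self.admin_access.repo_cnx() as cnx:
                cnx.create_entity('Tag', name=u'example')
                cnx.commit()

        def test_groups_visible(self):
            # web-side request bound to the admin session, closed automatically
            with self.admin_access.web_request() as req:
                rset = req.execute('Any X WHERE X is CWGroup')
                self.assertTrue(rset.rowcount)
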
--- a/web/test/unittest_views_errorform.py Fri May 23 18:35:13 2014 +0200
+++ b/web/test/unittest_views_errorform.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -15,25 +15,20 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+import re
+import sys
from logilab.common.testlib import unittest_main
-from logilab.mtconverter import html_unescape
-from cubicweb import Forbidden, ValidationError
+from cubicweb import Forbidden
from cubicweb.devtools.testlib import CubicWebTC
-from cubicweb.utils import json
-from cubicweb.view import StartupView, TRANSITIONAL_DOCTYPE_NOEXT
+from cubicweb.view import StartupView
from cubicweb.web import Redirect
-from cubicweb.web.htmlwidgets import TableWidget
-from cubicweb.web.views import vid_from_rset
-import re
-import hmac
class ErrorViewTC(CubicWebTC):
def setUp(self):
super(ErrorViewTC, self).setUp()
- self.req = self.request()
self.vreg.config['submit-mail'] = "test@logilab.fr"
self.vreg.config['print-traceback'] = "yes"
@@ -48,15 +43,16 @@
raise ValueError('This is wrong')
with self.temporary_appobjects(MyWrongView):
- try:
- self.view('my-view')
- except Exception as e:
- import sys
- self.req.data['excinfo'] = sys.exc_info()
- self.req.data['ex'] = e
- html = self.view('error', req=self.req)
- self.failUnless(re.search(r'^<input name="__signature" type="hidden" value="[0-9a-f]{32}" />$',
- html.source, re.M))
+ with self.admin_access.web_request() as req:
+ try:
+ self.view('my-view', req=req)
+ except Exception as e:
+ req.data['excinfo'] = sys.exc_info()
+ req.data['ex'] = e
+ html = self.view('error', req=req)
+ self.failUnless(re.search(r'^<input name="__signature" type="hidden" '
+ 'value="[0-9a-f]{32}" />$',
+ html.source, re.M))
def test_error_submit_nosig(self):
@@ -64,36 +60,33 @@
tests that the reportbug controller refuses submission if
there is not content signature
"""
-
- self.req.form = {'description': u'toto',
- }
- with self.assertRaises(Forbidden) as cm:
- self.ctrl_publish(self.req, 'reportbug')
+ with self.admin_access.web_request() as req:
+ req.form = {'description': u'toto'}
+ with self.assertRaises(Forbidden) as cm:
+ self.ctrl_publish(req, 'reportbug')
def test_error_submit_wrongsig(self):
"""
tests that the reportbug controller refuses submission if the
content signature is invalid
"""
-
- self.req.form = {'__signature': 'X',
- 'description': u'toto',
- }
- with self.assertRaises(Forbidden) as cm:
- self.ctrl_publish(self.req, 'reportbug')
+ with self.admin_access.web_request() as req:
+ req.form = {'__signature': 'X',
+ 'description': u'toto'}
+ with self.assertRaises(Forbidden) as cm:
+ self.ctrl_publish(req, 'reportbug')
def test_error_submit_ok(self):
"""
tests that the reportbug controller accept the email submission if the
content signature is valid
"""
-
- sign = self.vreg.config.sign_text('toto')
- self.req.form = {'__signature': sign,
- 'description': u'toto',
- }
- with self.assertRaises(Redirect) as cm:
- self.ctrl_publish(self.req, 'reportbug')
+ with self.admin_access.web_request() as req:
+ sign = self.vreg.config.sign_text('toto')
+ req.form = {'__signature': sign,
+ 'description': u'toto'}
+ with self.assertRaises(Redirect) as cm:
+ self.ctrl_publish(req, 'reportbug')
if __name__ == '__main__':
unittest_main()
--- a/web/test/unittest_views_json.py Fri May 23 18:35:13 2014 +0200
+++ b/web/test/unittest_views_json.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# copyright 2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -18,9 +18,6 @@
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
from cubicweb.devtools.testlib import CubicWebTC
-from cubicweb.utils import json
-
-from cubicweb.web.application import anonymized_request
class JsonViewsTC(CubicWebTC):
anonymize = True
@@ -31,47 +28,47 @@
self.config.global_set_option('anonymize-jsonp-queries', self.anonymize)
def test_json_rsetexport(self):
- req = self.request()
- rset = req.execute('Any GN,COUNT(X) GROUPBY GN ORDERBY GN WHERE X in_group G, G name GN')
- data = self.view('jsonexport', rset)
- self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json'])
- self.assertListEqual(data, [["guests", 1], ["managers", 1]])
+ with self.admin_access.web_request() as req:
+ rset = req.execute('Any GN,COUNT(X) GROUPBY GN ORDERBY GN WHERE X in_group G, G name GN')
+ data = self.view('jsonexport', rset)
+ self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json'])
+ self.assertListEqual(data, [["guests", 1], ["managers", 1]])
def test_json_rsetexport_empty_rset(self):
- req = self.request()
- rset = req.execute('Any X WHERE X is CWUser, X login "foobarbaz"')
- data = self.view('jsonexport', rset)
- self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json'])
- self.assertListEqual(data, [])
+ with self.admin_access.web_request() as req:
+ rset = req.execute('Any X WHERE X is CWUser, X login "foobarbaz"')
+ data = self.view('jsonexport', rset)
+ self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json'])
+ self.assertListEqual(data, [])
def test_json_rsetexport_with_jsonp(self):
- req = self.request()
- req.form.update({'callback': 'foo',
- 'rql': 'Any GN,COUNT(X) GROUPBY GN ORDERBY GN WHERE X in_group G, G name GN',
- })
- data = self.ctrl_publish(req, ctrl='jsonp')
- self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/javascript'])
- # because jsonp anonymizes data, only 'guests' group should be found
- self.assertEqual(data, 'foo(%s)' % self.res_jsonp_data)
+ with self.admin_access.web_request() as req:
+ req.form.update({'callback': 'foo',
+ 'rql': 'Any GN,COUNT(X) GROUPBY GN ORDERBY GN '
+ 'WHERE X in_group G, G name GN'})
+ data = self.ctrl_publish(req, ctrl='jsonp')
+ self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/javascript'])
+ # because jsonp anonymizes data, only 'guests' group should be found
+ self.assertEqual(data, 'foo(%s)' % self.res_jsonp_data)
def test_json_rsetexport_with_jsonp_and_bad_vid(self):
- req = self.request()
- req.form.update({'callback': 'foo',
- 'vid': 'table', # <-- this parameter should be ignored by jsonp controller
- 'rql': 'Any GN,COUNT(X) GROUPBY GN ORDERBY GN WHERE X in_group G, G name GN',
- })
- data = self.ctrl_publish(req, ctrl='jsonp')
- self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/javascript'])
- # result should be plain json, not the table view
- self.assertEqual(data, 'foo(%s)' % self.res_jsonp_data)
+ with self.admin_access.web_request() as req:
+ req.form.update({'callback': 'foo',
+ 'vid': 'table', # <-- this parameter should be ignored by jsonp controller
+ 'rql': 'Any GN,COUNT(X) GROUPBY GN ORDERBY GN '
+ 'WHERE X in_group G, G name GN'})
+ data = self.ctrl_publish(req, ctrl='jsonp')
+ self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/javascript'])
+ # result should be plain json, not the table view
+ self.assertEqual(data, 'foo(%s)' % self.res_jsonp_data)
def test_json_ersetexport(self):
- req = self.request()
- rset = req.execute('Any G ORDERBY GN WHERE G is CWGroup, G name GN')
- data = self.view('ejsonexport', rset)
- self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json'])
- self.assertEqual(data[0]['name'], 'guests')
- self.assertEqual(data[1]['name'], 'managers')
+ with self.admin_access.web_request() as req:
+ rset = req.execute('Any G ORDERBY GN WHERE G is CWGroup, G name GN')
+ data = self.view('ejsonexport', rset)
+ self.assertEqual(req.headers_out.getRawHeaders('content-type'), ['application/json'])
+ self.assertEqual(data[0]['name'], 'guests')
+ self.assertEqual(data[1]['name'], 'managers')
class NotAnonymousJsonViewsTC(JsonViewsTC):
--- a/web/test/unittest_views_pyviews.py Fri May 23 18:35:13 2014 +0200
+++ b/web/test/unittest_views_pyviews.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -21,20 +21,22 @@
class PyViewsTC(CubicWebTC):
def test_pyvaltable(self):
- view = self.vreg['views'].select('pyvaltable', self.request(),
- pyvalue=[[1, 'a'], [2, 'b']])
- content = view.render(pyvalue=[[1, 'a'], [2, 'b']],
- headers=['num', 'char'])
- self.assertEqual(content.strip(), '''<table class="listing"><tbody>\
+ with self.admin_access.web_request() as req:
+ view = self.vreg['views'].select('pyvaltable', req,
+ pyvalue=[[1, 'a'], [2, 'b']])
+ content = view.render(pyvalue=[[1, 'a'], [2, 'b']],
+ headers=['num', 'char'])
+ self.assertEqual(content.strip(), '''<table class="listing"><tbody>\
<tr class="even" onmouseout="$(this).removeClass("highlighted")" onmouseover="$(this).addClass("highlighted");"><td >1</td><td >a</td></tr>
<tr class="odd" onmouseout="$(this).removeClass("highlighted")" onmouseover="$(this).addClass("highlighted");"><td >2</td><td >b</td></tr>
</tbody></table>''')
def test_pyvallist(self):
- view = self.vreg['views'].select('pyvallist', self.request(),
- pyvalue=[1, 'a'])
- content = view.render(pyvalue=[1, 'a'])
- self.assertEqual(content.strip(), '''<ul>
+ with self.admin_access.web_request() as req:
+ view = self.vreg['views'].select('pyvallist', req,
+ pyvalue=[1, 'a'])
+ content = view.render(pyvalue=[1, 'a'])
+ self.assertEqual(content.strip(), '''<ul>
<li>1</li>
<li>a</li>
</ul>''')
--- a/web/test/unittest_views_staticcontrollers.py Fri May 23 18:35:13 2014 +0200
+++ b/web/test/unittest_views_staticcontrollers.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,5 +1,25 @@
+# -*- coding: utf-8 -*-
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+from contextlib import contextmanager
+
from logilab.common import tempattr
-from logilab.common.testlib import tag, Tags
+from logilab.common.testlib import Tags
from cubicweb.devtools.testlib import CubicWebTC
import os
@@ -9,50 +29,46 @@
from cubicweb.utils import HTMLHead
from cubicweb.web.views.staticcontrollers import ConcatFilesHandler
-class StaticControllerCacheTC(CubicWebTC):
+class staticfilespublishermixin(object):
+ @contextmanager
+ def _publish_static_files(self, url, header={}):
+ with self.admin_access.web_request(headers=header) as req:
+ req._url = url
+ self.app_handle_request(req, url)
+ yield req
+
+class StaticControllerCacheTC(staticfilespublishermixin, CubicWebTC):
tags = CubicWebTC.tags | Tags('static_controller', 'cache', 'http')
-
- def _publish_static_files(self, url, header={}):
- req = self.request(headers=header)
- req._url = url
- return self.app_handle_request(req, url), req
-
def test_static_file_are_cached(self):
- _, req = self._publish_static_files('data/cubicweb.css')
- self.assertEqual(200, req.status_out)
- self.assertIn('last-modified', req.headers_out)
+ with self._publish_static_files('data/cubicweb.css') as req:
+ self.assertEqual(200, req.status_out)
+ self.assertIn('last-modified', req.headers_out)
next_headers = {
'if-modified-since': req.get_response_header('last-modified', raw=True),
}
- _, req = self._publish_static_files('data/cubicweb.css', next_headers)
- self.assertEqual(304, req.status_out)
+ with self._publish_static_files('data/cubicweb.css', next_headers) as req:
+ self.assertEqual(304, req.status_out)
-class DataControllerTC(CubicWebTC):
-
+class DataControllerTC(staticfilespublishermixin, CubicWebTC):
tags = CubicWebTC.tags | Tags('static_controller', 'data', 'http')
- def _publish_static_files(self, url, header={}):
- req = self.request(headers=header)
- req._url = url
- return self.app_handle_request(req, url), req
-
def _check_datafile_ok(self, fname):
- _, req = self._publish_static_files(fname)
- self.assertEqual(200, req.status_out)
- self.assertIn('last-modified', req.headers_out)
+ with self._publish_static_files(fname) as req:
+ self.assertEqual(200, req.status_out)
+ self.assertIn('last-modified', req.headers_out)
next_headers = {
'if-modified-since': req.get_response_header('last-modified', raw=True),
}
- _, req = self._publish_static_files(fname, next_headers)
- self.assertEqual(304, req.status_out)
+ with self._publish_static_files(fname, next_headers) as req:
+ self.assertEqual(304, req.status_out)
def _check_no_datafile(self, fname):
- _, req = self._publish_static_files(fname)
- self.assertEqual(404, req.status_out)
+ with self._publish_static_files(fname) as req:
+ self.assertEqual(404, req.status_out)
def test_static_data_mode(self):
hash = self.vreg.config.instance_md5_version()
@@ -83,12 +99,15 @@
for fname in glob.glob(osp.join(uicachedir, 'cache_concat_*')):
os.unlink(osp.join(uicachedir, fname))
+ @contextmanager
def _publish_js_files(self, js_files):
- req = self.request()
- head = HTMLHead(req)
- url = head.concat_urls([req.data_url(js_file) for js_file in js_files])[len(req.base_url()):]
- req._url = url
- return self.app_handle_request(req, url), req
+ with self.admin_access.web_request() as req:
+ head = HTMLHead(req)
+ url = head.concat_urls([req.data_url(js_file)
+ for js_file in js_files])[len(req.base_url()):]
+ req._url = url
+ res = self.app_handle_request(req, url)
+ yield res, req
def expected_content(self, js_files):
content = u''
@@ -101,14 +120,14 @@
def test_cache(self):
js_files = ('cubicweb.ajax.js', 'jquery.js')
- result, req = self._publish_js_files(js_files)
- self.assertNotEqual(404, req.status_out)
- # check result content
- self.assertEqual(result, self.expected_content(js_files))
- # make sure we kept a cached version on filesystem
- concat_hander = ConcatFilesHandler(self.config)
- filepath = concat_hander.build_filepath(js_files)
- self.assertTrue(osp.isfile(filepath))
+ with self._publish_js_files(js_files) as (result, req):
+ self.assertNotEqual(404, req.status_out)
+ # check result content
+ self.assertEqual(result, self.expected_content(js_files))
+ # make sure we kept a cached version on filesystem
+ concat_hander = ConcatFilesHandler(self.config)
+ filepath = concat_hander.build_filepath(js_files)
+ self.assertTrue(osp.isfile(filepath))
def test_invalid_file_in_debug_mode(self):
@@ -116,18 +135,18 @@
# in debug mode, an error is raised
self.config.debugmode = True
try:
- result, req = self._publish_js_files(js_files)
- #print result
- self.assertEqual(404, req.status_out)
+ with self._publish_js_files(js_files) as (result, req):
+ #print result
+ self.assertEqual(404, req.status_out)
finally:
self.config.debugmode = False
def test_invalid_file_in_production_mode(self):
js_files = ('cubicweb.ajax.js', 'dummy.js')
- result, req = self._publish_js_files(js_files)
- self.assertNotEqual(404, req.status_out)
- # check result content
- self.assertEqual(result, self.expected_content(js_files))
+ with self._publish_js_files(js_files) as (result, req):
+ self.assertNotEqual(404, req.status_out)
+ # check result content
+ self.assertEqual(result, self.expected_content(js_files))
if __name__ == '__main__':
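
The helpers in this module are rewritten as context managers so that the request stays usable while assertions run against it and is released when the with block exits. The same pattern can be factored out of a test class; a minimal sketch with illustrative names:

    from contextlib import contextmanager

    @contextmanager
    def published(testcase, url, headers=None):
        # publish url through the application and hand back both result and request,
        # letting the enclosing with block close the underlying session
        with testcase.admin_access.web_request(headers=headers or {}) as req:
            req._url = url
            result = testcase.app_handle_request(req, url)
            yield result, req
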
--- a/web/test/unittest_views_xmlrss.py Fri May 23 18:35:13 2014 +0200
+++ b/web/test/unittest_views_xmlrss.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,13 +1,15 @@
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.web.views.xmlrss import SERIALIZERS
+
class EntityXMLViewTC(CubicWebTC):
"""see also cw.sobjects.test.unittest_parsers"""
def test(self):
- req = self.request(relation=['tags-object', 'in_group-subject',
- 'in_state-subject', 'use_email-subject'])
- self.assertMultiLineEqual(
- req.user.view('xml'),
- '''\
+ rels = ['tags-object', 'in_group-subject',
+ 'in_state-subject', 'use_email-subject']
+ with self.admin_access.web_request(relation=rels) as req:
+ self.assertMultiLineEqual(
+ req.user.view('xml'),
+ '''\
<CWUser eid="6" cwuri="None6" cwsource="system">
<login>admin</login>
<upassword/>
--- a/web/test/unittest_viewselector.py Fri May 23 18:35:13 2014 +0200
+++ b/web/test/unittest_viewselector.py Fri Jun 27 11:48:26 2014 +0200
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -21,16 +21,17 @@
from logilab.common.testlib import unittest_main
from cubicweb.devtools.testlib import CubicWebTC
-from cubicweb import CW_SOFTWARE_ROOT as BASE, Binary, UnknownProperty
-from cubicweb.predicates import (match_user_groups, is_instance,
+from cubicweb import Binary, UnknownProperty
+from cubicweb.predicates import (is_instance,
specified_etype_implements, rql_condition)
from cubicweb.web import NoSelectableObject
from cubicweb.web.action import Action
-from cubicweb.web.views import (
- primary, baseviews, tableview, editforms, calendar, management, embedding,
- actions, startup, cwuser, schema, xbel, vcard, owl, treeview, idownloadable,
- wdoc, debug, cwuser, cwproperties, cwsources, workflow, xmlrss, rdf,
- csvexport, json, undohistory)
+
+from cubicweb.web.views import (primary, baseviews, tableview,
+ editforms, management, actions, startup, cwuser, schema, xbel,
+ vcard, owl, treeview, idownloadable, wdoc, debug, cwuser,
+ cwproperties, cwsources, xmlrss, rdf, csvexport, json,
+ undohistory)
from cubes.folder import views as folderviews
@@ -55,11 +56,12 @@
class ViewSelectorTC(CubicWebTC):
def setup_database(self):
- req = self.request()
- req.create_entity('BlogEntry', title=u"une news !", content=u"cubicweb c'est beau")
- req.create_entity('Bookmark', title=u"un signet !", path=u"view?vid=index")
- req.create_entity('EmailAddress', address=u"devel@logilab.fr", alias=u'devel')
- req.create_entity('Tag', name=u'x')
+ with self.admin_access.repo_cnx() as cnx:
+ cnx.create_entity('BlogEntry', title=u"une news !", content=u"cubicweb c'est beau")
+ cnx.create_entity('Bookmark', title=u"un signet !", path=u"view?vid=index")
+ cnx.create_entity('EmailAddress', address=u"devel@logilab.fr", alias=u'devel')
+ cnx.create_entity('Tag', name=u'x')
+ cnx.commit()
class VRegistryTC(ViewSelectorTC):
"""test the view selector"""
@@ -85,389 +87,377 @@
assert self.vreg['views']['propertiesform']
def test_possible_views_none_rset(self):
- req = self.request()
- self.assertListEqual(self.pviews(req, None),
- [('cw.sources-management', cwsources.CWSourcesManagementView),
- ('cw.users-and-groups-management', cwuser.UsersAndGroupsManagementView),
- ('gc', debug.GCView),
- ('index', startup.IndexView),
- ('info', debug.ProcessInformationView),
- ('manage', startup.ManageView),
- ('owl', owl.OWLView),
- ('propertiesform', cwproperties.CWPropertiesForm),
- ('registry', debug.RegistryView),
- ('schema', schema.SchemaView),
- ('siteinfo', debug.SiteInfoView),
- ('systempropertiesform', cwproperties.SystemCWPropertiesForm),
- ('tree', folderviews.FolderTreeView),
- ('undohistory', undohistory.UndoHistoryView),
- ])
+ with self.admin_access.web_request() as req:
+ self.assertListEqual(self.pviews(req, None),
+ [('cw.sources-management', cwsources.CWSourcesManagementView),
+ ('cw.users-and-groups-management', cwuser.UsersAndGroupsManagementView),
+ ('gc', debug.GCView),
+ ('index', startup.IndexView),
+ ('info', debug.ProcessInformationView),
+ ('manage', startup.ManageView),
+ ('owl', owl.OWLView),
+ ('propertiesform', cwproperties.CWPropertiesForm),
+ ('registry', debug.RegistryView),
+ ('schema', schema.SchemaView),
+ ('siteinfo', debug.SiteInfoView),
+ ('systempropertiesform', cwproperties.SystemCWPropertiesForm),
+ ('tree', folderviews.FolderTreeView),
+ ('undohistory', undohistory.UndoHistoryView)])
def test_possible_views_noresult(self):
- req = self.request()
- rset = req.execute('Any X WHERE X eid 999999')
- self.assertListEqual([('jsonexport', json.JsonRsetView)],
- self.pviews(req, rset))
+ with self.admin_access.web_request() as req:
+ rset = req.execute('Any X WHERE X eid 999999')
+ self.assertListEqual([('jsonexport', json.JsonRsetView)],
+ self.pviews(req, rset))
def test_possible_views_one_egroup(self):
- req = self.request()
- rset = req.execute('CWGroup X WHERE X name "managers"')
- self.assertListEqual(self.pviews(req, rset),
- [('csvexport', csvexport.CSVRsetView),
- ('ecsvexport', csvexport.CSVEntityView),
- ('ejsonexport', json.JsonEntityView),
- ('filetree', treeview.FileTreeView),
- ('jsonexport', json.JsonRsetView),
- ('list', baseviews.ListView),
- ('oneline', baseviews.OneLineView),
- ('owlabox', owl.OWLABOXView),
- ('primary', cwuser.CWGroupPrimaryView)] + RDFVIEWS + [
- ('rsetxml', xmlrss.XMLRsetView),
- ('rss', xmlrss.RSSView),
- ('sameetypelist', baseviews.SameETypeListView),
- ('security', management.SecurityManagementView),
- ('table', tableview.RsetTableView),
- ('text', baseviews.TextView),
- ('treeview', treeview.TreeView),
- ('xbel', xbel.XbelView),
- ('xml', xmlrss.XMLView),
- ])
+ with self.admin_access.web_request() as req:
+ rset = req.execute('CWGroup X WHERE X name "managers"')
+ self.assertListEqual(self.pviews(req, rset),
+ [('csvexport', csvexport.CSVRsetView),
+ ('ecsvexport', csvexport.CSVEntityView),
+ ('ejsonexport', json.JsonEntityView),
+ ('filetree', treeview.FileTreeView),
+ ('jsonexport', json.JsonRsetView),
+ ('list', baseviews.ListView),
+ ('oneline', baseviews.OneLineView),
+ ('owlabox', owl.OWLABOXView),
+ ('primary', cwuser.CWGroupPrimaryView)] + \
+ RDFVIEWS + \
+ [('rsetxml', xmlrss.XMLRsetView),
+ ('rss', xmlrss.RSSView),
+ ('sameetypelist', baseviews.SameETypeListView),
+ ('security', management.SecurityManagementView),
+ ('table', tableview.RsetTableView),
+ ('text', baseviews.TextView),
+ ('treeview', treeview.TreeView),
+ ('xbel', xbel.XbelView),
+ ('xml', xmlrss.XMLView)])
def test_possible_views_multiple_egroups(self):
- req = self.request()
- rset = req.execute('CWGroup X')
- self.assertListEqual(self.pviews(req, rset),
- [('csvexport', csvexport.CSVRsetView),
- ('ecsvexport', csvexport.CSVEntityView),
- ('ejsonexport', json.JsonEntityView),
- ('filetree', treeview.FileTreeView),
- ('jsonexport', json.JsonRsetView),
- ('list', baseviews.ListView),
- ('oneline', baseviews.OneLineView),
- ('owlabox', owl.OWLABOXView),
- ('primary', cwuser.CWGroupPrimaryView)] + RDFVIEWS + [
- ('rsetxml', xmlrss.XMLRsetView),
- ('rss', xmlrss.RSSView),
- ('sameetypelist', baseviews.SameETypeListView),
- ('security', management.SecurityManagementView),
- ('table', tableview.RsetTableView),
- ('text', baseviews.TextView),
- ('treeview', treeview.TreeView),
- ('xbel', xbel.XbelView),
- ('xml', xmlrss.XMLView),
- ])
+ with self.admin_access.web_request() as req:
+ rset = req.execute('CWGroup X')
+ self.assertListEqual(self.pviews(req, rset),
+ [('csvexport', csvexport.CSVRsetView),
+ ('ecsvexport', csvexport.CSVEntityView),
+ ('ejsonexport', json.JsonEntityView),
+ ('filetree', treeview.FileTreeView),
+ ('jsonexport', json.JsonRsetView),
+ ('list', baseviews.ListView),
+ ('oneline', baseviews.OneLineView),
+ ('owlabox', owl.OWLABOXView),
+ ('primary', cwuser.CWGroupPrimaryView)] + RDFVIEWS + [
+ ('rsetxml', xmlrss.XMLRsetView),
+ ('rss', xmlrss.RSSView),
+ ('sameetypelist', baseviews.SameETypeListView),
+ ('security', management.SecurityManagementView),
+ ('table', tableview.RsetTableView),
+ ('text', baseviews.TextView),
+ ('treeview', treeview.TreeView),
+ ('xbel', xbel.XbelView),
+ ('xml', xmlrss.XMLView),
+ ])
def test_propertiesform_admin(self):
assert self.vreg['views']['propertiesform']
- req1 = self.request()
- req2 = self.request()
- rset1 = req1.execute('CWUser X WHERE X login "admin"')
- rset2 = req2.execute('CWUser X WHERE X login "anon"')
- self.assertTrue(self.vreg['views'].select('propertiesform', req1, rset=None))
- self.assertTrue(self.vreg['views'].select('propertiesform', req1, rset=rset1))
- self.assertTrue(self.vreg['views'].select('propertiesform', req2, rset=rset2))
+ with self.admin_access.web_request() as req:
+ rset1 = req.execute('CWUser X WHERE X login "admin"')
+ self.assertTrue(self.vreg['views'].select('propertiesform', req, rset=None))
+ self.assertTrue(self.vreg['views'].select('propertiesform', req, rset=rset1))
+ rset2 = req.execute('CWUser X WHERE X login "anon"')
+ self.assertTrue(self.vreg['views'].select('propertiesform', req, rset=rset2))
def test_propertiesform_anon(self):
- self.login('anon')
- req1 = self.request()
- req2 = self.request()
- rset1 = req1.execute('CWUser X WHERE X login "admin"')
- rset2 = req2.execute('CWUser X WHERE X login "anon"')
- self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req1, rset=None)
- self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req1, rset=rset1)
- self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req1, rset=rset2)
+ with self.new_access('anon').web_request() as req:
+ rset1 = req.execute('CWUser X WHERE X login "admin"')
+ self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req, rset=None)
+ self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req, rset=rset1)
+ rset2 = req.execute('CWUser X WHERE X login "anon"')
+ self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req, rset=rset2)
def test_propertiesform_jdoe(self):
- self.create_user(self.request(), 'jdoe')
- self.login('jdoe')
- req1 = self.request()
- req2 = self.request()
- rset1 = req1.execute('CWUser X WHERE X login "admin"')
- rset2 = req2.execute('CWUser X WHERE X login "jdoe"')
- self.assertTrue(self.vreg['views'].select('propertiesform', req1, rset=None))
- self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req1, rset=rset1)
- self.assertTrue(self.vreg['views'].select('propertiesform', req2, rset=rset2))
+ with self.admin_access.repo_cnx() as cnx:
+ self.create_user(cnx, 'jdoe')
+ cnx.commit()
+ with self.new_access('jdoe').web_request() as req:
+ rset1 = req.execute('CWUser X WHERE X login "admin"')
+ rset2 = req.execute('CWUser X WHERE X login "jdoe"')
+ self.assertTrue(self.vreg['views'].select('propertiesform', req, rset=None))
+ self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'propertiesform', req, rset=rset1)
+ self.assertTrue(self.vreg['views'].select('propertiesform', req, rset=rset2))
def test_possible_views_multiple_different_types(self):
- req = self.request()
- rset = req.execute('Any X')
- self.assertListEqual(self.pviews(req, rset),
- [('csvexport', csvexport.CSVRsetView),
- ('ecsvexport', csvexport.CSVEntityView),
- ('ejsonexport', json.JsonEntityView),
- ('filetree', treeview.FileTreeView),
- ('jsonexport', json.JsonRsetView),
- ('list', baseviews.ListView),
- ('oneline', baseviews.OneLineView),
- ('owlabox', owl.OWLABOXView),
- ('primary', primary.PrimaryView),] + RDFVIEWS + [
- ('rsetxml', xmlrss.XMLRsetView),
- ('rss', xmlrss.RSSView),
- ('security', management.SecurityManagementView),
- ('table', tableview.RsetTableView),
- ('text', baseviews.TextView),
- ('treeview', treeview.TreeView),
- ('xbel', xbel.XbelView),
- ('xml', xmlrss.XMLView),
- ])
+ with self.admin_access.web_request() as req:
+ rset = req.execute('Any X')
+ self.assertListEqual(self.pviews(req, rset),
+ [('csvexport', csvexport.CSVRsetView),
+ ('ecsvexport', csvexport.CSVEntityView),
+ ('ejsonexport', json.JsonEntityView),
+ ('filetree', treeview.FileTreeView),
+ ('jsonexport', json.JsonRsetView),
+ ('list', baseviews.ListView),
+ ('oneline', baseviews.OneLineView),
+ ('owlabox', owl.OWLABOXView),
+ ('primary', primary.PrimaryView),] + RDFVIEWS + [
+ ('rsetxml', xmlrss.XMLRsetView),
+ ('rss', xmlrss.RSSView),
+ ('security', management.SecurityManagementView),
+ ('table', tableview.RsetTableView),
+ ('text', baseviews.TextView),
+ ('treeview', treeview.TreeView),
+ ('xbel', xbel.XbelView),
+ ('xml', xmlrss.XMLView),
+ ])
def test_possible_views_any_rset(self):
- req = self.request()
- rset = req.execute('Any N, X WHERE X in_group Y, Y name N')
- self.assertListEqual(self.pviews(req, rset),
- [('csvexport', csvexport.CSVRsetView),
- ('jsonexport', json.JsonRsetView),
- ('rsetxml', xmlrss.XMLRsetView),
- ('table', tableview.RsetTableView),
- ])
+ with self.admin_access.web_request() as req:
+ rset = req.execute('Any N, X WHERE X in_group Y, Y name N')
+ self.assertListEqual(self.pviews(req, rset),
+ [('csvexport', csvexport.CSVRsetView),
+ ('jsonexport', json.JsonRsetView),
+ ('rsetxml', xmlrss.XMLRsetView),
+ ('table', tableview.RsetTableView),
+ ])
def test_possible_views_multiple_eusers(self):
- req = self.request()
- rset = req.execute('CWUser X')
- self.assertListEqual(self.pviews(req, rset),
- [('csvexport', csvexport.CSVRsetView),
- ('ecsvexport', csvexport.CSVEntityView),
- ('ejsonexport', json.JsonEntityView),
- ('filetree', treeview.FileTreeView),
- ('foaf', cwuser.FoafView),
- ('jsonexport', json.JsonRsetView),
- ('list', baseviews.ListView),
- ('oneline', baseviews.OneLineView),
- ('owlabox', owl.OWLABOXView),
- ('primary', primary.PrimaryView)] + RDFVIEWS + [
- ('rsetxml', xmlrss.XMLRsetView),
- ('rss', xmlrss.RSSView),
- ('sameetypelist', baseviews.SameETypeListView),
- ('security', management.SecurityManagementView),
- ('table', tableview.RsetTableView),
- ('text', baseviews.TextView),
- ('treeview', treeview.TreeView),
- ('vcard', vcard.VCardCWUserView),
- ('xbel', xbel.XbelView),
- ('xml', xmlrss.XMLView),
- ])
+ with self.admin_access.web_request() as req:
+ rset = req.execute('CWUser X')
+ self.assertListEqual(self.pviews(req, rset),
+ [('csvexport', csvexport.CSVRsetView),
+ ('ecsvexport', csvexport.CSVEntityView),
+ ('ejsonexport', json.JsonEntityView),
+ ('filetree', treeview.FileTreeView),
+ ('foaf', cwuser.FoafView),
+ ('jsonexport', json.JsonRsetView),
+ ('list', baseviews.ListView),
+ ('oneline', baseviews.OneLineView),
+ ('owlabox', owl.OWLABOXView),
+ ('primary', primary.PrimaryView)] + RDFVIEWS + [
+ ('rsetxml', xmlrss.XMLRsetView),
+ ('rss', xmlrss.RSSView),
+ ('sameetypelist', baseviews.SameETypeListView),
+ ('security', management.SecurityManagementView),
+ ('table', tableview.RsetTableView),
+ ('text', baseviews.TextView),
+ ('treeview', treeview.TreeView),
+ ('vcard', vcard.VCardCWUserView),
+ ('xbel', xbel.XbelView),
+ ('xml', xmlrss.XMLView),
+ ])
def test_possible_actions_none_rset(self):
- req = self.request()
- self.assertDictEqual(self.pactionsdict(req, None, skipcategories=()),
- {'useractions': USERACTIONS,
- 'siteactions': SITEACTIONS,
- 'manage': MANAGEACTIONS,
- 'footer': FOOTERACTIONS,
+ with self.admin_access.web_request() as req:
+ self.assertDictEqual(self.pactionsdict(req, None, skipcategories=()),
+ {'useractions': USERACTIONS,
+ 'siteactions': SITEACTIONS,
+ 'manage': MANAGEACTIONS,
+ 'footer': FOOTERACTIONS})
- })
def test_possible_actions_no_entity(self):
- req = self.request()
- rset = req.execute('Any X WHERE X eid 999999')
- self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
- {'useractions': USERACTIONS,
- 'siteactions': SITEACTIONS,
- 'manage': MANAGEACTIONS,
- 'footer': FOOTERACTIONS,
- })
+ with self.admin_access.web_request() as req:
+ rset = req.execute('Any X WHERE X eid 999999')
+ self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
+ {'useractions': USERACTIONS,
+ 'siteactions': SITEACTIONS,
+ 'manage': MANAGEACTIONS,
+ 'footer': FOOTERACTIONS,
+ })
def test_possible_actions_same_type_entities(self):
- req = self.request()
- rset = req.execute('CWGroup X')
- self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
- {'useractions': USERACTIONS,
- 'siteactions': SITEACTIONS,
- 'manage': MANAGEACTIONS,
- 'footer': FOOTERACTIONS,
- 'mainactions': [actions.MultipleEditAction],
- 'moreactions': [actions.DeleteAction,
- actions.AddNewAction],
- })
+ with self.admin_access.web_request() as req:
+ rset = req.execute('CWGroup X')
+ self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
+ {'useractions': USERACTIONS,
+ 'siteactions': SITEACTIONS,
+ 'manage': MANAGEACTIONS,
+ 'footer': FOOTERACTIONS,
+ 'mainactions': [actions.MultipleEditAction],
+ 'moreactions': [actions.DeleteAction,
+ actions.AddNewAction]})
def test_possible_actions_different_types_entities(self):
- req = self.request()
- rset = req.execute('Any X')
- self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
- {'useractions': USERACTIONS,
- 'siteactions': SITEACTIONS,
- 'manage': MANAGEACTIONS,
- 'footer': FOOTERACTIONS,
- 'moreactions': [actions.DeleteAction],
- })
+ with self.admin_access.web_request() as req:
+ rset = req.execute('Any X')
+ self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
+ {'useractions': USERACTIONS,
+ 'siteactions': SITEACTIONS,
+ 'manage': MANAGEACTIONS,
+ 'footer': FOOTERACTIONS,
+ 'moreactions': [actions.DeleteAction],
+ })
def test_possible_actions_final_entities(self):
- req = self.request()
- rset = req.execute('Any N, X WHERE X in_group Y, Y name N')
- self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
- {'useractions': USERACTIONS,
- 'siteactions': SITEACTIONS,
- 'manage': MANAGEACTIONS,
- 'footer': FOOTERACTIONS,
- })
+ with self.admin_access.web_request() as req:
+ rset = req.execute('Any N, X WHERE X in_group Y, Y name N')
+ self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
+ {'useractions': USERACTIONS,
+ 'siteactions': SITEACTIONS,
+ 'manage': MANAGEACTIONS,
+ 'footer': FOOTERACTIONS,
+ })
def test_possible_actions_eetype_cwuser_entity(self):
- req = self.request()
- rset = req.execute('CWEType X WHERE X name "CWUser"')
- self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
- {'useractions': USERACTIONS,
- 'siteactions': SITEACTIONS,
- 'manage': MANAGEACTIONS,
- 'footer': FOOTERACTIONS,
- 'mainactions': [actions.ModifyAction,
- actions.ViewSameCWEType],
- 'moreactions': [actions.ManagePermissionsAction,
- actions.AddRelatedActions,
- actions.DeleteAction,
- actions.CopyAction,
- ],
- })
+ with self.admin_access.web_request() as req:
+ rset = req.execute('CWEType X WHERE X name "CWUser"')
+ self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
+ {'useractions': USERACTIONS,
+ 'siteactions': SITEACTIONS,
+ 'manage': MANAGEACTIONS,
+ 'footer': FOOTERACTIONS,
+ 'mainactions': [actions.ModifyAction,
+ actions.ViewSameCWEType],
+ 'moreactions': [actions.ManagePermissionsAction,
+ actions.AddRelatedActions,
+ actions.DeleteAction,
+ actions.CopyAction,
+ ],
+ })
def test_select_creation_form(self):
rset = None
- req = self.request()
- # creation form
- req.form['etype'] = 'CWGroup'
- self.assertIsInstance(self.vreg['views'].select('creation', req, rset=rset),
- editforms.CreationFormView)
- del req.form['etype']
- # custom creation form
- class CWUserCreationForm(editforms.CreationFormView):
- __select__ = specified_etype_implements('CWUser')
- self.vreg._loadedmods[__name__] = {}
- self.vreg.register(CWUserCreationForm)
- req.form['etype'] = 'CWUser'
- self.assertIsInstance(self.vreg['views'].select('creation', req, rset=rset),
- CWUserCreationForm)
+ with self.admin_access.web_request() as req:
+ # creation form
+ req.form['etype'] = 'CWGroup'
+ self.assertIsInstance(self.vreg['views'].select('creation', req, rset=rset),
+ editforms.CreationFormView)
+
+ with self.admin_access.web_request() as req:
+ # custom creation form
+ class CWUserCreationForm(editforms.CreationFormView):
+ __select__ = specified_etype_implements('CWUser')
+
+ self.vreg._loadedmods[__name__] = {}
+ self.vreg.register(CWUserCreationForm)
+ req.form['etype'] = 'CWUser'
+
+ self.assertIsInstance(self.vreg['views'].select('creation', req, rset=rset),
+ CWUserCreationForm)
def test_select_view(self):
# no entity
rset = None
- req = self.request()
- self.assertIsInstance(self.vreg['views'].select('index', req, rset=rset),
- startup.IndexView)
- self.assertRaises(NoSelectableObject,
- self.vreg['views'].select, 'primary', req, rset=rset)
- self.assertRaises(NoSelectableObject,
- self.vreg['views'].select, 'table', req, rset=rset)
+ with self.admin_access.web_request() as req:
+ self.assertIsInstance(self.vreg['views'].select('index', req, rset=rset),
+ startup.IndexView)
+ self.assertRaises(NoSelectableObject,
+ self.vreg['views'].select, 'primary', req, rset=rset)
+ self.assertRaises(NoSelectableObject,
+ self.vreg['views'].select, 'table', req, rset=rset)
- # no entity
- req = self.request()
- rset = req.execute('Any X WHERE X eid 999999')
- self.assertRaises(NoSelectableObject,
+ # no entity
+ rset = req.execute('Any X WHERE X eid 999999')
+ self.assertRaises(NoSelectableObject,
self.vreg['views'].select, 'index', req, rset=rset)
- self.assertRaises(NoSelectableObject,
+ self.assertRaises(NoSelectableObject,
self.vreg['views'].select, 'creation', req, rset=rset)
- self.assertRaises(NoSelectableObject,
+ self.assertRaises(NoSelectableObject,
self.vreg['views'].select, 'primary', req, rset=rset)
- self.assertRaises(NoSelectableObject,
- self.vreg['views'].select, 'table', req, rset=rset)
- # one entity
- req = self.request()
- rset = req.execute('CWGroup X WHERE X name "managers"')
- self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset),
- primary.PrimaryView)
- self.assertIsInstance(self.vreg['views'].select('list', req, rset=rset),
- baseviews.ListView)
- self.assertIsInstance(self.vreg['views'].select('edition', req, rset=rset),
- editforms.EditionFormView)
- self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset),
- tableview.RsetTableView)
- self.assertRaises(NoSelectableObject,
+ self.assertRaises(NoSelectableObject,
+ self.vreg['views'].select, 'table', req, rset=rset)
+ # one entity
+ rset = req.execute('CWGroup X WHERE X name "managers"')
+ self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset),
+ primary.PrimaryView)
+ self.assertIsInstance(self.vreg['views'].select('list', req, rset=rset),
+ baseviews.ListView)
+ self.assertIsInstance(self.vreg['views'].select('edition', req, rset=rset),
+ editforms.EditionFormView)
+ self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset),
+ tableview.RsetTableView)
+ self.assertRaises(NoSelectableObject,
self.vreg['views'].select, 'creation', req, rset=rset)
- self.assertRaises(NoSelectableObject,
+ self.assertRaises(NoSelectableObject,
self.vreg['views'].select, 'index', req, rset=rset)
- # list of entities of the same type
- req = self.request()
- rset = req.execute('CWGroup X')
- self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset),
- primary.PrimaryView)
- self.assertIsInstance(self.vreg['views'].select('list', req, rset=rset),
- baseviews.ListView)
- self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset),
- tableview.RsetTableView)
- self.assertRaises(NoSelectableObject,
+ # list of entities of the same type
+ rset = req.execute('CWGroup X')
+ self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset),
+ primary.PrimaryView)
+ self.assertIsInstance(self.vreg['views'].select('list', req, rset=rset),
+ baseviews.ListView)
+ self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset),
+ tableview.RsetTableView)
+ self.assertRaises(NoSelectableObject,
self.vreg['views'].select, 'creation', req, rset=rset)
- # list of entities of different types
- req = self.request()
- rset = req.execute('Any X')
- self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset),
+ # list of entities of different types
+ rset = req.execute('Any X')
+ self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset),
primary.PrimaryView)
- self.assertIsInstance(self.vreg['views'].select('list', req, rset=rset),
+ self.assertIsInstance(self.vreg['views'].select('list', req, rset=rset),
baseviews.ListView)
- self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset),
+ self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset),
tableview.RsetTableView)
- self.assertRaises(NoSelectableObject,
- self.vreg['views'].select, 'creation', req, rset=rset)
- self.assertRaises(NoSelectableObject,
+ self.assertRaises(NoSelectableObject,
+ self.vreg['views'].select, 'creation', req, rset=rset)
+ self.assertRaises(NoSelectableObject,
self.vreg['views'].select, 'index', req, rset=rset)
- # whatever
- req = self.request()
- rset = req.execute('Any N, X WHERE X in_group Y, Y name N')
- self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset),
+ # whatever
+ rset = req.execute('Any N, X WHERE X in_group Y, Y name N')
+ self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset),
tableview.RsetTableView)
- self.assertRaises(NoSelectableObject,
+ self.assertRaises(NoSelectableObject,
self.vreg['views'].select, 'index', req, rset=rset)
- self.assertRaises(NoSelectableObject,
+ self.assertRaises(NoSelectableObject,
self.vreg['views'].select, 'creation', req, rset=rset)
- self.assertRaises(NoSelectableObject,
- self.vreg['views'].select, 'primary', req, rset=rset)
- self.assertRaises(NoSelectableObject,
- self.vreg['views'].select, 'list', req, rset=rset)
- self.assertRaises(NoSelectableObject,
- self.vreg['views'].select, 'edition', req, rset=rset)
- # mixed query
- req = self.request()
- rset = req.execute('Any U,G WHERE U is CWUser, G is CWGroup')
- self.assertRaises(NoSelectableObject,
+ self.assertRaises(NoSelectableObject,
+ self.vreg['views'].select, 'primary', req, rset=rset)
+ self.assertRaises(NoSelectableObject,
+ self.vreg['views'].select, 'list', req, rset=rset)
+ self.assertRaises(NoSelectableObject,
self.vreg['views'].select, 'edition', req, rset=rset)
- self.assertRaises(NoSelectableObject,
+ # mixed query
+ rset = req.execute('Any U,G WHERE U is CWUser, G is CWGroup')
+ self.assertRaises(NoSelectableObject,
+ self.vreg['views'].select, 'edition', req, rset=rset)
+ self.assertRaises(NoSelectableObject,
self.vreg['views'].select, 'creation', req, rset=rset)
- self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset),
- tableview.RsetTableView)
+ self.assertIsInstance(self.vreg['views'].select('table', req, rset=rset),
+ tableview.RsetTableView)
def test_interface_selector(self):
- image = self.request().create_entity('File', data_name=u'bim.png', data=Binary('bim'))
- # image primary view priority
- req = self.request()
- rset = req.execute('File X WHERE X data_name "bim.png"')
- self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset),
- idownloadable.IDownloadablePrimaryView)
+ with self.admin_access.web_request() as req:
+ req.create_entity('File', data_name=u'bim.png', data=Binary('bim'))
+ # image primary view priority
+ rset = req.execute('File X WHERE X data_name "bim.png"')
+ self.assertIsInstance(self.vreg['views'].select('primary', req, rset=rset),
+ idownloadable.IDownloadablePrimaryView)
def test_score_entity_selector(self):
- image = self.request().create_entity('File', data_name=u'bim.png', data=Binary('bim'))
- # image/ehtml primary view priority
- req = self.request()
- rset = req.execute('File X WHERE X data_name "bim.png"')
- self.assertIsInstance(self.vreg['views'].select('image', req, rset=rset),
- idownloadable.ImageView)
- self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'ehtml', req, rset=rset)
+ with self.admin_access.web_request() as req:
+ req.create_entity('File', data_name=u'bim.png', data=Binary('bim'))
+ # image/ehtml primary view priority
+ rset = req.execute('File X WHERE X data_name "bim.png"')
+ self.assertIsInstance(self.vreg['views'].select('image', req, rset=rset),
+ idownloadable.ImageView)
+ self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'ehtml', req, rset=rset)
- fileobj = self.request().create_entity('File', data_name=u'bim.html', data=Binary('<html>bam</html'))
- # image/ehtml primary view priority
- req = self.request()
- rset = req.execute('File X WHERE X data_name "bim.html"')
- self.assertIsInstance(self.vreg['views'].select('ehtml', req, rset=rset),
- idownloadable.EHTMLView)
- self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'image', req, rset=rset)
+ fileobj = req.create_entity('File', data_name=u'bim.html', data=Binary('<html>bam</html'))
+ # image/ehtml primary view priority
+ rset = req.execute('File X WHERE X data_name "bim.html"')
+ self.assertIsInstance(self.vreg['views'].select('ehtml', req, rset=rset),
+ idownloadable.EHTMLView)
+ self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'image', req, rset=rset)
- fileobj = self.request().create_entity('File', data_name=u'bim.txt', data=Binary('boum'))
- # image/ehtml primary view priority
- req = self.request()
- rset = req.execute('File X WHERE X data_name "bim.txt"')
- self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'image', req, rset=rset)
- self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'ehtml', req, rset=rset)
+ fileobj = req.create_entity('File', data_name=u'bim.txt', data=Binary('boum'))
+ # image/ehtml primary view priority
+ rset = req.execute('File X WHERE X data_name "bim.txt"')
+ self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'image', req, rset=rset)
+ self.assertRaises(NoSelectableObject, self.vreg['views'].select, 'ehtml', req, rset=rset)
def _test_view(self, vid, rql, args):
- if rql is None:
- rset = None
- req = self.request()
- else:
- req = self.request()
- rset = req.execute(rql)
- try:
- obj = self.vreg['views'].select(vid, req, rset=rset, **args)
- return obj.render(**args)
- except Exception:
- print vid, rset, args
- raise
+ with self.admin_access.web_request() as req:
+ if rql is None:
+ rset = None
+ else:
+ rset = req.execute(rql)
+ try:
+ obj = self.vreg['views'].select(vid, req, rset=rset, **args)
+ return obj.render(**args)
+ except Exception:
+ print vid, rset, args
+ raise
def test_form(self):
for vid, rql, args in (
@@ -502,6 +492,7 @@
__select__ = is_instance('CWEType') & rql_condition('X name "CWEType"')
title = 'bla'
+
class RQLActionTC(ViewSelectorTC):
def setUp(self):
@@ -516,34 +507,33 @@
del self.vreg['actions']['testaction']
def test(self):
- req = self.request()
- rset = req.execute('CWEType X WHERE X name "CWEType"')
- self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
- {'useractions': USERACTIONS,
- 'siteactions': SITEACTIONS,
- 'footer': FOOTERACTIONS,
- 'manage': MANAGEACTIONS,
- 'mainactions': [actions.ModifyAction, actions.ViewSameCWEType],
- 'moreactions': [actions.ManagePermissionsAction,
- actions.AddRelatedActions,
- actions.DeleteAction,
- actions.CopyAction,
- CWETypeRQLAction,
- ],
- })
- req = self.request()
- rset = req.execute('CWEType X WHERE X name "CWRType"')
- self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
- {'useractions': USERACTIONS,
- 'siteactions': SITEACTIONS,
- 'footer': FOOTERACTIONS,
- 'manage': MANAGEACTIONS,
- 'mainactions': [actions.ModifyAction, actions.ViewSameCWEType],
- 'moreactions': [actions.ManagePermissionsAction,
- actions.AddRelatedActions,
- actions.DeleteAction,
- actions.CopyAction,]
- })
+ with self.admin_access.web_request() as req:
+ rset = req.execute('CWEType X WHERE X name "CWEType"')
+ self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
+ {'useractions': USERACTIONS,
+ 'siteactions': SITEACTIONS,
+ 'footer': FOOTERACTIONS,
+ 'manage': MANAGEACTIONS,
+ 'mainactions': [actions.ModifyAction, actions.ViewSameCWEType],
+ 'moreactions': [actions.ManagePermissionsAction,
+ actions.AddRelatedActions,
+ actions.DeleteAction,
+ actions.CopyAction,
+ CWETypeRQLAction,
+ ],
+ })
+ rset = req.execute('CWEType X WHERE X name "CWRType"')
+ self.assertDictEqual(self.pactionsdict(req, rset, skipcategories=()),
+ {'useractions': USERACTIONS,
+ 'siteactions': SITEACTIONS,
+ 'footer': FOOTERACTIONS,
+ 'manage': MANAGEACTIONS,
+ 'mainactions': [actions.ModifyAction, actions.ViewSameCWEType],
+ 'moreactions': [actions.ManagePermissionsAction,
+ actions.AddRelatedActions,
+ actions.DeleteAction,
+ actions.CopyAction,]
+ })
--- a/web/views/__init__.py Fri May 23 18:35:13 2014 +0200
+++ b/web/views/__init__.py Fri Jun 27 11:48:26 2014 +0200
@@ -103,7 +103,7 @@
def linksearch_select_url(req, rset):
- """when searching an entity to create a relation, return an url to select
+ """when searching an entity to create a relation, return a URL to select
entities in the given rset
"""
req.add_js( ('cubicweb.ajax.js', 'cubicweb.edition.js') )
--- a/web/views/ajaxcontroller.py Fri May 23 18:35:13 2014 +0200
+++ b/web/views/ajaxcontroller.py Fri Jun 27 11:48:26 2014 +0200
@@ -460,3 +460,15 @@
"""
cookiename, cookievalue = str(cookiename), str(cookievalue)
self._cw.set_cookie(cookiename, cookievalue)
+
+
+
+@ajaxfunc
+def delete_relation(self, rtype, subjeid, objeid):
+ rql = 'DELETE S %s O WHERE S eid %%(s)s, O eid %%(o)s' % rtype
+ self._cw.execute(rql, {'s': subjeid, 'o': objeid})
+
+@ajaxfunc
+def add_relation(self, rtype, subjeid, objeid):
+ rql = 'SET S %s O WHERE S eid %%(s)s, O eid %%(o)s' % rtype
+ self._cw.execute(rql, {'s': subjeid, 'o': objeid})
--- a/web/views/basecomponents.py Fri May 23 18:35:13 2014 +0200
+++ b/web/views/basecomponents.py Fri Jun 27 11:48:26 2014 +0200
@@ -97,8 +97,7 @@
context = _('header-left')
def render(self, w):
- w(u'<a href="%s"><img id="logo" src="%s" alt="logo"/></a>'
- % (self._cw.base_url(), self._cw.uiprops['LOGO']))
+ w(u'<a id="logo" href="%s"></a>' % self._cw.base_url())
class ApplicationName(HeaderComponent):
--- a/web/views/basecontrollers.py Fri May 23 18:35:13 2014 +0200
+++ b/web/views/basecontrollers.py Fri Jun 27 11:48:26 2014 +0200
@@ -92,7 +92,7 @@
def publish(self, rset=None):
"""log in the instance"""
path = self._cw.form.get('postlogin_path', '')
- # redirect expect an url, not a path. Also path may contains a query
+ # redirect expects a URL, not a path. Also path may contain a query
# string, hence should not be given to _cw.build_url()
raise Redirect(self._cw.base_url() + path)
@@ -110,7 +110,7 @@
# anonymous connection is allowed and the page will be displayed or
# we'll be redirected to the login form
msg = self._cw._('you have been logged out')
- # force base_url so on dual http/https configuration, we generate an url
+ # force base_url so that on a dual http/https configuration, we generate a URL
# on the http version of the site
return self._cw.build_url('view', vid='loggedout',
base_url=self._cw.vreg.config['base-url'])
--- a/web/views/basetemplates.py Fri May 23 18:35:13 2014 +0200
+++ b/web/views/basetemplates.py Fri Jun 27 11:48:26 2014 +0200
@@ -176,7 +176,6 @@
w = self.whead
lang = self._cw.lang
self.write_doctype()
- # explictly close the <base> tag to avoid IE 6 bugs while browsing DOM
self._cw.html_headers.define_var('BASE_URL', self._cw.base_url())
self._cw.html_headers.define_var('DATA_URL', self._cw.datadir_url)
w(u'<meta http-equiv="content-type" content="%s; charset=%s"/>\n'
--- a/web/views/cwsources.py Fri May 23 18:35:13 2014 +0200
+++ b/web/views/cwsources.py Fri Jun 27 11:48:26 2014 +0200
@@ -117,7 +117,7 @@
'Any X, SCH, XO ORDERBY ET WHERE X options XO, X cw_for_source S, S eid %(s)s, '
'X cw_schema SCH, SCH is ET', {'s': entity.eid})
self.wview('table', rset, 'noresult')
- checker = MAPPING_CHECKERS.get(entity.type, MappingChecker)(entity)
+ checker = MappingChecker(entity)
checker.check()
if (checker.errors or checker.warnings or checker.infos):
self.w('<h2>%s</h2>' % _('Detected problems'))
--- a/web/views/editcontroller.py Fri May 23 18:35:13 2014 +0200
+++ b/web/views/editcontroller.py Fri Jun 27 11:48:26 2014 +0200
@@ -178,7 +178,7 @@
form = req.form
# so we're able to know the main entity from the repository side
if '__maineid' in form:
- req.set_shared_data('__maineid', form['__maineid'], txdata=True)
+ req.transaction_data['__maineid'] = form['__maineid']
# no specific action, generic edition
self._to_create = req.data['eidmap'] = {}
# those two data variables are used to handle relation from/to entities
--- a/web/views/forms.py Fri May 23 18:35:13 2014 +0200
+++ b/web/views/forms.py Fri Jun 27 11:48:26 2014 +0200
@@ -189,7 +189,7 @@
def render(self, formvalues=None, renderer=None, **kwargs):
"""Render this form, using the `renderer` given as argument or the
default according to :attr:`form_renderer_id`. The rendered form is
- returned as an unicode string.
+ returned as a unicode string.
`formvalues` is an optional dictionary containing values that will be
considered as field's value.
--- a/web/views/magicsearch.py Fri May 23 18:35:13 2014 +0200
+++ b/web/views/magicsearch.py Fri Jun 27 11:48:26 2014 +0200
@@ -201,7 +201,7 @@
priority = 4
def preprocess_query(self, uquery):
- """try to get rql from an unicode query string"""
+ """try to get rql from a unicode query string"""
args = None
try:
# Process as if there was a quoted part
--- a/web/views/tableview.py Fri May 23 18:35:13 2014 +0200
+++ b/web/views/tableview.py Fri Jun 27 11:48:26 2014 +0200
@@ -1065,9 +1065,9 @@
self.w(u'</div>\n')
def page_navigation_url(self, navcomp, path, params):
- """Build an url to the current view using the <navcomp> attributes
+ """Build a URL to the current view using the <navcomp> attributes
- :param navcomp: a NavigationComponent to call an url method on.
+ :param navcomp: a NavigationComponent to call a URL method on.
:param path: expected to be json here?
:param params: params to give to build_url method
--- a/web/views/urlpublishing.py Fri May 23 18:35:13 2014 +0200
+++ b/web/views/urlpublishing.py Fri Jun 27 11:48:26 2014 +0200
@@ -34,7 +34,7 @@
3. :class:`cubicweb.web.views.urlpublishing.URLRewriteEvaluator`
selects all urlrewriter components, sorts them according to their
- priorty, call their ``rewrite()`` method, the first one that
+ priority, and calls their ``rewrite()`` method; the first one that
doesn't raise a ``KeyError`` wins. This is where the
:mod:`cubicweb.web.views.urlrewrite` and
:class:`cubicweb.web.views.urlrewrite.SimpleReqRewriter` comes into
@@ -97,7 +97,7 @@
self.evaluators = sorted(evaluators, key=lambda x: x.priority)
def process(self, req, path):
- """Given an url (essentialy caracterized by a path on the
+ """Given a URL (essentialy caracterized by a path on the
server, but additional information may be found in the request
object), return a publishing method identifier
(e.g. controller) and an optional result set.
--- a/web/views/xmlrss.py Fri May 23 18:35:13 2014 +0200
+++ b/web/views/xmlrss.py Fri Jun 27 11:48:26 2014 +0200
@@ -185,7 +185,7 @@
__select__ = is_instance('Any')
def rss_feed_url(self):
- """return an url to the rss feed for this entity"""
+ """return a URL to the rss feed for this entity"""
return self.entity.absolute_url(vid='rss')
--- a/wsgi/__init__.py Fri May 23 18:35:13 2014 +0200
+++ b/wsgi/__init__.py Fri Jun 27 11:48:26 2014 +0200
@@ -29,7 +29,7 @@
from email import message, message_from_string
from Cookie import SimpleCookie
from StringIO import StringIO
-from cgi import parse_header, parse_qsl
+from cgi import parse_header
from pprint import pformat as _pformat
@@ -40,13 +40,6 @@
except Exception:
return u'<could not parse>'
-def qs2dict(qs):
- """transforms a query string into a regular python dict"""
- result = {}
- for key, value in parse_qsl(qs, True):
- result.setdefault(key, []).append(value)
- return result
-
def normalize_header(header):
"""returns a normalized header name
@@ -70,31 +63,3 @@
break
fdst.write(buf)
size -= len(buf)
-
-def parse_file_upload(header_dict, post_data):
- """This is adapted FROM DJANGO"""
- raw_message = '\r\n'.join('%s:%s' % pair for pair in header_dict.iteritems())
- raw_message += '\r\n\r\n' + post_data
- msg = message_from_string(raw_message)
- post, files = {}, {}
- for submessage in msg.get_payload():
- name_dict = parse_header(submessage['Content-Disposition'])[1]
- key = name_dict['name']
- # name_dict is something like {'name': 'file', 'filename': 'test.txt'} for file uploads
- # or {'name': 'blah'} for POST fields
- # We assume all uploaded files have a 'filename' set.
- if 'filename' in name_dict:
- assert type([]) != type(submessage.get_payload()), "Nested MIME messages are not supported"
- if not name_dict['filename'].strip():
- continue
- # IE submits the full path, so trim everything but the basename.
- # (We can't use os.path.basename because that uses the server's
- # directory separator, which may not be the same as the
- # client's one.)
- filename = name_dict['filename'][name_dict['filename'].rfind("\\")+1:]
- mimetype = 'Content-Type' in submessage and submessage['Content-Type'] or None
- content = StringIO(submessage.get_payload())
- files[key] = [filename, mimetype, content]
- else:
- post.setdefault(key, []).append(submessage.get_payload())
- return post, files
--- a/wsgi/request.py Fri May 23 18:35:13 2014 +0200
+++ b/wsgi/request.py Fri Jun 27 11:48:26 2014 +0200
@@ -27,14 +27,11 @@
from StringIO import StringIO
from urllib import quote
+from urlparse import parse_qs
-from logilab.common.decorators import cached
-
+from cubicweb.multipart import copy_file, parse_form_data
from cubicweb.web.request import CubicWebRequestBase
-from cubicweb.wsgi import (pformat, qs2dict, safe_copyfileobj, parse_file_upload,
- normalize_header)
-from cubicweb.web.http_headers import Headers
-
+from cubicweb.wsgi import pformat, normalize_header
class CubicWebWsgiRequest(CubicWebRequestBase):
@@ -45,6 +42,8 @@
self.environ = environ
self.path = environ['PATH_INFO']
self.method = environ['REQUEST_METHOD'].upper()
+
+ # content_length "may be empty or absent"
try:
length = int(environ['CONTENT_LENGTH'])
except (KeyError, ValueError):
@@ -54,8 +53,9 @@
self.content = StringIO()
else:
self.content = tempfile.TemporaryFile()
- safe_copyfileobj(environ['wsgi.input'], self.content, size=length)
+ copy_file(environ['wsgi.input'], self.content, maxread=length)
self.content.seek(0, 0)
+ environ['wsgi.input'] = self.content
headers_in = dict((normalize_header(k[5:]), v) for k, v in self.environ.items()
if k.startswith('HTTP_'))
@@ -65,10 +65,11 @@
super(CubicWebWsgiRequest, self).__init__(vreg, https, post,
headers= headers_in)
if files is not None:
- for key, (name, _, stream) in files.iteritems():
- if name is not None:
- name = unicode(name, self.encoding)
- self.form[key] = (name, stream)
+ for key, part in files.iteritems():
+ name = None
+ if part.filename is not None:
+ name = unicode(part.filename, self.encoding)
+ self.form[key] = (name, part.file)
def __repr__(self):
# Since this is called as part of error handling, we need to be very
@@ -108,23 +109,11 @@
def get_posted_data(self):
# The WSGI spec says 'QUERY_STRING' may be absent.
- post = qs2dict(self.environ.get('QUERY_STRING', ''))
+ post = parse_qs(self.environ.get('QUERY_STRING', ''))
files = None
if self.method == 'POST':
- if self.environ.get('CONTENT_TYPE', '').startswith('multipart'):
- header_dict = dict((normalize_header(k[5:]), v)
- for k, v in self.environ.items()
- if k.startswith('HTTP_'))
- header_dict['Content-Type'] = self.environ.get('CONTENT_TYPE', '')
- post_, files = parse_file_upload(header_dict, self.raw_post_data)
- post.update(post_)
- else:
- post.update(qs2dict(self.raw_post_data))
+ forms, files = parse_form_data(self.environ, strict=True,
+ mem_limit=self.vreg.config['max-post-length'])
+ post.update(forms)
+ self.content.seek(0, 0)
return post, files
-
- @property
- @cached
- def raw_post_data(self):
- postdata = self.content.read()
- self.content.seek(0, 0)
- return postdata