--- a/dataimport.py Mon Apr 22 14:54:22 2013 +0200
+++ b/dataimport.py Tue Apr 23 18:15:10 2013 +0200
@@ -772,7 +772,7 @@
self.source.add_info(session, entity, self.source, None, complete=False)
kwargs = dict()
if inspect.getargspec(self.add_relation).keywords:
- kwargs['subjtype'] = entity.__regid__
+ kwargs['subjtype'] = entity.cw_etype
for rtype, targeteids in rels.iteritems():
# targeteids may be a single eid or a list of eids
inlined = self.rschema(rtype).inlined
@@ -1069,13 +1069,13 @@
def add_entity(self, session, entity):
with self._storage_handler(entity, 'added'):
attrs = self.preprocess_entity(entity)
- rtypes = self._inlined_rtypes_cache.get(entity.__regid__, ())
+ rtypes = self._inlined_rtypes_cache.get(entity.cw_etype, ())
if isinstance(rtypes, str):
rtypes = (rtypes,)
for rtype in rtypes:
if rtype not in attrs:
attrs[rtype] = None
- sql = self.sqlgen.insert(SQL_PREFIX + entity.__regid__, attrs)
+ sql = self.sqlgen.insert(SQL_PREFIX + entity.cw_etype, attrs)
self._sql.eid_insertdicts[entity.eid] = attrs
self._append_to_entities(sql, attrs)
@@ -1108,7 +1108,7 @@
assert isinstance(extid, str)
extid = b64encode(extid)
uri = 'system' if source.copy_based_source else source.uri
- attrs = {'type': entity.__regid__, 'eid': entity.eid, 'extid': extid,
+ attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': extid,
'source': uri, 'asource': source.uri, 'mtime': datetime.utcnow()}
self._handle_insert_entity_sql(session, self.sqlgen.insert('entities', attrs), attrs)
# insert core relations: is, is_instance_of and cw_source
@@ -1127,7 +1127,7 @@
self._handle_is_relation_sql(session, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)',
(entity.eid, source.eid))
# now we can update the full text index
- if self.do_fti and self.need_fti_indexation(entity.__regid__):
+ if self.do_fti and self.need_fti_indexation(entity.cw_etype):
if complete:
entity.complete(entity.e_schema.indexable_attributes())
self.index_entity(session, entity=entity)
--- a/doc/3.17.rst Mon Apr 22 14:54:22 2013 +0200
+++ b/doc/3.17.rst Tue Apr 23 18:15:10 2013 +0200
@@ -28,8 +28,17 @@
* The email sending views and controllers have been removed from CubicWeb and
moved to the `massmailing` cube.
+* ``RenderAndSendNotificationView`` is deprecated in favor of
+  ``ActualNotificationOp``; the new operation uses the more efficient *data*
+ idiom.
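+
+  The hook-side migration is mechanical (CubicWeb's own notification hooks in
+  ``cubicweb/hooks/notification.py`` follow the same pattern); in the sketch
+  below ``view``, ``comment`` and ``self._cw`` stand for whatever the calling
+  hook already has in scope::
+
+    # 3.16 style: one operation per notification
+    RenderAndSendNotificationView(self._cw, view=view,
+                                  viewargs={'comment': comment})
+
+    # 3.17 style: feed the shared data operation, which renders and sends
+    # everything at once in its postcommit_event
+    from cubicweb.hooks.notification import ActualNotificationOp
+    notif_op = ActualNotificationOp.get_instance(self._cw)
+    notif_op.add_data((view, {'comment': comment}))
+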
+Deprecation
+---------------------
+
+* ``ldapuser`` has been deprecated. It will be fully dropped in the next
+  version. If you are still using ``ldapuser``, switch to ``ldapfeed`` **NOW**!
+
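+  A minimal sketch of the core of the switch, from a ``cubicweb-ctl shell``
+  session (the attribute names follow the ``CWSource`` entities created in
+  this changeset's tests; see also the ``misc/scripts/ldapuser2ldapfeed.py``
+  script updated below)::
+
+    rql('SET S type "ldapfeed", S parser "ldapfeed" '
+        'WHERE S is CWSource, S type "ldapuser"')
+    commit()
+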
Deprecated Code Drops
----------------------
--- a/entities/__init__.py Mon Apr 22 14:54:22 2013 +0200
+++ b/entities/__init__.py Tue Apr 23 18:15:10 2013 +0200
@@ -22,7 +22,7 @@
from warnings import warn
from logilab.common.deprecation import deprecated
-from logilab.common.decorators import cached
+from logilab.common.decorators import cached, classproperty
from cubicweb import Unauthorized
from cubicweb.entity import Entity
@@ -60,6 +60,11 @@
# meta data api ###########################################################
+ @classproperty
+ def cw_etype(self):
+ """entity Etype as a string"""
+ return self.__regid__
+
def dc_title(self):
"""return a suitable *unicode* title for this entity"""
for rschema, attrschema in self.e_schema.attribute_definitions():
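Using ``classproperty`` (rather than a plain property) means the new ``cw_etype``
attribute is readable on entity classes as well as on instances, which is what the
test changes below rely on (``eclass.cw_etype``). A minimal standalone sketch of
the behaviour, with a hypothetical ``FakeEntity`` class that is not part of the
patch:

    from logilab.common.decorators import classproperty

    class FakeEntity(object):
        __regid__ = 'Card'   # hypothetical etype, for illustration only

        @classproperty
        def cw_etype(cls):
            """entity Etype as a string"""
            return cls.__regid__

    assert FakeEntity.cw_etype == 'Card'    # works on the class ...
    assert FakeEntity().cw_etype == 'Card'  # ... and on any instance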
--- a/entities/adapters.py Mon Apr 22 14:54:22 2013 +0200
+++ b/entities/adapters.py Tue Apr 23 18:15:10 2013 +0200
@@ -330,7 +330,7 @@
_done = set()
for child in self.children():
if child.eid in _done:
- self.error('loop in %s tree: %s', child.__regid__.lower(), child)
+ self.error('loop in %s tree: %s', child.cw_etype.lower(), child)
continue
yield child
_done.add(child.eid)
@@ -357,7 +357,7 @@
entity = adapter.entity
while entity is not None:
if entity.eid in path:
- self.error('loop in %s tree: %s', entity.__regid__.lower(), entity)
+ self.error('loop in %s tree: %s', entity.cw_etype.lower(), entity)
break
path.append(entity.eid)
try:
--- a/entities/sources.py Mon Apr 22 14:54:22 2013 +0200
+++ b/entities/sources.py Tue Apr 23 18:15:10 2013 +0200
@@ -124,7 +124,7 @@
fetch_attrs, cw_fetch_order = fetch_config(['cw_for_source', 'cw_schema', 'options'])
def dc_title(self):
- return self._cw._(self.__regid__) + ' #%s' % self.eid
+ return self._cw._(self.cw_etype) + ' #%s' % self.eid
@property
def schema(self):
--- a/entities/test/unittest_base.py Mon Apr 22 14:54:22 2013 +0200
+++ b/entities/test/unittest_base.py Tue Apr 23 18:15:10 2013 +0200
@@ -48,8 +48,13 @@
self.assertEqual(entity.dc_creator(), u'member')
def test_type(self):
+ #dc_type may be translated
self.assertEqual(self.member.dc_type(), 'CWUser')
+ def test_cw_etype(self):
+ #cw_etype is never translated
+ self.assertEqual(self.member.cw_etype, 'CWUser')
+
def test_entity_meta_attributes(self):
# XXX move to yams
self.assertEqual(self.schema['CWUser'].meta_attributes(), {})
@@ -172,7 +177,7 @@
self.assertEqual(eclass.__bases__[0].__bases__, (Foo,))
# check Division eclass is still selected for plain Division entities
eclass = self.select_eclass('Division')
- self.assertEqual(eclass.__regid__, 'Division')
+ self.assertEqual(eclass.cw_etype, 'Division')
if __name__ == '__main__':
unittest_main()
--- a/entities/wfobjs.py Mon Apr 22 14:54:22 2013 +0200
+++ b/entities/wfobjs.py Tue Apr 23 18:15:10 2013 +0200
@@ -186,7 +186,7 @@
fetch_attrs, cw_fetch_order = fetch_config(['name', 'type'])
def __init__(self, *args, **kwargs):
- if self.__regid__ == 'BaseTransition':
+ if self.cw_etype == 'BaseTransition':
raise WorkflowException('should not be instantiated')
super(BaseTransition, self).__init__(*args, **kwargs)
@@ -449,10 +449,10 @@
"""return the default workflow for entities of this type"""
# XXX CWEType method
wfrset = self._cw.execute('Any WF WHERE ET default_workflow WF, '
- 'ET name %(et)s', {'et': self.entity.__regid__})
+ 'ET name %(et)s', {'et': self.entity.cw_etype})
if wfrset:
return wfrset.get_entity(0, 0)
- self.warning("can't find any workflow for %s", self.entity.__regid__)
+ self.warning("can't find any workflow for %s", self.entity.cw_etype)
return None
@property
--- a/entity.py Mon Apr 22 14:54:22 2013 +0200
+++ b/entity.py Tue Apr 23 18:15:10 2013 +0200
@@ -795,7 +795,7 @@
for rtype in self.skip_copy_for:
skip_copy_for['subject'].add(rtype)
warn('[3.14] skip_copy_for on entity classes (%s) is deprecated, '
- 'use cw_skip_for instead with list of couples (rtype, role)' % self.__regid__,
+ 'use cw_skip_for instead with list of couples (rtype, role)' % self.cw_etype,
DeprecationWarning)
for rtype, role in self.cw_skip_copy_for:
assert role in ('subject', 'object'), role
@@ -847,7 +847,7 @@
def as_rset(self): # XXX .cw_as_rset
"""returns a resultset containing `self` information"""
rset = ResultSet([(self.eid,)], 'Any X WHERE X eid %(x)s',
- {'x': self.eid}, [(self.__regid__,)])
+ {'x': self.eid}, [(self.cw_etype,)])
rset.req = self._cw
return rset
--- a/hooks/metadata.py Mon Apr 22 14:54:22 2013 +0200
+++ b/hooks/metadata.py Tue Apr 23 18:15:10 2013 +0200
@@ -158,7 +158,7 @@
entity = self.entity
extid = entity.cw_metainformation()['extid']
repo._type_source_cache[entity.eid] = (
- entity.__regid__, self.newsource.uri, None, self.newsource.uri)
+ entity.cw_etype, self.newsource.uri, None, self.newsource.uri)
if self.oldsource.copy_based_source:
uri = 'system'
else:
@@ -216,7 +216,7 @@
# but has been moved, ignore it'.
self._cw.system_sql('UPDATE entities SET eid=-eid WHERE eid=%(eid)s',
{'eid': self.eidfrom})
- attrs = {'type': entity.__regid__, 'eid': entity.eid, 'extid': None,
+ attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': None,
'source': 'system', 'asource': 'system',
'mtime': datetime.now()}
self._cw.system_sql(syssource.sqlgen.insert('entities', attrs), attrs)
--- a/hooks/notification.py Mon Apr 22 14:54:22 2013 +0200
+++ b/hooks/notification.py Tue Apr 23 18:15:10 2013 +0200
@@ -20,23 +20,51 @@
__docformat__ = "restructuredtext en"
from logilab.common.textutils import normalize_text
+from logilab.common.deprecation import deprecated
from cubicweb import RegistryNotFound
from cubicweb.predicates import is_instance
from cubicweb.server import hook
from cubicweb.sobjects.supervising import SupervisionMailOp
-class RenderAndSendNotificationView(hook.Operation):
- """delay rendering of notification view until precommit"""
- view = None # make pylint happy
+
+@deprecated('[3.17] use ActualNotificationOp instead (using the 3.10 data API)')
+def RenderAndSendNotificationView(session, view, viewargs=None):
+ if viewargs is None:
+ viewargs = {}
+ notif_op = ActualNotificationOp.get_instance(session)
+ notif_op.add_data((view, viewargs))
+ return ActualNotificationOp
+
+
+class ActualNotificationOp(hook.DataOperationMixIn, hook.Operation):
+ """End of the notification chain. Do render and send views after commit
+
+ All others Operations end up adding data to this Operation.
+ The notification are done on ``postcommit_event`` to make sure to prevent
+ sending notification about rollbacked data.
+ """
+
+ containercls = list
def postcommit_event(self):
- view = self.view
- if view.cw_rset is not None and not view.cw_rset:
- return # entity added and deleted in the same transaction (cache effect)
- if view.cw_rset and self.session.deleted_in_transaction(view.cw_rset[view.cw_row or 0][view.cw_col or 0]):
- return # entity added and deleted in the same transaction
- self.view.render_and_send(**getattr(self, 'viewargs', {}))
+ deleted = self.session.deleted_in_transaction
+ for view, viewargs in self.get_data():
+ if view.cw_rset is not None:
+ if not view.cw_rset:
+ # entity added and deleted in the same transaction
+ # (cache effect)
+ continue
+ elif deleted(view.cw_rset[view.cw_row or 0][view.cw_col or 0]):
+ # entity added and deleted in the same transaction
+ continue
+ try:
+ view.render_and_send(**viewargs)
+ except Exception:
+                # errors in postcommit events are not propagated; catch and
+                # log here so that a single failing notification does not
+                # prevent the remaining ones from being sent
+ self.exception('Notification failed')
class NotificationHook(hook.Hook):
@@ -73,9 +101,11 @@
# #103822)
if comment and entity.comment_format != 'text/rest':
comment = normalize_text(comment, 80)
- RenderAndSendNotificationView(self._cw, view=view, viewargs={
- 'comment': comment, 'previous_state': entity.previous_state.name,
- 'current_state': entity.new_state.name})
+ notif_op = ActualNotificationOp.get_instance(self._cw)
+ viewargs = {'comment': comment,
+ 'previous_state': entity.previous_state.name,
+ 'current_state': entity.new_state.name}
+ notif_op.add_data((view, viewargs))
class RelationChangeHook(NotificationHook):
__regid__ = 'notifyrelationchange'
@@ -91,7 +121,8 @@
rset=rset, row=0)
if view is None:
return
- RenderAndSendNotificationView(self._cw, view=view)
+ notif_op = ActualNotificationOp.get_instance(self._cw)
+ notif_op.add_data((view, {}))
class EntityChangeHook(NotificationHook):
@@ -106,18 +137,22 @@
view = self.select_view('notif_%s' % self.event, rset=rset, row=0)
if view is None:
return
- RenderAndSendNotificationView(self._cw, view=view)
+ notif_op = ActualNotificationOp.get_instance(self._cw)
+ notif_op.add_data((view, {}))
class EntityUpdatedNotificationOp(hook.SingleLastOperation):
+ """scrap all changed entity to prepare a Notification Operation for them"""
def precommit_event(self):
+ # precommit event that creates postcommit operation
session = self.session
for eid in session.transaction_data['changes']:
view = session.vreg['views'].select('notif_entity_updated', session,
rset=session.eid_rset(eid),
row=0)
- RenderAndSendNotificationView(session, view=view)
+            notif_op = ActualNotificationOp.get_instance(session)
+ notif_op.add_data((view, {}))
class EntityUpdateHook(NotificationHook):
@@ -198,5 +233,5 @@
# missing required relation
title = '#%s' % self.entity.eid
self._cw.transaction_data.setdefault('pendingchanges', []).append(
- ('delete_entity', (self.entity.eid, self.entity.__regid__, title)))
+ ('delete_entity', (self.entity.eid, self.entity.cw_etype, title)))
return True
--- a/misc/scripts/drop_external_entities.py Mon Apr 22 14:54:22 2013 +0200
+++ b/misc/scripts/drop_external_entities.py Tue Apr 23 18:15:10 2013 +0200
@@ -10,14 +10,14 @@
try:
suri = ecnx.describe(meta['extid'])[1]
except UnknownEid:
- print 'cant describe', e.__regid__, e.eid, meta
+ print 'cant describe', e.cw_etype, e.eid, meta
continue
if suri != 'system':
try:
- print 'deleting', e.__regid__, e.eid, suri, e.dc_title().encode('utf8')
+ print 'deleting', e.cw_etype, e.eid, suri, e.dc_title().encode('utf8')
repo.delete_info(session, e, suri, scleanup=e.eid)
except UnknownEid:
- print ' cant delete', e.__regid__, e.eid, meta
+ print ' cant delete', e.cw_etype, e.eid, meta
commit()
--- a/misc/scripts/ldapuser2ldapfeed.py Mon Apr 22 14:54:22 2013 +0200
+++ b/misc/scripts/ldapuser2ldapfeed.py Tue Apr 23 18:15:10 2013 +0200
@@ -39,7 +39,7 @@
extids = set()
duplicates = []
for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities():
- etype = entity.__regid__
+ etype = entity.cw_etype
if not source.support_entity(etype):
print "source doesn't support %s, delete %s" % (etype, entity.eid)
todelete[etype].append(entity)
--- a/misc/scripts/pyroforge2datafeed.py Mon Apr 22 14:54:22 2013 +0200
+++ b/misc/scripts/pyroforge2datafeed.py Tue Apr 23 18:15:10 2013 +0200
@@ -47,7 +47,7 @@
todelete = {}
host = source.config['base-url'].split('://')[1]
for entity in rql('Any X WHERE X cw_source S, S eid %(s)s', {'s': source.eid}).entities():
- etype = entity.__regid__
+ etype = entity.cw_etype
if not source.support_entity(etype):
print "source doesn't support %s, delete %s" % (etype, entity.eid)
elif etype in DONT_GET_BACK_ETYPES:
@@ -84,8 +84,8 @@
for mappart in rql('Any X,SCH WHERE X cw_schema SCH, X cw_for_source S, S eid %(s)s',
{'s': source.eid}).entities():
schemaent = mappart.cw_schema[0]
- if schemaent.__regid__ != 'CWEType':
- assert schemaent.__regid__ == 'CWRType'
+ if schemaent.cw_etype != 'CWEType':
+ assert schemaent.cw_etype == 'CWRType'
sch = schema._eid_index[schemaent.eid]
for rdef in sch.rdefs.itervalues():
if not source.support_entity(rdef.subject) \
--- a/mixins.py Mon Apr 22 14:54:22 2013 +0200
+++ b/mixins.py Tue Apr 23 18:15:10 2013 +0200
@@ -236,7 +236,7 @@
self.close_item(entity)
def open_item(self, entity):
- self.w(u'<li class="%s">\n' % entity.__regid__.lower())
+ self.w(u'<li class="%s">\n' % entity.cw_etype.lower())
def close_item(self, entity):
self.w(u'</li>\n')
--- a/server/checkintegrity.py Mon Apr 22 14:54:22 2013 +0200
+++ b/server/checkintegrity.py Tue Apr 23 18:15:10 2013 +0200
@@ -329,7 +329,7 @@
else:
rql = 'Any X WHERE NOT Y %s X, X is %s' % (rschema, etype)
for entity in session.execute(rql).entities():
- sys.stderr.write(msg % (entity.__regid__, entity.eid, role, rschema))
+ sys.stderr.write(msg % (entity.cw_etype, entity.eid, role, rschema))
if fix:
#if entity.cw_describe()['source']['uri'] == 'system': XXX
entity.cw_delete() # XXX this is BRUTAL!
@@ -350,7 +350,7 @@
rql = 'Any X WHERE X %s NULL, X is %s, X cw_source S, S name "system"' % (
rschema, rdef.subject)
for entity in session.execute(rql).entities():
- sys.stderr.write(msg % (entity.__regid__, entity.eid, rschema))
+ sys.stderr.write(msg % (entity.cw_etype, entity.eid, rschema))
if fix:
entity.cw_delete()
notify_fixed(fix)
--- a/server/ldaputils.py Mon Apr 22 14:54:22 2013 +0200
+++ b/server/ldaputils.py Tue Apr 23 18:15:10 2013 +0200
@@ -88,8 +88,8 @@
('user-base-dn',
{'type' : 'string',
- 'default': 'ou=People,dc=logilab,dc=fr',
- 'help': 'base DN to lookup for users',
+ 'default': '',
+          'help': 'base DN to look up users; user import is disabled if unset',
'group': 'ldap-source', 'level': 1,
}),
('user-scope',
@@ -328,7 +328,6 @@
else:
itemdict = self._process_ldap_item(rec_dn, items)
result.append(itemdict)
- #print '--->', result
self.debug('ldap built results %s', len(result))
return result
--- a/server/repository.py Mon Apr 22 14:54:22 2013 +0200
+++ b/server/repository.py Tue Apr 23 18:15:10 2013 +0200
@@ -1336,7 +1336,7 @@
suri = 'system'
extid = source.get_extid(entity)
self._extid_cache[(str(extid), suri)] = entity.eid
- self._type_source_cache[entity.eid] = (entity.__regid__, suri, extid,
+ self._type_source_cache[entity.eid] = (entity.cw_etype, suri, extid,
source.uri)
return extid
@@ -1350,13 +1350,13 @@
entity._cw_is_saved = False # entity has an eid but is not yet saved
# init edited_attributes before calling before_add_entity hooks
entity.cw_edited = edited
- source = self.locate_etype_source(entity.__regid__)
+ source = self.locate_etype_source(entity.cw_etype)
# allocate an eid to the entity before calling hooks
entity.eid = self.system_source.create_eid(session)
# set caches asap
extid = self.init_entity_caches(session, entity, source)
if server.DEBUG & server.DBG_REPO:
- print 'ADD entity', self, entity.__regid__, entity.eid, edited
+ print 'ADD entity', self, entity.cw_etype, entity.eid, edited
prefill_entity_caches(entity)
if source.should_call_hooks:
self.hm.call_hooks('before_add_entity', session, entity=entity)
@@ -1389,7 +1389,7 @@
"""
entity = edited.entity
if server.DEBUG & server.DBG_REPO:
- print 'UPDATE entity', entity.__regid__, entity.eid, \
+ print 'UPDATE entity', entity.cw_etype, entity.eid, \
entity.cw_attr_cache, edited
hm = self.hm
eschema = entity.e_schema
--- a/server/sources/extlite.py Mon Apr 22 14:54:22 2013 +0200
+++ b/server/sources/extlite.py Tue Apr 23 18:15:10 2013 +0200
@@ -247,7 +247,7 @@
entity is deleted.
"""
attrs = {'cw_eid': entity.eid}
- sql = self.sqladapter.sqlgen.delete(SQL_PREFIX + entity.__regid__, attrs)
+ sql = self.sqladapter.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs)
self.doexec(session, sql, attrs)
def local_add_relation(self, session, subject, rtype, object):
--- a/server/sources/ldapuser.py Mon Apr 22 14:54:22 2013 +0200
+++ b/server/sources/ldapuser.py Tue Apr 23 18:15:10 2013 +0200
@@ -27,6 +27,7 @@
from rql.nodes import Relation, VariableRef, Constant, Function
+import warnings
from cubicweb import UnknownEid, RepositoryError
from cubicweb.server import ldaputils
from cubicweb.server.utils import cartesian_product
@@ -45,6 +46,11 @@
}
+# the module is imported lazily, so this warning only fires when an ldapuser source is actually used
+warnings.warn('Imminent drop of ldapuser. Switch to ldapfeed now!',
+ DeprecationWarning)
+
+
class LDAPUserSource(ldaputils.LDAPSourceMixIn, AbstractSource):
"""LDAP read-only CWUser source"""
support_entities = {'CWUser': False}
--- a/server/sources/native.py Mon Apr 22 14:54:22 2013 +0200
+++ b/server/sources/native.py Tue Apr 23 18:15:10 2013 +0200
@@ -197,7 +197,7 @@
sentity, oentity = entities
try:
rschema = session.vreg.schema.rschema(rtype)
- rdef = rschema.rdefs[(sentity.__regid__, oentity.__regid__)]
+ rdef = rschema.rdefs[(sentity.cw_etype, oentity.cw_etype)]
except KeyError:
raise _UndoException(session._(
"Can't restore relation %(rtype)s between %(subj)s and "
@@ -630,38 +630,38 @@
"""add a new entity to the source"""
with self._storage_handler(entity, 'added'):
attrs = self.preprocess_entity(entity)
- sql = self.sqlgen.insert(SQL_PREFIX + entity.__regid__, attrs)
+ sql = self.sqlgen.insert(SQL_PREFIX + entity.cw_etype, attrs)
self.doexec(session, sql, attrs)
- if session.ertype_supports_undo(entity.__regid__):
+ if session.ertype_supports_undo(entity.cw_etype):
self._record_tx_action(session, 'tx_entity_actions', 'C',
- etype=entity.__regid__, eid=entity.eid)
+ etype=entity.cw_etype, eid=entity.eid)
def update_entity(self, session, entity):
"""replace an entity in the source"""
with self._storage_handler(entity, 'updated'):
attrs = self.preprocess_entity(entity)
- if session.ertype_supports_undo(entity.__regid__):
+ if session.ertype_supports_undo(entity.cw_etype):
changes = self._save_attrs(session, entity, attrs)
self._record_tx_action(session, 'tx_entity_actions', 'U',
- etype=entity.__regid__, eid=entity.eid,
+ etype=entity.cw_etype, eid=entity.eid,
changes=self._binary(dumps(changes)))
- sql = self.sqlgen.update(SQL_PREFIX + entity.__regid__, attrs,
+ sql = self.sqlgen.update(SQL_PREFIX + entity.cw_etype, attrs,
['cw_eid'])
self.doexec(session, sql, attrs)
def delete_entity(self, session, entity):
"""delete an entity from the source"""
with self._storage_handler(entity, 'deleted'):
- if session.ertype_supports_undo(entity.__regid__):
+ if session.ertype_supports_undo(entity.cw_etype):
attrs = [SQL_PREFIX + r.type
for r in entity.e_schema.subject_relations()
if (r.final or r.inlined) and not r in VIRTUAL_RTYPES]
changes = self._save_attrs(session, entity, attrs)
self._record_tx_action(session, 'tx_entity_actions', 'D',
- etype=entity.__regid__, eid=entity.eid,
+ etype=entity.cw_etype, eid=entity.eid,
changes=self._binary(dumps(changes)))
attrs = {'cw_eid': entity.eid}
- sql = self.sqlgen.delete(SQL_PREFIX + entity.__regid__, attrs)
+ sql = self.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs)
self.doexec(session, sql, attrs)
def add_relation(self, session, subject, rtype, object, inlined=False):
@@ -978,7 +978,7 @@
assert isinstance(extid, str)
extid = b64encode(extid)
uri = 'system' if source.copy_based_source else source.uri
- attrs = {'type': entity.__regid__, 'eid': entity.eid, 'extid': extid,
+ attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': extid,
'source': uri, 'asource': source.uri, 'mtime': datetime.utcnow()}
self._handle_insert_entity_sql(session, self.sqlgen.insert('entities', attrs), attrs)
# insert core relations: is, is_instance_of and cw_source
@@ -997,7 +997,7 @@
self._handle_is_relation_sql(session, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)',
(entity.eid, source.eid))
# now we can update the full text index
- if self.do_fti and self.need_fti_indexation(entity.__regid__):
+ if self.do_fti and self.need_fti_indexation(entity.cw_etype):
if complete:
entity.complete(entity.e_schema.indexable_attributes())
self.index_entity(session, entity=entity)
@@ -1009,7 +1009,7 @@
# one indexable attribute
self.index_entity(session, entity=entity)
# update entities.mtime.
- # XXX Only if entity.__regid__ in self.multisources_etypes?
+ # XXX Only if entity.cw_etype in self.multisources_etypes?
attrs = {'eid': entity.eid, 'mtime': datetime.utcnow()}
self.doexec(session, self.sqlgen.update('entities', attrs, ['eid']), attrs)
@@ -1191,7 +1191,7 @@
attributes of the entity
"""
restr = {'cw_eid': entity.eid}
- sql = self.sqlgen.select(SQL_PREFIX + entity.__regid__, restr, attrs)
+ sql = self.sqlgen.select(SQL_PREFIX + entity.cw_etype, restr, attrs)
cu = self.doexec(session, sql, restr)
values = dict(zip(attrs, cu.fetchone()))
# ensure backend specific binary are converted back to string
@@ -1302,7 +1302,7 @@
# restore record in entities (will update fti if needed)
self.add_info(session, entity, self, None, True)
# remove record from deleted_entities if entity's type is multi-sources
- if entity.__regid__ in self.multisources_etypes:
+ if entity.cw_etype in self.multisources_etypes:
self.doexec(session,
'DELETE FROM deleted_entities WHERE eid=%s' % eid)
self.repo.hm.call_hooks('after_add_entity', session, entity=entity)
@@ -1365,7 +1365,7 @@
# XXX check removal of inlined relation?
# delete the entity
attrs = {'cw_eid': eid}
- sql = self.sqlgen.delete(SQL_PREFIX + entity.__regid__, attrs)
+ sql = self.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs)
self.doexec(session, sql, attrs)
# remove record from entities (will update fti if needed)
self.delete_info_multi(session, [entity], self.uri)
@@ -1385,7 +1385,7 @@
self._reedit_entity(entity, action.changes, err)
entity.cw_edited.check()
self.repo.hm.call_hooks('before_update_entity', session, entity=entity)
- sql = self.sqlgen.update(SQL_PREFIX + entity.__regid__, action.changes,
+ sql = self.sqlgen.update(SQL_PREFIX + entity.cw_etype, action.changes,
['cw_eid'])
self.doexec(session, sql, action.changes)
self.repo.hm.call_hooks('after_update_entity', session, entity=entity)
@@ -1403,7 +1403,7 @@
rschema = rdef.rtype
if rschema.inlined:
sql = 'SELECT 1 FROM cw_%s WHERE cw_eid=%s and cw_%s=%s'\
- % (sentity.__regid__, subj, rtype, obj)
+ % (sentity.cw_etype, subj, rtype, obj)
else:
sql = 'SELECT 1 FROM %s_relation WHERE eid_from=%s and eid_to=%s'\
% (rtype, subj, obj)
--- a/server/sources/remoterql.py Mon Apr 22 14:54:22 2013 +0200
+++ b/server/sources/remoterql.py Tue Apr 23 18:15:10 2013 +0200
@@ -415,7 +415,7 @@
self._query_cache.clear()
return
cu = session.cnxset[self.uri]
- cu.execute('DELETE %s X WHERE X eid %%(x)s' % entity.__regid__,
+ cu.execute('DELETE %s X WHERE X eid %%(x)s' % entity.cw_etype,
{'x': self.repo.eid2extid(self, entity.eid, session)})
self._query_cache.clear()
--- a/server/sources/rql2sql.py Mon Apr 22 14:54:22 2013 +0200
+++ b/server/sources/rql2sql.py Tue Apr 23 18:15:10 2013 +0200
@@ -1506,15 +1506,14 @@
value = constant.value
if constant.type == 'etype':
return value
- if constant.type == 'Int': # XXX Float?
+        # don't substitute Int constants: a query argument cannot be used as a sorting column number
+ if constant.type == 'Int':
return str(value)
if constant.type in ('Date', 'Datetime'):
rel = constant.relation()
if rel is not None:
rel._q_needcast = value
return self.keyword_map[value]()
- if constant.type == 'Boolean':
- return str(self.dbhelper.boolean_value(value))
if constant.type == 'Substitute':
try:
# we may found constant from simplified var in varmap
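About the rewritten comment above: integer constants must stay inlined in the
generated SQL because an integer literal in ``ORDER BY`` selects a column by
position, whereas a bound parameter is only a constant expression. A small
standalone illustration of that SQL behaviour (not part of the patch; sqlite3 is
used here purely for demonstration):

    import sqlite3

    cnx = sqlite3.connect(':memory:')
    cnx.execute('CREATE TABLE t (a INTEGER, b TEXT)')
    cnx.executemany('INSERT INTO t VALUES (?, ?)', [(2, 'x'), (1, 'y')])

    # literal 2: order by the second selected column, i.e. by b
    print(cnx.execute('SELECT a, b FROM t ORDER BY 2').fetchall())
    # bound parameter: order by the constant value 2, i.e. no real ordering
    print(cnx.execute('SELECT a, b FROM t ORDER BY ?', (2,)).fetchall())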
--- a/server/sources/storages.py Mon Apr 22 14:54:22 2013 +0200
+++ b/server/sources/storages.py Tue Apr 23 18:15:10 2013 +0200
@@ -239,7 +239,7 @@
sysource = entity._cw.cnxset.source('system')
cu = sysource.doexec(entity._cw,
'SELECT cw_%s FROM cw_%s WHERE cw_eid=%s' % (
- attr, entity.__regid__, entity.eid))
+ attr, entity.cw_etype, entity.eid))
rawvalue = cu.fetchone()[0]
if rawvalue is None: # no previous value
return None
@@ -253,7 +253,7 @@
session = entity._cw
source = session.repo.system_source
attrs = source.preprocess_entity(entity)
- sql = source.sqlgen.update('cw_' + entity.__regid__, attrs,
+ sql = source.sqlgen.update('cw_' + entity.cw_etype, attrs,
['cw_eid'])
source.doexec(session, sql, attrs)
entity.cw_edited = None
--- a/server/test/data/ldap_test.ldif Mon Apr 22 14:54:22 2013 +0200
+++ b/server/test/data/ldap_test.ldif Tue Apr 23 18:15:10 2013 +0200
@@ -12,11 +12,10 @@
dn: uid=syt,ou=People,dc=cubicweb,dc=test
loginShell: /bin/bash
-objectClass: inetOrgPerson
+objectClass: OpenLDAPperson
objectClass: posixAccount
objectClass: top
objectClass: shadowAccount
-structuralObjectClass: inetOrgPerson
cn: Sylvain Thenault
sn: Thenault
shadowMax: 99999
@@ -35,7 +34,7 @@
dn: uid=adim,ou=People,dc=cubicweb,dc=test
loginShell: /bin/bash
-objectClass: inetOrgPerson
+objectClass: OpenLDAPperson
objectClass: posixAccount
objectClass: top
objectClass: shadowAccount
@@ -46,7 +45,6 @@
uid: adim
homeDirectory: /home/adim
uidNumber: 1006
-structuralObjectClass: inetOrgPerson
givenName: Adrien
telephoneNumber: 109
displayName: adimascio
--- a/server/test/unittest_ldapsource.py Mon Apr 22 14:54:22 2013 +0200
+++ b/server/test/unittest_ldapsource.py Tue Apr 23 18:15:10 2013 +0200
@@ -18,6 +18,7 @@
"""cubicweb.server.sources.ldapusers unit and functional tests"""
import os
+import sys
import shutil
import time
from os.path import join, exists
@@ -34,7 +35,8 @@
from cubicweb.server.sources.ldapuser import GlobTrFunc, UnknownEid, RQL2LDAPFilter
-CONFIG = u'user-base-dn=ou=People,dc=cubicweb,dc=test'
+CONFIG_LDAPFEED = CONFIG_LDAPUSER = u'''user-base-dn=ou=People,dc=cubicweb,dc=test'''
+
URL = None
def create_slapd_configuration(cls):
@@ -49,8 +51,15 @@
# fill ldap server with some data
ldiffile = join(config.apphome, "ldap_test.ldif")
config.info('Initing ldap database')
- cmdline = "/usr/sbin/slapadd -f %s -l %s -c" % (slapdconf, ldiffile)
- subprocess.call(cmdline, shell=True)
+ cmdline = ['/usr/sbin/slapadd', '-f', slapdconf, '-l', ldiffile, '-c']
+ PIPE = subprocess.PIPE
+ slapproc = subprocess.Popen(cmdline, stdout=PIPE, stderr=PIPE)
+ stdout, stderr = slapproc.communicate()
+ if slapproc.returncode:
+ print >> sys.stderr, ('slapadd returned with status: %s'
+ % slapproc.returncode)
+ sys.stdout.write(stdout)
+ sys.stderr.write(stderr)
#ldapuri = 'ldapi://' + join(basedir, "ldapi").replace('/', '%2f')
port = get_available_port(xrange(9000, 9100))
@@ -58,10 +67,11 @@
ldapuri = 'ldap://%s' % host
cmdline = ["/usr/sbin/slapd", "-f", slapdconf, "-h", ldapuri, "-d", "0"]
config.info('Starting slapd:', ' '.join(cmdline))
- cls.slapd_process = subprocess.Popen(cmdline)
+ PIPE = subprocess.PIPE
+ cls.slapd_process = subprocess.Popen(cmdline, stdout=PIPE, stderr=PIPE)
time.sleep(0.2)
if cls.slapd_process.poll() is None:
- config.info('slapd started with pid %s' % cls.slapd_process.pid)
+ config.info('slapd started with pid %s', cls.slapd_process.pid)
else:
raise EnvironmentError('Cannot start slapd with cmdline="%s" (from directory "%s")' %
(" ".join(cmdline), os.getcwd()))
@@ -77,10 +87,17 @@
else:
import os, signal
os.kill(cls.slapd_process.pid, signal.SIGTERM)
- cls.slapd_process.wait()
+ stdout, stderr = cls.slapd_process.communicate()
+ if cls.slapd_process.returncode:
+ print >> sys.stderr, ('slapd returned with status: %s'
+ % cls.slapd_process.returncode)
+ sys.stdout.write(stdout)
+ sys.stderr.write(stderr)
config.info('DONE')
-class LDAPTestBase(CubicWebTC):
+
+class LDAPFeedTestBase(CubicWebTC):
+ test_db_id = 'ldap-feed'
loglevel = 'ERROR'
@classmethod
@@ -97,12 +114,75 @@
except:
pass
-class CheckWrongGroup(LDAPTestBase):
+ @classmethod
+ def pre_setup_database(cls, session, config):
+ session.create_entity('CWSource', name=u'ldap', type=u'ldapfeed', parser=u'ldapfeed',
+ url=URL, config=CONFIG_LDAPFEED)
+
+ session.commit()
+ return cls._pull(session)
+
+ @classmethod
+ def _pull(cls, session):
+ with session.repo.internal_session() as isession:
+ lfsource = isession.repo.sources_by_uri['ldap']
+ stats = lfsource.pull_data(isession, force=True, raise_on_error=True)
+ isession.commit()
+ return stats
+
+ def pull(self):
+ return self._pull(self.session)
+
+ def setup_database(self):
+ if self.test_db_id == 'ldap-feed':
+ with self.session.repo.internal_session(safe=True) as session:
+ session.execute('DELETE Any E WHERE E cw_source S, S name "ldap"')
+ session.commit()
+ if self.test_db_id == 'ldap-feed':
+ src = self.sexecute('CWSource S WHERE S name "ldap"').get_entity(0,0)
+ src.cw_set(config=CONFIG_LDAPFEED)
+ self.session.commit()
+ self.pull()
+
+ def delete_ldap_entry(self, dn):
+ """
+        delete an LDAP entry
+ """
+ modcmd = ['dn: %s'%dn, 'changetype: delete']
+ self._ldapmodify(modcmd)
+
+ def update_ldap_entry(self, dn, mods):
+ """
+        modify one or more attributes of an LDAP entry
+ """
+ modcmd = ['dn: %s'%dn, 'changetype: modify']
+ for (kind, key), values in mods.iteritems():
+ modcmd.append('%s: %s' % (kind, key))
+ if isinstance(values, basestring):
+ values = [values]
+ for value in values:
+ modcmd.append('%s: %s'%(key, value))
+ modcmd.append('-')
+ self._ldapmodify(modcmd)
+
+ def _ldapmodify(self, modcmd):
+ uri = self.repo.sources_by_uri['ldap'].urls[0]
+ updatecmd = ['ldapmodify', '-H', uri, '-v', '-x', '-D',
+ 'cn=admin,dc=cubicweb,dc=test', '-w', 'cw']
+ PIPE = subprocess.PIPE
+ p = subprocess.Popen(updatecmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
+ p.stdin.write('\n'.join(modcmd))
+ p.stdin.close()
+ if p.wait():
+ raise RuntimeError("ldap update failed: %s"%('\n'.join(p.stderr.readlines())))
+
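For reference, the ``mods`` mapping accepted by ``update_ldap_entry`` above is
keyed by ``(change-kind, attribute)`` pairs, each mapped to a value or a list of
values; a hypothetical call (only the dn comes from the test LDIF, the attribute
values are made up):

    self.update_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test',
                           {('replace', 'telephoneNumber'): '555-0100',
                            ('add', 'mail'): ['syt@example.org']})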
+class CheckWrongGroup(LDAPFeedTestBase):
+ """
+ A testcase for situations where the default group for CWUser
+ created from LDAP is wrongly configured.
+ """
def test_wrong_group(self):
- self.session.create_entity('CWSource', name=u'ldapuser', type=u'ldapfeed', parser=u'ldapfeed',
- url=URL, config=CONFIG)
- self.commit()
with self.session.repo.internal_session(safe=True) as session:
source = self.session.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0,0)
config = source.repo_source.check_config(source)
@@ -114,108 +194,19 @@
stats = source.repo_source.pull_data(session, force=True, raise_on_error=True)
session.commit()
-class DeleteStuffFromLDAPFeedSourceTC(LDAPTestBase):
- test_db_id = 'ldap-feed'
- @classmethod
- def pre_setup_database(cls, session, config):
- session.create_entity('CWSource', name=u'ldapuser', type=u'ldapfeed', parser=u'ldapfeed',
- url=URL, config=CONFIG)
- session.commit()
- with session.repo.internal_session(safe=True) as isession:
- lfsource = isession.repo.sources_by_uri['ldapuser']
- stats = lfsource.pull_data(isession, force=True, raise_on_error=True)
-
- def _pull(self):
- with self.session.repo.internal_session() as isession:
- lfsource = isession.repo.sources_by_uri['ldapuser']
- stats = lfsource.pull_data(isession, force=True, raise_on_error=True)
- isession.commit()
-
- def test_a_filter_inactivate(self):
- """ filtered out people should be deactivated, unable to authenticate """
- source = self.session.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0,0)
- config = source.repo_source.check_config(source)
- # filter with adim's phone number
- config['user-filter'] = u'(%s=%s)' % ('telephoneNumber', '109')
- source.repo_source.update_config(source, config)
- self.commit()
- self._pull()
- self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt')
- self.assertEqual(self.execute('Any N WHERE U login "syt", '
- 'U in_state S, S name N').rows[0][0],
- 'deactivated')
- self.assertEqual(self.execute('Any N WHERE U login "adim", '
- 'U in_state S, S name N').rows[0][0],
- 'activated')
- # unfilter, syt should be activated again
- config['user-filter'] = u''
- source.repo_source.update_config(source, config)
- self.commit()
- self._pull()
- self.assertEqual(self.execute('Any N WHERE U login "syt", '
- 'U in_state S, S name N').rows[0][0],
- 'activated')
- self.assertEqual(self.execute('Any N WHERE U login "adim", '
- 'U in_state S, S name N').rows[0][0],
- 'activated')
- def test_delete(self):
- """ delete syt, pull, check deactivation, repull,
- readd syt, pull, check activation
- """
- uri = self.repo.sources_by_uri['ldapuser'].urls[0]
- deletecmd = ("ldapdelete -H %s 'uid=syt,ou=People,dc=cubicweb,dc=test' "
- "-v -x -D cn=admin,dc=cubicweb,dc=test -w'cw'" % uri)
- os.system(deletecmd)
- self._pull()
- self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt')
- self.assertEqual(self.execute('Any N WHERE U login "syt", '
- 'U in_state S, S name N').rows[0][0],
- 'deactivated')
- # check that it doesn't choke
- self._pull()
- # reset the fscking ldap thing
- self.tearDownClass()
- self.setUpClass()
- self._pull()
- self.assertEqual(self.execute('Any N WHERE U login "syt", '
- 'U in_state S, S name N').rows[0][0],
- 'activated')
- # test reactivating the user isn't enough to authenticate, as the native source
- # refuse to authenticate user from other sources
- os.system(deletecmd)
- self._pull()
- user = self.execute('CWUser U WHERE U login "syt"').get_entity(0, 0)
- user.cw_adapt_to('IWorkflowable').fire_transition('activate')
- self.commit()
- self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt')
-
-class LDAPFeedSourceTC(LDAPTestBase):
- test_db_id = 'ldap-feed'
-
- @classmethod
- def pre_setup_database(cls, session, config):
- session.create_entity('CWSource', name=u'ldapuser', type=u'ldapfeed', parser=u'ldapfeed',
- url=URL, config=CONFIG)
- session.commit()
- isession = session.repo.internal_session(safe=True)
- lfsource = isession.repo.sources_by_uri['ldapuser']
- stats = lfsource.pull_data(isession, force=True, raise_on_error=True)
-
- def setUp(self):
- super(LDAPFeedSourceTC, self).setUp()
- # ldap source url in the database may use a different port as the one
- # just attributed
- lfsource = self.repo.sources_by_uri['ldapuser']
- lfsource.urls = [URL]
+class LDAPFeedUserTC(LDAPFeedTestBase):
+ """
+ A testcase for CWUser support in ldapfeed (basic tests and authentication).
+ """
def assertMetadata(self, entity):
self.assertTrue(entity.creation_date)
self.assertTrue(entity.modification_date)
def test_authenticate(self):
- source = self.repo.sources_by_uri['ldapuser']
+ source = self.repo.sources_by_uri['ldap']
self.session.set_cnxset()
# ensure we won't be logged against
self.assertRaises(AuthenticationError,
@@ -241,7 +232,7 @@
self.assertEqual(rset.rows, [[e.eid]])
def test_copy_to_system_source(self):
- source = self.repo.sources_by_uri['ldapuser']
+ source = self.repo.sources_by_uri['ldap']
eid = self.sexecute('CWUser X WHERE X login %(login)s', {'login': 'syt'})[0][0]
self.sexecute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': eid})
self.commit()
@@ -265,14 +256,88 @@
self.session, 'syt', password='syt'))
-class LDAPUserSourceTC(LDAPFeedSourceTC):
+class LDAPFeedUserDeletionTC(LDAPFeedTestBase):
+ """
+ A testcase for situations where users are deleted from or
+    unavailable in the LDAP database.
+ """
+ def test_a_filter_inactivate(self):
+ """ filtered out people should be deactivated, unable to authenticate """
+ source = self.session.execute('CWSource S WHERE S type="ldapfeed"').get_entity(0,0)
+ config = source.repo_source.check_config(source)
+ # filter with adim's phone number
+ config['user-filter'] = u'(%s=%s)' % ('telephoneNumber', '109')
+ source.repo_source.update_config(source, config)
+ self.commit()
+ self.pull()
+ self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt')
+ self.assertEqual(self.execute('Any N WHERE U login "syt", '
+ 'U in_state S, S name N').rows[0][0],
+ 'deactivated')
+ self.assertEqual(self.execute('Any N WHERE U login "adim", '
+ 'U in_state S, S name N').rows[0][0],
+ 'activated')
+ # unfilter, syt should be activated again
+ config['user-filter'] = u''
+ source.repo_source.update_config(source, config)
+ self.commit()
+ self.pull()
+ self.assertEqual(self.execute('Any N WHERE U login "syt", '
+ 'U in_state S, S name N').rows[0][0],
+ 'activated')
+ self.assertEqual(self.execute('Any N WHERE U login "adim", '
+ 'U in_state S, S name N').rows[0][0],
+ 'activated')
+
+ def test_delete(self):
+ """ delete syt, pull, check deactivation, repull,
+        re-add syt, pull, check activation
+ """
+ self.delete_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test')
+ self.pull()
+ self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='syt')
+ self.assertEqual(self.execute('Any N WHERE U login "syt", '
+ 'U in_state S, S name N').rows[0][0],
+ 'deactivated')
+ # check that it doesn't choke
+ self.pull()
+ # reset the ldap database
+ self.tearDownClass()
+ self.setUpClass()
+ self.pull()
+ self.assertEqual(self.execute('Any N WHERE U login "syt", '
+ 'U in_state S, S name N').rows[0][0],
+ 'activated')
+
+ def test_reactivate_deleted(self):
+ # test reactivating BY HAND the user isn't enough to
+        # authenticate, as the native source refuses to authenticate
+        # users from other sources
+ self.delete_ldap_entry('uid=syt,ou=People,dc=cubicweb,dc=test')
+ self.pull()
+        # reactivate the user (whose source is still ldap-feed)
+ user = self.execute('CWUser U WHERE U login "syt"').get_entity(0, 0)
+ user.cw_adapt_to('IWorkflowable').fire_transition('activate')
+ self.commit()
+ with self.assertRaises(AuthenticationError):
+ self.repo.connect('syt', password='syt')
+
+ # ok now let's try to make it a system user
+ self.sexecute('SET X cw_source S WHERE X eid %(x)s, S name "system"', {'x': user.eid})
+ self.commit()
+        # and check that we can now authenticate again
+ self.assertRaises(AuthenticationError, self.repo.connect, 'syt', password='toto')
+ self.assertTrue(self.repo.connect('syt', password='syt'))
+
+
+class LDAPUserSourceTC(LDAPFeedTestBase):
test_db_id = 'ldap-user'
tags = CubicWebTC.tags | Tags(('ldap'))
@classmethod
def pre_setup_database(cls, session, config):
- session.create_entity('CWSource', name=u'ldapuser', type=u'ldapuser',
- url=URL, config=CONFIG)
+ session.create_entity('CWSource', name=u'ldap', type=u'ldapuser',
+ url=URL, config=CONFIG_LDAPUSER)
session.commit()
# XXX keep it there
session.execute('CWUser U')
@@ -282,7 +347,7 @@
self.assertEqual(entity.modification_date, None)
def test_synchronize(self):
- source = self.repo.sources_by_uri['ldapuser']
+ source = self.repo.sources_by_uri['ldap']
source.synchronize()
def test_base(self):
@@ -597,8 +662,8 @@
def setUp(self):
self.handler = get_test_db_handler(LDAPUserSourceTC.config)
- self.handler.build_db_cache('ldap-user', LDAPUserSourceTC.pre_setup_database)
- self.handler.restore_database('ldap-user')
+ self.handler.build_db_cache('ldap-rqlgenerator', LDAPUserSourceTC.pre_setup_database)
+ self.handler.restore_database('ldap-rqlgenerator')
self._repo = repo = self.handler.get_repo()
self._schema = repo.schema
super(RQL2LDAPFilterTC, self).setUp()
--- a/server/test/unittest_rql2sql.py Mon Apr 22 14:54:22 2013 +0200
+++ b/server/test/unittest_rql2sql.py Tue Apr 23 18:15:10 2013 +0200
@@ -106,12 +106,12 @@
("Personne P WHERE P test TRUE",
'''SELECT _P.cw_eid
FROM cw_Personne AS _P
-WHERE _P.cw_test=TRUE'''),
+WHERE _P.cw_test=True'''),
("Personne P WHERE P test false",
'''SELECT _P.cw_eid
FROM cw_Personne AS _P
-WHERE _P.cw_test=FALSE'''),
+WHERE _P.cw_test=False'''),
("Personne P WHERE P eid -1",
'''SELECT -1'''),
@@ -532,7 +532,7 @@
("Any X WHERE X eid 0, X test TRUE",
'''SELECT _X.cw_eid
FROM cw_Personne AS _X
-WHERE _X.cw_eid=0 AND _X.cw_test=TRUE'''),
+WHERE _X.cw_eid=0 AND _X.cw_test=True'''),
('Any 1 WHERE X in_group G, X is CWUser',
'''SELECT 1
@@ -1756,7 +1756,7 @@
class SqlServer2005SQLGeneratorTC(PostgresSQLGeneratorTC):
backend = 'sqlserver2005'
def _norm_sql(self, sql):
- return sql.strip().replace(' SUBSTR', ' SUBSTRING').replace(' || ', ' + ').replace(' ILIKE ', ' LIKE ').replace('TRUE', '1').replace('FALSE', '0')
+ return sql.strip().replace(' SUBSTR', ' SUBSTRING').replace(' || ', ' + ').replace(' ILIKE ', ' LIKE ')
def test_has_text(self):
for t in self._parse(HAS_TEXT_LG_INDEXER):
@@ -1941,7 +1941,7 @@
backend = 'sqlite'
def _norm_sql(self, sql):
- return sql.strip().replace(' ILIKE ', ' LIKE ').replace('TRUE', '1').replace('FALSE', '0')
+ return sql.strip().replace(' ILIKE ', ' LIKE ')
def test_date_extraction(self):
self._check("Any MONTH(D) WHERE P is Personne, P creation_date D",
@@ -2108,7 +2108,7 @@
backend = 'mysql'
def _norm_sql(self, sql):
- sql = sql.strip().replace(' ILIKE ', ' LIKE ').replace('TRUE', '1').replace('FALSE', '0')
+ sql = sql.strip().replace(' ILIKE ', ' LIKE ')
newsql = []
latest = None
for line in sql.splitlines(False):
--- a/sobjects/cwxmlparser.py Mon Apr 22 14:54:22 2013 +0200
+++ b/sobjects/cwxmlparser.py Tue Apr 23 18:15:10 2013 +0200
@@ -218,7 +218,7 @@
return entity
def process_relations(self, entity, rels):
- etype = entity.__regid__
+ etype = entity.cw_etype
for (rtype, role, action), rules in self.source.mapping.get(etype, {}).iteritems():
try:
related_items = rels[role][rtype]
--- a/sobjects/ldapparser.py Mon Apr 22 14:54:22 2013 +0200
+++ b/sobjects/ldapparser.py Tue Apr 23 18:15:10 2013 +0200
@@ -42,11 +42,13 @@
@cachedproperty
def source_entities_by_extid(self):
source = self.source
- return dict((userdict['dn'], userdict)
- for userdict in source._search(self._cw,
- source.user_base_dn,
- source.user_base_scope,
- self.searchfilterstr))
+ if source.user_base_dn.strip():
+ return dict((userdict['dn'], userdict)
+ for userdict in source._search(self._cw,
+ source.user_base_dn,
+ source.user_base_scope,
+ self.searchfilterstr))
+ return {}
def process(self, url, raise_on_error=False):
"""IDataFeedParser main entry point"""
@@ -85,7 +87,7 @@
# disable read security to allow password selection
with entity._cw.security_enabled(read=False):
entity.complete(tuple(attrs))
- if entity.__regid__ == 'CWUser':
+ if entity.cw_etype == 'CWUser':
wf = entity.cw_adapt_to('IWorkflowable')
if wf.state == 'deactivated':
wf.fire_transition('activate')
@@ -112,7 +114,7 @@
return tdict
def before_entity_copy(self, entity, sourceparams):
- if entity.__regid__ == 'EmailAddress':
+ if entity.cw_etype == 'EmailAddress':
entity.cw_edited['address'] = sourceparams['address']
else:
self.ldap2cwattrs(sourceparams, entity.cw_edited)
@@ -126,7 +128,7 @@
def after_entity_copy(self, entity, sourceparams):
super(DataFeedLDAPAdapter, self).after_entity_copy(entity, sourceparams)
- if entity.__regid__ == 'EmailAddress':
+ if entity.cw_etype == 'EmailAddress':
return
groups = filter(None, [self._get_group(name)
for name in self.source.user_default_groups])
--- a/sobjects/notification.py Mon Apr 22 14:54:22 2013 +0200
+++ b/sobjects/notification.py Tue Apr 23 18:15:10 2013 +0200
@@ -195,12 +195,3 @@
entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0)
return u'%s #%s (%s)' % (self._cw.__('Updated %s' % entity.e_schema),
entity.eid, self.user_data['login'])
-
-
-from cubicweb.hooks.notification import RenderAndSendNotificationView
-from cubicweb.mail import parse_message_id
-
-NormalizedTextView = class_renamed('NormalizedTextView', ContentAddedView)
-RenderAndSendNotificationView = class_moved(RenderAndSendNotificationView)
-parse_message_id = deprecated('parse_message_id is now defined in cubicweb.mail')(parse_message_id)
-
--- a/test/unittest_rset.py Mon Apr 22 14:54:22 2013 +0200
+++ b/test/unittest_rset.py Tue Apr 23 18:15:10 2013 +0200
@@ -363,7 +363,7 @@
('CWGroup', 'users'))
for entity in rset.entities(): # test get_entity for each row actually
etype, n = expected[entity.cw_row]
- self.assertEqual(entity.__regid__, etype)
+ self.assertEqual(entity.cw_etype, etype)
attr = etype == 'Bookmark' and 'title' or 'name'
self.assertEqual(entity.cw_attr_cache[attr], n)
@@ -385,7 +385,7 @@
self.assertEqual(rtype, 'title')
self.assertEqual(entity.title, 'aaaa')
entity, rtype = rset.related_entity(1, 1)
- self.assertEqual(entity.__regid__, 'CWGroup')
+ self.assertEqual(entity.cw_etype, 'CWGroup')
self.assertEqual(rtype, 'name')
self.assertEqual(entity.name, 'guests')
--- a/web/box.py Mon Apr 22 14:54:22 2013 +0200
+++ b/web/box.py Tue Apr 23 18:15:10 2013 +0200
@@ -174,7 +174,7 @@
self._cw.add_js('cubicweb.ajax.js')
entity = self.cw_rset.get_entity(row, col)
title = display_name(self._cw, self.rtype, get_role(self),
- context=entity.__regid__)
+ context=entity.cw_etype)
box = SideBoxWidget(title, self.__regid__)
related = self.related_boxitems(entity)
unrelated = self.unrelated_boxitems(entity)
--- a/web/component.py Mon Apr 22 14:54:22 2013 +0200
+++ b/web/component.py Tue Apr 23 18:15:10 2013 +0200
@@ -462,7 +462,7 @@
eid = entity.eid
else:
eid = None
- form['etype'] = entity.__regid__
+ form['etype'] = entity.cw_etype
form['tempEid'] = entity.eid
args = [json_dumps(x) for x in (registry, oid, eid, params)]
return self._cw.ajax_replace_url(
@@ -546,7 +546,7 @@
for _, eid in field.vocabulary(form):
if eid not in skip:
entity = self._cw.entity_from_eid(eid)
- if filteretype is None or entity.__regid__ == filteretype:
+ if filteretype is None or entity.cw_etype == filteretype:
entities.append(entity)
return entities
@@ -562,7 +562,7 @@
def render_title(self, w):
w(display_name(self._cw, self.rtype, role(self),
- context=self.entity.__regid__))
+ context=self.entity.cw_etype))
def render_body(self, w):
self._cw.add_js('cubicweb.ajax.js')
@@ -614,7 +614,7 @@
def render_title(self, w):
w(self.rdef.rtype.display_name(self._cw, self.role,
- context=self.entity.__regid__))
+ context=self.entity.cw_etype))
def render_body(self, w):
req = self._cw
--- a/web/views/autoform.py Mon Apr 22 14:54:22 2013 +0200
+++ b/web/views/autoform.py Tue Apr 23 18:15:10 2013 +0200
@@ -272,7 +272,7 @@
**kwargs)
def form_title(self, entity, i18nctx):
- return self._cw.pgettext(i18nctx, entity.__regid__)
+ return self._cw.pgettext(i18nctx, entity.cw_etype)
def add_hiddens(self, form, entity):
"""to ease overriding (see cubes.vcsfile.views.forms for instance)"""
@@ -498,7 +498,7 @@
for rschema, role, related in field.relations_table(form):
# already linked entities
if related:
- label = rschema.display_name(req, role, context=form.edited_entity.__regid__)
+ label = rschema.display_name(req, role, context=form.edited_entity.cw_etype)
w(u'<tr><th class="labelCol">%s</th>' % label)
w(u'<td>')
w(u'<ul>')
@@ -608,11 +608,11 @@
pendingid = 'id' + pendingid
if int(eidfrom) == entity.eid: # subject
label = display_name(form._cw, rtype, 'subject',
- entity.__regid__)
+ entity.cw_etype)
reid = eidto
else:
label = display_name(form._cw, rtype, 'object',
- entity.__regid__)
+ entity.cw_etype)
reid = eidfrom
jscall = "javascript: cancelPendingInsert('%s', 'tr', null, %s);" \
% (pendingid, entity.eid)
@@ -852,7 +852,7 @@
for rschema, _, role in self._relations_by_section('relations',
strict=True):
result.append( (rschema.display_name(self.edited_entity._cw, role,
- self.edited_entity.__regid__),
+ self.edited_entity.cw_etype),
rschema, role) )
return sorted(result)
--- a/web/views/baseviews.py Mon Apr 22 14:54:22 2013 +0200
+++ b/web/views/baseviews.py Tue Apr 23 18:15:10 2013 +0200
@@ -593,7 +593,7 @@
year, month = key
label = u'%s %s [%s]' % (self._cw._(calendar.MONTHNAMES[int(month)-1]),
year, len(items))
- etypes = set(entity.__regid__ for entity in items)
+ etypes = set(entity.cw_etype for entity in items)
vtitle = '%s %s' % (', '.join(display_name(self._cw, etype, 'plural')
for etype in etypes),
label)
@@ -620,7 +620,7 @@
if key[0] is None:
return
label = u'%s [%s]' % (key[0], len(items))
- etypes = set(entity.__regid__ for entity in items)
+ etypes = set(entity.cw_etype for entity in items)
vtitle = self._cw._('%(etype)s by %(author)s') % {
'etype': ', '.join(display_name(self._cw, etype, 'plural')
for etype in etypes),
--- a/web/views/forms.py Mon Apr 22 14:54:22 2013 +0200
+++ b/web/views/forms.py Tue Apr 23 18:15:10 2013 +0200
@@ -358,7 +358,7 @@
'autoform_field', self._cw, entity=self.edited_entity)
self.uicfg_affk = self._cw.vreg['uicfg'].select(
'autoform_field_kwargs', self._cw, entity=self.edited_entity)
- self.add_hidden('__type', self.edited_entity.__regid__, eidparam=True)
+ self.add_hidden('__type', self.edited_entity.cw_etype, eidparam=True)
self.add_hidden('eid', self.edited_entity.eid)
# mainform default to true in parent, hence default to True
if kwargs.get('mainform', True) or kwargs.get('mainentity', False):
--- a/web/views/json.py Mon Apr 22 14:54:22 2013 +0200
+++ b/web/views/json.py Tue Apr 23 18:15:10 2013 +0200
@@ -114,7 +114,7 @@
entity.complete() # fetch all attributes
# hack to add extra metadata
entity.cw_attr_cache.update({
- '__cwetype__': entity.__regid__,
+ '__cwetype__': entity.cw_etype,
})
entities.append(entity)
self.wdata(entities)
--- a/web/views/primary.py Mon Apr 22 14:54:22 2013 +0200
+++ b/web/views/primary.py Tue Apr 23 18:15:10 2013 +0200
@@ -362,7 +362,7 @@
label = self._cw._(dispctrl['label'])
else:
label = display_name(self._cw, rschema.type, role,
- context=entity.__regid__)
+ context=entity.cw_etype)
return label
return u''
--- a/web/views/treeview.py Mon Apr 22 14:54:22 2013 +0200
+++ b/web/views/treeview.py Tue Apr 23 18:15:10 2013 +0200
@@ -68,7 +68,7 @@
self.close_item(entity)
def open_item(self, entity):
- self.w(u'<li class="%s">\n' % entity.__regid__.lower())
+ self.w(u'<li class="%s">\n' % entity.cw_etype.lower())
def close_item(self, entity):
self.w(u'</li>\n')
--- a/web/views/xmlrss.py Mon Apr 22 14:54:22 2013 +0200
+++ b/web/views/xmlrss.py Tue Apr 23 18:15:10 2013 +0200
@@ -78,7 +78,7 @@
entity.complete()
source = entity.cw_metainformation()['source']['uri']
self.w(u'<%s eid="%s" cwuri="%s" cwsource="%s">\n'
- % (entity.__regid__, entity.eid, xml_escape(entity.cwuri),
+ % (entity.cw_etype, entity.eid, xml_escape(entity.cwuri),
xml_escape(source)))
for rschema, attrschema in entity.e_schema.attribute_definitions():
attr = rschema.type