[schema / sources] store data sources as cubicweb entities
this implies several changes:
* new CWSource / CWSourceHostConfig entity types
* only the system source and the default admin login/password are stored in the
  sources file (other entries are ignored)
* on startup, sources definitions are read from the database
* every entity has a cw_source relation
* a facet allows filtering entities by their source (see the sketch after this list)
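
a minimal sketch for illustration only (not part of this patch; `cnx` is assumed to
be an in-memory connection such as the one used in serverctl, and the config keys
shown are examples for a pyrorql source):

  # declare a new source as a regular CWSource entity; config holds one key=value
  # per line, allowed keys depending on the source type. The syncsources hooks
  # added below register the new source in the repository at commit time.
  req = cnx.request()
  req.create_entity('CWSource', name=u'extern', type=u'pyrorql',
                    config=u'pyro-ns-id=extern\nmapping-file=extern_mapping.py')
  cnx.commit()

  # every entity now has a cw_source relation, so a source can be used in RQL filters
  rset = req.execute('Any X WHERE X is CWUser, X cw_source S, S name "system"')
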
--- a/__pkginfo__.py Sat Oct 09 00:05:50 2010 +0200
+++ b/__pkginfo__.py Sat Oct 09 00:05:52 2010 +0200
@@ -22,7 +22,7 @@
modname = distname = "cubicweb"
-numversion = (3, 9, 8)
+numversion = (3, 10, 0)
version = '.'.join(str(num) for num in numversion)
description = "a repository of entities / relations for knowledge management"
--- a/dataimport.py Sat Oct 09 00:05:50 2010 +0200
+++ b/dataimport.py Sat Oct 09 00:05:52 2010 +0200
@@ -651,6 +651,11 @@
class MetaGenerator(object):
+ META_RELATIONS = (META_RTYPES
+ - VIRTUAL_RTYPES
+ - set(('eid', 'cwuri',
+ 'is', 'is_instance_of', 'cw_source')))
+
def __init__(self, session, baseurl=None):
self.session = session
self.source = session.repo.system_source
@@ -669,17 +674,11 @@
#self.entity_rels = [] XXX not handled (YAGNI?)
schema = session.vreg.schema
rschema = schema.rschema
- for rtype in META_RTYPES:
- if rtype in ('eid', 'cwuri') or rtype in VIRTUAL_RTYPES:
- continue
+ for rtype in self.META_RELATIONS:
if rschema(rtype).final:
self.etype_attrs.append(rtype)
else:
self.etype_rels.append(rtype)
- if not schema._eid_index:
- # test schema loaded from the fs
- self.gen_is = self.test_gen_is
- self.gen_is_instance_of = self.test_gen_is_instanceof
@cached
def base_etype_dicts(self, etype):
@@ -710,26 +709,7 @@
def gen_modification_date(self, entity):
return self.time
- def gen_is(self, entity):
- return entity.e_schema.eid
- def gen_is_instance_of(self, entity):
- eids = []
- for etype in entity.e_schema.ancestors() + [entity.e_schema]:
- eids.append(entity.e_schema.eid)
- return eids
-
def gen_created_by(self, entity):
return self.session.user.eid
def gen_owned_by(self, entity):
return self.session.user.eid
-
- # implementations of gen_is / gen_is_instance_of to use during test where
- # schema has been loaded from the fs (hence entity type schema eids are not
- # known)
- def test_gen_is(self, entity):
- return eschema_eid(self.session, entity.e_schema)
- def test_gen_is_instanceof(self, entity):
- eids = []
- for eschema in entity.e_schema.ancestors() + [entity.e_schema]:
- eids.append(eschema_eid(self.session, eschema))
- return eids
--- a/devtools/__init__.py Sat Oct 09 00:05:50 2010 +0200
+++ b/devtools/__init__.py Sat Oct 09 00:05:52 2010 +0200
@@ -158,6 +158,8 @@
sources = super(TestServerConfiguration, self).sources()
if not sources:
sources = DEFAULT_SOURCES
+ if 'admin' not in sources:
+ sources['admin'] = DEFAULT_SOURCES['admin']
return sources
--- a/devtools/fake.py Sat Oct 09 00:05:50 2010 +0200
+++ b/devtools/fake.py Sat Oct 09 00:05:52 2010 +0200
@@ -170,6 +170,7 @@
self.config = config or FakeConfig()
self.vreg = vreg or CubicWebVRegistry(self.config, initlog=False)
self.vreg.schema = schema
+ self.sources = []
def internal_session(self):
return FakeSession(self)
--- a/devtools/repotest.py Sat Oct 09 00:05:50 2010 +0200
+++ b/devtools/repotest.py Sat Oct 09 00:05:52 2010 +0200
@@ -284,8 +284,7 @@
self.repo.vreg.rqlhelper.backend = 'postgres' # so FTIRANK is considered
def add_source(self, sourcecls, uri):
- self.sources.append(sourcecls(self.repo, self.o.schema,
- {'uri': uri}))
+ self.sources.append(sourcecls(self.repo, {'uri': uri}))
self.repo.sources_by_uri[uri] = self.sources[-1]
setattr(self, uri, self.sources[-1])
self.newsources += 1
--- a/devtools/testlib.py Sat Oct 09 00:05:50 2010 +0200
+++ b/devtools/testlib.py Sat Oct 09 00:05:52 2010 +0200
@@ -294,6 +294,9 @@
def set_debug(self, debugmode):
server.set_debug(debugmode)
+ def debugged(self, debugmode):
+ return server.debugged(debugmode)
+
# default test setup and teardown #########################################
def setUp(self):
--- a/entities/schemaobjs.py Sat Oct 09 00:05:50 2010 +0200
+++ b/entities/schemaobjs.py Sat Oct 09 00:05:52 2010 +0200
@@ -15,12 +15,15 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""schema definition related entities
+"""schema definition related entities"""
-"""
__docformat__ = "restructuredtext en"
+import re
+from socket import gethostname
+
from logilab.common.decorators import cached
+from logilab.common.textutils import text_to_dict
from yams.schema import role_name
@@ -30,6 +33,41 @@
from cubicweb.entities import AnyEntity, fetch_config
+
+class CWSource(AnyEntity):
+ __regid__ = 'CWSource'
+ fetch_attrs, fetch_order = fetch_config(['name', 'type'])
+
+ @property
+ def dictconfig(self):
+ return self.config and text_to_dict(self.config) or {}
+
+ @property
+ def host_config(self):
+ dictconfig = self.dictconfig
+ host = gethostname()
+ for hostcfg in self.host_configs:
+            if hostcfg.match(host):
+ dictconfig.update(hostcfg.dictconfig)
+ return dictconfig
+
+ @property
+ def host_configs(self):
+ return self.reverse_cw_host_config_of
+
+
+class CWSourceHostConfig(AnyEntity):
+ __regid__ = 'CWSourceHostConfig'
+ fetch_attrs, fetch_order = fetch_config(['match_host', 'config'])
+
+ @property
+ def dictconfig(self):
+ return self.config and text_to_dict(self.config) or {}
+
+ def match(self, hostname):
+ return re.match(self.match_host, hostname)
+
+
class CWEType(AnyEntity):
__regid__ = 'CWEType'
fetch_attrs, fetch_order = fetch_config(['name'])
--- a/hooks/metadata.py Sat Oct 09 00:05:50 2010 +0200
+++ b/hooks/metadata.py Sat Oct 09 00:05:52 2010 +0200
@@ -23,7 +23,6 @@
from cubicweb.selectors import is_instance
from cubicweb.server import hook
-from cubicweb.server.utils import eschema_eid
class MetaDataHook(hook.Hook):
@@ -77,30 +76,6 @@
session.add_relation(eid, 'created_by', session.user.eid)
-class SetIsHook(MetaDataHook):
- """create a new entity -> set is and is_instance_of relations
-
- those relations are inserted using sql so they are not hookable.
- """
- __regid__ = 'setis'
- events = ('after_add_entity',)
-
- def __call__(self):
- if hasattr(self.entity, '_cw_recreating'):
- return
- session = self._cw
- entity = self.entity
- try:
- session.system_sql('INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)'
- % (entity.eid, eschema_eid(session, entity.e_schema)))
- except IndexError:
- # during schema serialization, skip
- return
- for eschema in entity.e_schema.ancestors() + [entity.e_schema]:
- session.system_sql('INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)'
- % (entity.eid, eschema_eid(session, eschema)))
-
-
class SetOwnershipHook(MetaDataHook):
"""create a new entity -> set owner and creator metadata"""
__regid__ = 'setowner'
--- a/hooks/syncschema.py Sat Oct 09 00:05:50 2010 +0200
+++ b/hooks/syncschema.py Sat Oct 09 00:05:52 2010 +0200
@@ -257,7 +257,12 @@
gmap = group_mapping(session)
cmap = ss.cstrtype_mapping(session)
for rtype in (META_RTYPES - VIRTUAL_RTYPES):
- rschema = schema[rtype]
+ try:
+ rschema = schema[rtype]
+        except KeyError:
+ if rtype == 'cw_source':
+ continue # XXX 3.10 migration
+ raise
sampletype = rschema.subjects()[0]
desttype = rschema.objects()[0]
rdef = copy(rschema.rdef(sampletype, desttype))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/hooks/syncsources.py Sat Oct 09 00:05:52 2010 +0200
@@ -0,0 +1,33 @@
+from cubicweb import ValidationError
+from cubicweb.selectors import is_instance
+from cubicweb.server import hook
+
+class SourceHook(hook.Hook):
+ __abstract__ = True
+ category = 'cw.sources'
+
+
+class SourceAddedOp(hook.Operation):
+ def precommit_event(self):
+ self.session.repo.add_source(self.entity)
+
+class SourceAddedHook(SourceHook):
+ __regid__ = 'cw.sources.added'
+ __select__ = SourceHook.__select__ & is_instance('CWSource')
+ events = ('after_add_entity',)
+ def __call__(self):
+ SourceAddedOp(self._cw, entity=self.entity)
+
+
+class SourceRemovedOp(hook.Operation):
+ def precommit_event(self):
+ self.session.repo.remove_source(self.uri)
+
+class SourceRemovedHook(SourceHook):
+ __regid__ = 'cw.sources.removed'
+ __select__ = SourceHook.__select__ & is_instance('CWSource')
+ events = ('before_delete_entity',)
+ def __call__(self):
+ if self.entity.name == 'system':
+            raise ValidationError(self.entity.eid, {None: "can't remove the system source"})
+ SourceRemovedOp(self._cw, uri=self.entity.name)
--- a/misc/migration/3.10.0_Any.py Sat Oct 09 00:05:50 2010 +0200
+++ b/misc/migration/3.10.0_Any.py Sat Oct 09 00:05:52 2010 +0200
@@ -1,3 +1,33 @@
+from cubicweb.server.session import hooks_control
+
+for uri, cfg in config.sources().items():
+ if uri in ('system', 'admin'):
+ continue
+ repo.sources_by_uri[uri] = repo.get_source(cfg['adapter'], uri, cfg)
+
+add_entity_type('CWSource')
+add_relation_definition('CWSource', 'cw_source', 'CWSource')
+add_entity_type('CWSourceHostConfig')
+
+with hooks_control(session, session.HOOKS_ALLOW_ALL, 'cw.sources'):
+ create_entity('CWSource', type=u'native', name=u'system')
+commit()
+
+sql('INSERT INTO cw_source_relation(eid_from,eid_to) '
+ 'SELECT e.eid,s.cw_eid FROM entities as e, cw_CWSource as s '
+ 'WHERE s.cw_name=e.type')
+commit()
+
+for uri, cfg in config.sources().items():
+ if uri in ('system', 'admin'):
+ continue
+ repo.sources_by_uri.pop(uri)
+    sconfig = u'\n'.join('%s=%s' % (key, value) for key, value in cfg.items()
+                         if key != 'adapter')
+    create_entity('CWSource', name=unicode(uri), type=unicode(cfg['adapter']),
+                  config=sconfig)
+commit()
+
# rename cwprops for boxes/contentnavigation
for x in rql('Any X,XK WHERE X pkey XK, '
'X pkey ~= "boxes.%s" OR '
--- a/misc/migration/postcreate.py Sat Oct 09 00:05:50 2010 +0200
+++ b/misc/migration/postcreate.py Sat Oct 09 00:05:52 2010 +0200
@@ -15,9 +15,8 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-"""cubicweb post creation script, set user's workflow
+"""cubicweb post creation script, set user's workflow"""
-"""
# insert versions
create_entity('CWProperty', pkey=u'system.version.cubicweb',
value=unicode(config.cubicweb_version()))
--- a/schema.py Sat Oct 09 00:05:50 2010 +0200
+++ b/schema.py Sat Oct 09 00:05:52 2010 +0200
@@ -49,7 +49,7 @@
# set of meta-relations available for every entity types
META_RTYPES = set((
'owned_by', 'created_by', 'is', 'is_instance_of', 'identity',
- 'eid', 'creation_date', 'modification_date', 'has_text', 'cwuri',
+ 'eid', 'creation_date', 'cw_source', 'modification_date', 'has_text', 'cwuri',
))
WORKFLOW_RTYPES = set(('custom_workflow', 'in_state', 'wf_info_for'))
WORKFLOW_DEF_RTYPES = set(('workflow_of', 'state_of', 'transition_of',
--- a/schemas/base.py Sat Oct 09 00:05:50 2010 +0200
+++ b/schemas/base.py Sat Oct 09 00:05:52 2010 +0200
@@ -20,8 +20,8 @@
__docformat__ = "restructuredtext en"
_ = unicode
-from yams.buildobjs import (EntityType, RelationType, SubjectRelation,
- String, Datetime, Password)
+from yams.buildobjs import (EntityType, RelationType, RelationDefinition,
+ SubjectRelation, String, Datetime, Password)
from cubicweb.schema import (
RQLConstraint, WorkflowableEntityType, ERQLExpression, RRQLExpression,
PUB_SYSTEM_ENTITY_PERMS, PUB_SYSTEM_REL_PERMS, PUB_SYSTEM_ATTR_PERMS)
@@ -62,7 +62,7 @@
}
alias = String(fulltextindexed=True, maxsize=56)
- address = String(required=True, fulltextindexed=True,
+ address = String(required=True, fulltextindexed=True,
indexed=True, unique=True, maxsize=128)
prefered_form = SubjectRelation('EmailAddress', cardinality='?*',
description=_('when multiple addresses are equivalent \
@@ -198,6 +198,7 @@
uri = String(required=True, unique=True, maxsize=256,
description=_('the URI of the object'))
+
class same_as(RelationType):
"""generic relation to specify that an external entity represent the same
object as a local one:
@@ -216,6 +217,7 @@
# in the cube's schema.
object = 'ExternalUri'
+
class CWCache(EntityType):
"""a simple cache entity characterized by a name and
a validity date.
@@ -234,12 +236,74 @@
'delete': ('managers',),
}
- name = String(required=True, unique=True, indexed=True, maxsize=128,
+ name = String(required=True, unique=True, maxsize=128,
description=_('name of the cache'))
timestamp = Datetime(default='NOW')
-# "abtract" relation types, not used in cubicweb itself
+class CWSource(EntityType):
+ name = String(required=True, unique=True, maxsize=128,
+ description=_('name of the source'))
+ type = String(required=True, maxsize=20, description=_('type of the source'))
+ config = String(description=_('source\'s configuration. One key=value per '
+ 'line, authorized keys depending on the '
+ 'source\'s type'),
+ __permissions__={
+ 'read': ('managers',),
+ 'update': ('managers',),
+ })
+
+
+class CWSourceHostConfig(EntityType):
+ __permissions__ = {
+ 'read': ('managers',),
+ 'add': ('managers',),
+ 'update': ('managers',),
+ 'delete': ('managers',),
+ }
+ match_host = String(required=True, unique=True, maxsize=128,
+ description=_('regexp matching host(s) to which this config applies'))
+ config = String(required=True,
+ description=_('Source\'s configuration for a particular host. '
+ 'One key=value per line, authorized keys '
+ 'depending on the source\'s type, overriding '
+ 'values defined on the source.'),
+ __permissions__={
+ 'read': ('managers',),
+ 'update': ('managers',),
+ })
+
+
+class cw_host_config_of(RelationDefinition):
+ subject = 'CWSourceHostConfig'
+ object = 'CWSource'
+ cardinality = '1*'
+ composite = 'object'
+ inlined = True
+
+class cw_source(RelationDefinition):
+ __permissions__ = {
+ 'read': ('managers', 'users', 'guests'),
+ 'add': (),
+ 'delete': (),
+ }
+ subject = '*'
+ object = 'CWSource'
+ cardinality = '1*'
+
+class cw_support(RelationDefinition):
+ subject = 'CWSource'
+ object = ('CWEType', 'CWRType')
+
+class cw_dont_cross(RelationDefinition):
+ subject = 'CWSource'
+ object = 'CWRType'
+
+class cw_may_cross(RelationDefinition):
+ subject = 'CWSource'
+ object = 'CWRType'
+
+# "abtract" relation types, no definition in cubicweb itself ###################
class identical_to(RelationType):
"""identical to"""
--- a/server/__init__.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/__init__.py Sat Oct 09 00:05:52 2010 +0200
@@ -19,8 +19,8 @@
(repository) side
This module contains functions to initialize a new repository.
+"""
-"""
from __future__ import with_statement
__docformat__ = "restructuredtext en"
@@ -61,7 +61,6 @@
else:
DEBUG |= debugmode
-
class debugged(object):
"""repository debugging context manager / decorator
@@ -132,7 +131,6 @@
config.consider_user_state = False
config.set_language = False
# only enable the system source at initialization time
- config.enabled_sources = ('system',)
repo = Repository(config, vreg=vreg)
schema = repo.schema
sourcescfg = config.sources()
@@ -162,6 +160,12 @@
sqlcnx.commit()
sqlcnx.close()
session = repo.internal_session()
+ # insert entity representing the system source
+ ssource = session.create_entity('CWSource', type=u'native', name=u'system')
+ repo.system_source.eid = ssource.eid
+ session.execute('SET X cw_source X WHERE X eid %(x)s', {'x': ssource.eid})
+ # insert base groups and default admin
+ print '-> inserting default user and default groups.'
try:
login = unicode(sourcescfg['admin']['login'])
pwd = sourcescfg['admin']['password']
@@ -171,17 +175,18 @@
login, pwd = manager_userpasswd(msg=msg, confirm=True)
else:
login, pwd = unicode(source['db-user']), source['db-password']
- print '-> inserting default user and default groups.'
# sort for eid predicatability as expected in some server tests
for group in sorted(BASE_GROUPS):
- session.execute('INSERT CWGroup X: X name %(name)s',
- {'name': unicode(group)})
- create_user(session, login, pwd, 'managers')
+ session.create_entity('CWGroup', name=unicode(group))
+ admin = create_user(session, login, pwd, 'managers')
+ session.execute('SET X owned_by U WHERE X is IN (CWGroup,CWSource), U eid %(u)s',
+ {'u': admin.eid})
session.commit()
repo.shutdown()
# reloging using the admin user
config._cubes = None # avoid assertion error
repo, cnx = in_memory_cnx(config, login, password=pwd)
+ repo.system_source.eid = ssource.eid # redo this manually
# trigger vreg initialisation of entity classes
config.cubicweb_appobject_path = set(('entities',))
config.cube_appobject_path = set(('entities',))
@@ -197,13 +202,7 @@
initialize_schema(config, schema, handler)
# yoo !
cnx.commit()
- config.enabled_sources = None
- for uri, source_config in config.sources().items():
- if uri in ('admin', 'system'):
- # not an actual source or init_creating already called
- continue
- source = repo.get_source(uri, source_config)
- source.init_creating()
+ repo.system_source.init_creating()
cnx.commit()
cnx.close()
session.close()
--- a/server/migractions.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/migractions.py Sat Oct 09 00:05:52 2010 +0200
@@ -63,7 +63,7 @@
from cubicweb.server import hook
try:
from cubicweb.server import SOURCE_TYPES, schemaserial as ss
- from cubicweb.server.utils import manager_userpasswd, ask_source_config
+ from cubicweb.server.utils import manager_userpasswd
from cubicweb.server.sqlutils import sqlexec, SQL_PREFIX
except ImportError: # LAX
pass
@@ -642,13 +642,6 @@
for cube in newcubes:
self.cmd_set_property('system.version.'+cube,
self.config.cube_version(cube))
- if cube in SOURCE_TYPES:
- # don't use config.sources() in case some sources have been
- # disabled for migration
- sourcescfg = self.config.read_sources_file()
- sourcescfg[cube] = ask_source_config(cube)
- self.config.write_sources_file(sourcescfg)
- clear_cache(self.config, 'read_sources_file')
# ensure added cube is in config cubes
# XXX worth restoring on error?
if not cube in self.config._cubes:
--- a/server/msplanner.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/msplanner.py Sat Oct 09 00:05:52 2010 +0200
@@ -84,9 +84,8 @@
1. return the result of Any X WHERE X owned_by Y from system source, that's
enough (optimization of the sql querier will avoid join on CWUser, so we
will directly get local eids)
-
+"""
-"""
__docformat__ = "restructuredtext en"
from itertools import imap, ifilterfalse
@@ -94,6 +93,7 @@
from logilab.common.compat import any
from logilab.common.decorators import cached
+from rql import BadRQLQuery
from rql.stmts import Union, Select
from rql.nodes import (VariableRef, Comparison, Relation, Constant, Variable,
Not, Exists, SortTerm, Function)
@@ -434,11 +434,14 @@
# add source for relations
rschema = self._schema.rschema
termssources = {}
+ sourcerels = []
for rel in self.rqlst.iget_nodes(Relation):
# process non final relations only
# note: don't try to get schema for 'is' relation (not available
# during bootstrap)
- if not (rel.is_types_restriction() or rschema(rel.r_type).final):
+ if rel.r_type == 'cw_source':
+ sourcerels.append(rel)
+ elif not (rel.is_types_restriction() or rschema(rel.r_type).final):
# nothing to do if relation is not supported by multiple sources
# or if some source has it listed in its cross_relations
# attribute
@@ -469,6 +472,64 @@
self._handle_cross_relation(rel, relsources, termssources)
self._linkedterms.setdefault(lhsv, set()).add((rhsv, rel))
self._linkedterms.setdefault(rhsv, set()).add((lhsv, rel))
+ # extract information from cw_source relation
+ for srel in sourcerels:
+ vref = srel.children[1].children[0]
+ sourceeids, sourcenames = [], []
+ if isinstance(vref, Constant):
+ # simplified variable
+ sourceeids = None, (vref.eval(self.plan.args),)
+ else:
+ var = vref.variable
+ for rel in var.stinfo['relations'] - var.stinfo['rhsrelations']:
+ if rel.r_type in ('eid', 'name'):
+ if rel.r_type == 'eid':
+ slist = sourceeids
+ else:
+ slist = sourcenames
+ sources = [cst.eval(self.plan.args)
+ for cst in rel.children[1].get_nodes(Constant)]
+ if sources:
+ if slist:
+ # don't attempt to do anything
+ sourcenames = sourceeids = None
+ break
+ slist[:] = (rel, sources)
+ if sourceeids:
+ rel, values = sourceeids
+ sourcesdict = self._repo.sources_by_eid
+ elif sourcenames:
+ rel, values = sourcenames
+ sourcesdict = self._repo.sources_by_uri
+ else:
+ sourcesdict = None
+ if sourcesdict is not None:
+ lhs = srel.children[0]
+ try:
+ sources = [sourcesdict[key] for key in values]
+ except KeyError:
+ raise BadRQLQuery('source conflict for term %s' % lhs.as_string())
+ if isinstance(lhs, Constant):
+ source = self._session.source_from_eid(lhs.eval(self.plan.args))
+ if not source in sources:
+ raise BadRQLQuery('source conflict for term %s' % lhs.as_string())
+ else:
+ lhs = getattr(lhs, 'variable', lhs)
+ # XXX NOT NOT
+ neged = srel.neged(traverse_scope=True) or (rel and rel.neged(strict=True))
+ if neged:
+ for source in sources:
+ self._remove_source_term(source, lhs, check=True)
+ else:
+ for source, terms in sourcesterms.items():
+ if lhs in terms and not source in sources:
+ self._remove_source_term(source, lhs, check=True)
+ if rel is None:
+ self._remove_source_term(self.system_source, vref)
+ srel.parent.remove(srel)
+ elif len(var.stinfo['relations']) == 2 and not var.stinfo['selected']:
+ self._remove_source_term(self.system_source, var)
+ self.rqlst.undefine_variable(var)
return termssources
def _handle_cross_relation(self, rel, relsources, termssources):
@@ -713,9 +774,18 @@
assert isinstance(term, (rqlb.BaseNode, Variable)), repr(term)
continue # may occur with subquery column alias
if not sourcesterms[source][term]:
- del sourcesterms[source][term]
- if not sourcesterms[source]:
- del sourcesterms[source]
+ self._remove_source_term(source, term)
+
+ def _remove_source_term(self, source, term, check=False):
+ poped = self._sourcesterms[source].pop(term, None)
+ if not self._sourcesterms[source]:
+ del self._sourcesterms[source]
+ if poped is not None and check:
+ for terms in self._sourcesterms.itervalues():
+ if term in terms:
+ break
+ else:
+ raise BadRQLQuery('source conflict for term %s' % term.as_string())
def crossed_relation(self, source, relation):
return relation in self._crossrelations.get(source, ())
--- a/server/pool.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/pool.py Sat Oct 09 00:05:52 2010 +0200
@@ -34,7 +34,7 @@
# dictionnary of (source, connection), indexed by sources'uri
self.source_cnxs = {}
for source in sources:
- self.source_cnxs[source.uri] = (source, source.get_connection())
+ self.add_source(source)
if not 'system' in self.source_cnxs:
self.source_cnxs['system'] = self.source_cnxs[sources[0].uri]
self._cursors = {}
@@ -50,6 +50,15 @@
self._cursors[uri] = cursor
return cursor
+ def add_source(self, source):
+ assert not source.uri in self.source_cnxs
+ self.source_cnxs[source.uri] = (source, source.get_connection())
+
+ def remove_source(self, source):
+ source, cnx = self.source_cnxs.pop(source.uri)
+ cnx.close()
+ self._cursors.pop(source.uri, None)
+
def commit(self):
"""commit the current transaction for this user"""
# FIXME: what happends if a commit fail
--- a/server/querier.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/querier.py Sat Oct 09 00:05:52 2010 +0200
@@ -602,9 +602,7 @@
self._parse = rqlhelper.parse
self._annotate = rqlhelper.annotate
# rql planner
- # note: don't use repo.sources, may not be built yet, and also "admin"
- # isn't an actual source
- if len([uri for uri in repo.config.sources() if uri != 'admin']) < 2:
+ if len(repo.sources) < 2:
from cubicweb.server.ssplanner import SSPlanner
self._planner = SSPlanner(schema, rqlhelper)
else:
@@ -613,6 +611,14 @@
# sql generation annotator
self.sqlgen_annotate = SQLGenAnnotator(schema).annotate
+ def set_planner(self):
+ if len(self._repo.sources) < 2:
+ from cubicweb.server.ssplanner import SSPlanner
+ self._planner = SSPlanner(self.schema, self._repo.vreg.rqlhelper)
+ else:
+ from cubicweb.server.msplanner import MSPlanner
+ self._planner = MSPlanner(self.schema, self._repo.vreg.rqlhelper)
+
def parse(self, rql, annotate=False):
"""return a rql syntax tree for the given rql"""
try:
--- a/server/repository.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/repository.py Sat Oct 09 00:05:52 2010 +0200
@@ -39,7 +39,7 @@
from datetime import datetime
from time import time, localtime, strftime
-from logilab.common.decorators import cached
+from logilab.common.decorators import cached, clear_cache
from logilab.common.compat import any
from logilab.common import flatten
@@ -122,27 +122,15 @@
# initial schema, should be build or replaced latter
self.schema = schema.CubicWebSchema(config.appid)
self.vreg.schema = self.schema # until actual schema is loaded...
- # querier helper, need to be created after sources initialization
- self.querier = querier.QuerierHelper(self, self.schema)
- # sources
- self.sources = []
- self.sources_by_uri = {}
# shutdown flag
self.shutting_down = False
- # FIXME: store additional sources info in the system database ?
- # FIXME: sources should be ordered (add_entity priority)
- for uri, source_config in config.sources().items():
- if uri == 'admin':
- # not an actual source
- continue
- source = self.get_source(uri, source_config)
- self.sources_by_uri[uri] = source
- if config.source_enabled(uri):
- self.sources.append(source)
- self.system_source = self.sources_by_uri['system']
- # ensure system source is the first one
- self.sources.remove(self.system_source)
- self.sources.insert(0, self.system_source)
+ # sources (additional sources info in the system database)
+ self.system_source = self.get_source('native', 'system',
+ config.sources()['system'])
+ self.sources = [self.system_source]
+ self.sources_by_uri = {'system': self.system_source}
+ # querier helper, need to be created after sources initialization
+ self.querier = querier.QuerierHelper(self, self.schema)
# cache eid -> type / source
self._type_source_cache = {}
# cache (extid, source uri) -> eid
@@ -194,6 +182,7 @@
config.bootstrap_cubes()
self.set_schema(config.load_schema())
if not config.creating:
+ self.init_sources_from_database()
if 'CWProperty' in self.schema:
self.vreg.init_properties(self.properties())
# call source's init method to complete their initialisation if
@@ -210,7 +199,7 @@
# close initialization pool and reopen fresh ones for proper
# initialization now that we know cubes
self._get_pool().close(True)
- # list of available pools (we can't iterated on Queue instance)
+ # list of available pools (we can't iterate on Queue instance)
self.pools = []
for i in xrange(config['connections-pool-size']):
self.pools.append(pool.ConnectionsPool(self.sources))
@@ -221,9 +210,60 @@
# internals ###############################################################
- def get_source(self, uri, source_config):
+ def init_sources_from_database(self):
+ self.sources_by_eid = {}
+ if not 'CWSource' in self.schema:
+ # 3.10 migration
+ return
+ session = self.internal_session()
+ try:
+ # FIXME: sources should be ordered (add_entity priority)
+ for sourceent in session.execute(
+ 'Any S, SN, SA, SC WHERE S is CWSource, '
+ 'S name SN, S type SA, S config SC').entities():
+ if sourceent.name == 'system':
+ self.system_source.eid = sourceent.eid
+ self.sources_by_eid[sourceent.eid] = self.system_source
+ continue
+ self.add_source(sourceent, add_to_pools=False)
+ finally:
+ session.close()
+
+ def _clear_planning_caches(self):
+ for cache in ('source_defs', 'is_multi_sources_relation',
+ 'can_cross_relation', 'rel_type_sources'):
+ clear_cache(self, cache)
+
+ def add_source(self, sourceent, add_to_pools=True):
+ source = self.get_source(sourceent.type, sourceent.name,
+ sourceent.host_config)
+ source.eid = sourceent.eid
+ self.sources_by_eid[sourceent.eid] = source
+ self.sources_by_uri[sourceent.name] = source
+ if self.config.source_enabled(source):
+ self.sources.append(source)
+ self.querier.set_planner()
+ if add_to_pools:
+ for pool in self.pools:
+ pool.add_source(source)
+ self._clear_planning_caches()
+
+ def remove_source(self, uri):
+ source = self.sources_by_uri.pop(uri)
+ del self.sources_by_eid[source.eid]
+ if self.config.source_enabled(source):
+ self.sources.remove(source)
+ self.querier.set_planner()
+ for pool in self.pools:
+ pool.remove_source(source)
+ self._clear_planning_caches()
+
+ def get_source(self, type, uri, source_config):
+ # set uri and type in source config so it's available through
+ # source_defs()
source_config['uri'] = uri
- return sources.get_source(source_config, self.schema, self)
+ source_config['type'] = type
+ return sources.get_source(type, source_config, self)
def set_schema(self, schema, resetvreg=True, rebuildinfered=True):
if rebuildinfered:
@@ -525,14 +565,10 @@
This is a public method, not requiring a session id.
"""
- sources = self.config.sources().copy()
- # remove manager information
- sources.pop('admin', None)
+ sources = {}
# remove sensitive information
- for uri, sourcedef in sources.iteritems():
- sourcedef = sourcedef.copy()
- self.sources_by_uri[uri].remove_sensitive_information(sourcedef)
- sources[uri] = sourcedef
+ for uri, source in self.sources_by_uri.iteritems():
+ sources[uri] = source.cfg
return sources
def properties(self):
@@ -1016,10 +1052,6 @@
source.after_entity_insertion(session, extid, entity)
if source.should_call_hooks:
self.hm.call_hooks('after_add_entity', session, entity=entity)
- else:
- # minimal meta-data
- session.execute('SET X is E WHERE X eid %(x)s, E name %(name)s',
- {'x': entity.eid, 'name': entity.__regid__})
session.commit(reset_pool)
return eid
except:
--- a/server/serverconfig.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/serverconfig.py Sat Oct 09 00:05:52 2010 +0200
@@ -19,10 +19,11 @@
__docformat__ = "restructuredtext en"
+import sys
from os.path import join, exists
+from StringIO import StringIO
-from logilab.common.configuration import REQUIRED, Method, Configuration, \
- ini_format_section
+import logilab.common.configuration as lgconfig
from logilab.common.decorators import wproperty, cached
from cubicweb.toolsutils import read_config, restrict_perms_to_user
@@ -38,13 +39,13 @@
'level': 0,
}),
('password', {'type' : 'password',
- 'default': REQUIRED,
+ 'default': lgconfig.REQUIRED,
'help': "cubicweb manager account's password",
'level': 0,
}),
)
-class SourceConfiguration(Configuration):
+class SourceConfiguration(lgconfig.Configuration):
def __init__(self, appconfig, options):
self.appconfig = appconfig # has to be done before super call
super(SourceConfiguration, self).__init__(options=options)
@@ -54,54 +55,36 @@
return self.appconfig.appid
def input_option(self, option, optdict, inputlevel):
- if self['db-driver'] == 'sqlite':
- if option in ('db-user', 'db-password'):
- return
- if option == 'db-name':
- optdict = optdict.copy()
- optdict['help'] = 'path to the sqlite database'
- optdict['default'] = join(self.appconfig.appdatahome,
- self.appconfig.appid + '.sqlite')
+ try:
+ dbdriver = self['db-driver']
+ except lgconfig.OptionError:
+ pass
+ else:
+ if dbdriver == 'sqlite':
+ if option in ('db-user', 'db-password'):
+ return
+ if option == 'db-name':
+ optdict = optdict.copy()
+ optdict['help'] = 'path to the sqlite database'
+ optdict['default'] = join(self.appconfig.appdatahome,
+ self.appconfig.appid + '.sqlite')
super(SourceConfiguration, self).input_option(option, optdict, inputlevel)
-def generate_sources_file(appconfig, sourcesfile, sourcescfg, keys=None):
- """serialize repository'sources configuration into a INI like file
+
+def ask_source_config(appconfig, type, inputlevel=0):
+ options = SOURCE_TYPES[type].options
+ sconfig = SourceConfiguration(appconfig, options=options)
+ sconfig.input_config(inputlevel=inputlevel)
+ return sconfig
- the `keys` parameter may be used to sort sections
- """
- if keys is None:
- keys = sourcescfg.keys()
- else:
- for key in sourcescfg:
- if not key in keys:
- keys.append(key)
- stream = open(sourcesfile, 'w')
- for uri in keys:
- sconfig = sourcescfg[uri]
- if isinstance(sconfig, dict):
- # get a Configuration object
- if uri == 'admin':
- options = USER_OPTIONS
- else:
- options = SOURCE_TYPES[sconfig['adapter']].options
- _sconfig = SourceConfiguration(appconfig, options=options)
- for attr, val in sconfig.items():
- if attr == 'uri':
- continue
- if attr == 'adapter':
- _sconfig.adapter = val
- else:
- _sconfig.set_option(attr, val)
- sconfig = _sconfig
- optsbysect = list(sconfig.options_by_section())
- assert len(optsbysect) == 1, 'all options for a source should be in the same group'
- ini_format_section(stream, uri, optsbysect[0][1])
- if hasattr(sconfig, 'adapter'):
- print >> stream
- print >> stream, '# adapter for this source (YOU SHOULD NOT CHANGE THIS)'
- print >> stream, 'adapter=%s' % sconfig.adapter
- print >> stream
+def generate_source_config(sconfig):
+ """serialize a repository source configuration as text"""
+ stream = StringIO()
+ optsbysect = list(sconfig.options_by_section())
+ assert len(optsbysect) == 1, 'all options for a source should be in the same group'
+ lgconfig.ini_format(stream, optsbysect[0][1], sys.stdin.encoding)
+ return stream.getvalue()
class ServerConfiguration(CubicWebConfiguration):
@@ -121,7 +104,7 @@
}),
('pid-file',
{'type' : 'string',
- 'default': Method('default_pid_file'),
+ 'default': lgconfig.Method('default_pid_file'),
'help': 'repository\'s pid file',
'group': 'main', 'level': 2,
}),
@@ -282,16 +265,43 @@
"""
return self.read_sources_file()
- def source_enabled(self, uri):
- return not self.enabled_sources or uri in self.enabled_sources
+ def source_enabled(self, source):
+ if self.sources_mode is not None:
+ if 'migration' in self.sources_mode:
+ assert len(self.sources_mode) == 1
+ if source.connect_for_migration:
+ return True
+                print 'not connecting to source', source.uri, 'during migration'
+ return False
+ if 'all' in self.sources_mode:
+ assert len(self.sources_mode) == 1
+ return True
+ return source.uri in self.sources_mode
+ if self.quick_start:
+ return False
+ return (not source.disabled and (
+ not self.enabled_sources or source.uri in self.enabled_sources))
def write_sources_file(self, sourcescfg):
+ """serialize repository'sources configuration into a INI like file"""
sourcesfile = self.sources_file()
if exists(sourcesfile):
import shutil
shutil.copy(sourcesfile, sourcesfile + '.bak')
- generate_sources_file(self, sourcesfile, sourcescfg,
- ['admin', 'system'])
+ stream = open(sourcesfile, 'w')
+ for section in ('admin', 'system'):
+ sconfig = sourcescfg[section]
+ if isinstance(sconfig, dict):
+ # get a Configuration object
+ assert section == 'system'
+ _sconfig = SourceConfiguration(
+ self, options=SOURCE_TYPES['native'].options)
+ for attr, val in sconfig.items():
+ _sconfig.set_option(attr, val)
+ sconfig = _sconfig
+ print >> stream, '[%s]' % section
+ print >> stream, generate_source_config(sconfig)
+ print >> stream
restrict_perms_to_user(sourcesfile)
def pyro_enabled(self):
@@ -318,27 +328,9 @@
schema.name = 'bootstrap'
return schema
+ sources_mode = None
def set_sources_mode(self, sources):
- if 'migration' in sources:
- from cubicweb.server.sources import source_adapter
- assert len(sources) == 1
- enabled_sources = []
- for uri, config in self.sources().iteritems():
- if uri == 'admin':
- continue
- if source_adapter(config).connect_for_migration:
- enabled_sources.append(uri)
- else:
- print 'not connecting to source', uri, 'during migration'
- elif 'all' in sources:
- assert len(sources) == 1
- enabled_sources = None
- else:
- known_sources = self.sources()
- for uri in sources:
- assert uri in known_sources, uri
- enabled_sources = sources
- self.enabled_sources = enabled_sources
+ self.sources_mode = sources
def migration_handler(self, schema=None, interactive=True,
cnx=None, repo=None, connect=True, verbosity=None):
--- a/server/serverctl.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/serverctl.py Sat Oct 09 00:05:52 2010 +0200
@@ -32,8 +32,9 @@
from cubicweb.toolsutils import Command, CommandHandler, underline_title
from cubicweb.cwctl import CWCTL
from cubicweb.server import SOURCE_TYPES
-from cubicweb.server.serverconfig import (USER_OPTIONS, ServerConfiguration,
- SourceConfiguration)
+from cubicweb.server.serverconfig import (
+ USER_OPTIONS, ServerConfiguration, SourceConfiguration,
+ ask_source_config, generate_source_config)
# utility functions ###########################################################
@@ -161,7 +162,6 @@
"""create an instance by copying files from the given cube and by asking
information necessary to build required configuration files
"""
- from cubicweb.server.utils import ask_source_config
config = self.config
print underline_title('Configuring the repository')
config.input_config('email', inputlevel)
@@ -176,37 +176,9 @@
# defs (in native.py)
sconfig = SourceConfiguration(config,
options=SOURCE_TYPES['native'].options)
- sconfig.adapter = 'native'
sconfig.input_config(inputlevel=inputlevel)
sourcescfg = {'system': sconfig}
- for cube in cubes:
- # if a source is named as the cube containing it, we need the
- # source to use the cube, so add it.
- if cube in SOURCE_TYPES:
- sourcescfg[cube] = ask_source_config(cube, inputlevel)
print
- while ASK.confirm('Enter another source ?', default_is_yes=False):
- available = sorted(stype for stype in SOURCE_TYPES
- if not stype in cubes)
- while True:
- sourcetype = raw_input('source type (%s): ' % ', '.join(available))
- if sourcetype in available:
- break
- print '-> unknown source type, use one of the available types.'
- while True:
- sourceuri = raw_input('source identifier (a unique name used to tell sources apart): ').strip()
- if sourceuri != 'admin' and sourceuri not in sourcescfg:
- break
- print '-> uri already used, choose another one.'
- sourcescfg[sourceuri] = ask_source_config(sourcetype, inputlevel)
- sourcemodule = SOURCE_TYPES[sourcetype].module
- if not sourcemodule.startswith('cubicweb.'):
- # module names look like cubes.mycube.themodule
- sourcecube = SOURCE_TYPES[sourcetype].module.split('.', 2)[1]
- # if the source adapter is coming from an external component,
- # ensure it's specified in used cubes
- if not sourcecube in cubes:
- cubes.append(sourcecube)
sconfig = Configuration(options=USER_OPTIONS)
sconfig.input_config(inputlevel=inputlevel)
sourcescfg['admin'] = sconfig
@@ -294,7 +266,7 @@
You will be prompted for a login / password to use to connect to
the system database. The given user should have almost all rights
- on the database (ie a super user on the dbms allowed to create
+    on the database (i.e. a super user on the DBMS allowed to create
database, users, languages...).
<instance>
@@ -383,9 +355,8 @@
class InitInstanceCommand(Command):
"""Initialize the system database of an instance (run after 'db-create').
- You will be prompted for a login / password to use to connect to
- the system database. The given user should have the create tables,
- and grant permissions.
+    Notice this will be done using the user specified in the sources file, so
+    this user should have table creation and grant permissions on the database.
<instance>
the identifier of the instance to initialize.
@@ -422,6 +393,63 @@
'the %s file. Resolve this first (error: %s).'
% (config.sources_file(), str(ex).strip()))
init_repository(config, drop=self.config.drop)
+ while ASK.confirm('Enter another source ?', default_is_yes=False):
+ CWCTL.run(['add-source', config.appid])
+
+
+class AddSourceCommand(Command):
+ """Add a data source to an instance.
+
+ <instance>
+      the identifier of the instance the source will be added to.
+ """
+ name = 'add-source'
+ arguments = '<instance>'
+ min_args = max_args = 1
+ options = ()
+
+ def run(self, args):
+ appid = args[0]
+ config = ServerConfiguration.config_for(appid)
+ config.quick_start = True
+ repo, cnx = repo_cnx(config)
+ req = cnx.request()
+ used = set(n for n, in req.execute('Any SN WHERE S is CWSource, S name SN'))
+ cubes = repo.get_cubes()
+ while True:
+ type = raw_input('source type (%s): '
+ % ', '.join(sorted(SOURCE_TYPES)))
+ if type not in SOURCE_TYPES:
+ print '-> unknown source type, use one of the available types.'
+ continue
+ sourcemodule = SOURCE_TYPES[type].module
+ if not sourcemodule.startswith('cubicweb.'):
+ # module names look like cubes.mycube.themodule
+ sourcecube = SOURCE_TYPES[type].module.split('.', 2)[1]
+ # if the source adapter is coming from an external component,
+ # ensure it's specified in used cubes
+ if not sourcecube in cubes:
+                print ('-> this source type requires the %s cube which is '
+                       'not used by the instance.' % sourcecube)
+ continue
+ break
+ while True:
+ sourceuri = raw_input('source identifier (a unique name used to '
+ 'tell sources apart): ').strip()
+ if not sourceuri:
+ print '-> mandatory.'
+ else:
+ sourceuri = unicode(sourceuri, sys.stdin.encoding)
+ if sourceuri in used:
+ print '-> uri already used, choose another one.'
+ else:
+ break
+ # XXX configurable inputlevel
+ sconfig = ask_source_config(config, type, inputlevel=0)
+ cfgstr = unicode(generate_source_config(sconfig), sys.stdin.encoding)
+ req.create_entity('CWSource', name=sourceuri,
+ type=unicode(type), config=cfgstr)
+ cnx.commit()
class GrantUserOnInstanceCommand(Command):
@@ -900,7 +928,7 @@
GrantUserOnInstanceCommand, ResetAdminPasswordCommand,
StartRepositoryCommand,
DBDumpCommand, DBRestoreCommand, DBCopyCommand,
- CheckRepositoryCommand, RebuildFTICommand,
+ AddSourceCommand, CheckRepositoryCommand, RebuildFTICommand,
SynchronizeInstanceSchemaCommand,
CheckMappingCommand,
):
--- a/server/session.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/session.py Sat Oct 09 00:05:52 2010 +0200
@@ -46,6 +46,7 @@
# anyway in the later case
NO_UNDO_TYPES.add('is')
NO_UNDO_TYPES.add('is_instance_of')
+NO_UNDO_TYPES.add('cw_source')
# XXX rememberme,forgotpwd,apycot,vcsfile
def _make_description(selected, args, solution):
--- a/server/sources/__init__.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/sources/__init__.py Sat Oct 09 00:05:52 2010 +0200
@@ -99,13 +99,18 @@
dont_cross_relations = ()
cross_relations = ()
+ # force deactivation (configuration error for instance)
+ disabled = False
- def __init__(self, repo, appschema, source_config, *args, **kwargs):
+ def __init__(self, repo, source_config, *args, **kwargs):
self.repo = repo
self.uri = source_config['uri']
set_log_methods(self, getLogger('cubicweb.sources.'+self.uri))
- self.set_schema(appschema)
+ self.set_schema(repo.schema)
self.support_relations['identity'] = False
+ self.eid = None
+ self.cfg = source_config.copy()
+ self.remove_sensitive_information(self.cfg)
def init_creating(self):
"""method called by the repository once ready to create a new instance"""
@@ -219,7 +224,7 @@
def extid2eid(self, value, etype, session=None, **kwargs):
return self.repo.extid2eid(self, value, etype, session, **kwargs)
- PUBLIC_KEYS = ('adapter', 'uri')
+ PUBLIC_KEYS = ('type', 'uri')
def remove_sensitive_information(self, sourcedef):
"""remove sensitive information such as login / password from source
definition
@@ -508,17 +513,17 @@
def cursor(self):
return None # no actual cursor support
+
from cubicweb.server import SOURCE_TYPES
-def source_adapter(source_config):
- adapter_type = source_config['adapter'].lower()
+def source_adapter(source_type):
try:
- return SOURCE_TYPES[adapter_type]
+ return SOURCE_TYPES[source_type]
except KeyError:
- raise RuntimeError('Unknown adapter %r' % adapter_type)
+ raise RuntimeError('Unknown source type %r' % source_type)
-def get_source(source_config, global_schema, repo):
+def get_source(type, source_config, repo):
"""return a source adapter according to the adapter field in the
source's configuration
"""
- return source_adapter(source_config)(repo, global_schema, source_config)
+ return source_adapter(type)(repo, source_config)
--- a/server/sources/ldapuser.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/sources/ldapuser.py Sat Oct 09 00:05:52 2010 +0200
@@ -162,9 +162,8 @@
)
- def __init__(self, repo, appschema, source_config, *args, **kwargs):
- AbstractSource.__init__(self, repo, appschema, source_config,
- *args, **kwargs)
+ def __init__(self, repo, source_config, *args, **kwargs):
+ AbstractSource.__init__(self, repo, source_config, *args, **kwargs)
self.host = source_config['host']
self.protocol = source_config.get('protocol', 'ldap')
self.authmode = source_config.get('auth-mode', 'simple')
--- a/server/sources/native.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/sources/native.py Sat Oct 09 00:05:52 2010 +0200
@@ -263,13 +263,12 @@
}),
)
- def __init__(self, repo, appschema, source_config, *args, **kwargs):
+ def __init__(self, repo, source_config, *args, **kwargs):
SQLAdapterMixIn.__init__(self, source_config)
self.authentifiers = [LoginPasswordAuthentifier(self)]
- AbstractSource.__init__(self, repo, appschema, source_config,
- *args, **kwargs)
+ AbstractSource.__init__(self, repo, source_config, *args, **kwargs)
# sql generator
- self._rql_sqlgen = self.sqlgen_class(appschema, self.dbhelper,
+ self._rql_sqlgen = self.sqlgen_class(self.schema, self.dbhelper,
ATTR_MAP.copy())
# full text index helper
self.do_fti = not repo.config['delay-full-text-indexation']
@@ -880,6 +879,21 @@
attrs = {'type': entity.__regid__, 'eid': entity.eid, 'extid': extid,
'source': source.uri, 'mtime': datetime.now()}
self.doexec(session, self.sqlgen.insert('entities', attrs), attrs)
+ # insert core relations: is, is_instance_of and cw_source
+ if not hasattr(entity, '_cw_recreating'):
+ try:
+ self.doexec(session, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)'
+ % (entity.eid, eschema_eid(session, entity.e_schema)))
+ except IndexError:
+ # during schema serialization, skip
+ pass
+ else:
+ for eschema in entity.e_schema.ancestors() + [entity.e_schema]:
+ self.doexec(session, 'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)'
+ % (entity.eid, eschema_eid(session, eschema)))
+ if 'CWSource' in self.schema and source.eid is not None: # else, cw < 3.10
+ self.doexec(session, 'INSERT INTO cw_source_relation(eid_from,eid_to) '
+ 'VALUES (%s,%s)' % (entity.eid, source.eid))
# now we can update the full text index
if self.do_fti and self.need_fti_indexation(entity.__regid__):
if complete:
@@ -926,7 +940,7 @@
"""
for etype in etypes:
if not etype in self.multisources_etypes:
- self.critical('%s not listed as a multi-sources entity types. '
+            self.error('%s not listed as a multi-sources entity type. '
'Modify your configuration' % etype)
self.multisources_etypes.add(etype)
modsql = _modified_sql('entities', etypes)
@@ -1157,13 +1171,6 @@
action.changes['cw_eid'] = eid
sql = self.sqlgen.insert(SQL_PREFIX + etype, action.changes)
self.doexec(session, sql, action.changes)
- # add explicitly is / is_instance_of whose deletion is not recorded for
- # consistency with addition (done by sql in hooks)
- self.doexec(session, 'INSERT INTO is_relation(eid_from, eid_to) '
- 'VALUES(%s, %s)' % (eid, eschema_eid(session, eschema)))
- for eschema in entity.e_schema.ancestors() + [entity.e_schema]:
- self.doexec(session, 'INSERT INTO is_instance_of_relation(eid_from,'
- 'eid_to) VALUES(%s, %s)' % (eid, eschema_eid(session, eschema)))
# restore record in entities (will update fti if needed)
self.add_info(session, entity, self, None, True)
# remove record from deleted_entities if entity's type is multi-sources
@@ -1226,6 +1233,7 @@
# unvisible as transaction action
self.doexec(session, 'DELETE FROM is_relation WHERE eid_from=%s' % eid)
self.doexec(session, 'DELETE FROM is_instance_of_relation WHERE eid_from=%s' % eid)
+        self.doexec(session, 'DELETE FROM cw_source_relation WHERE eid_from=%s' % eid)
# XXX check removal of inlined relation?
# delete the entity
attrs = {'cw_eid': eid}
--- a/server/sources/pyrorql.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/sources/pyrorql.py Sat Oct 09 00:05:52 2010 +0200
@@ -64,7 +64,7 @@
assert not unknown, 'unknown mapping attribute(s): %s' % unknown
# relations that are necessarily not crossed
mapping['dont_cross_relations'] |= set(('owned_by', 'created_by'))
- for rtype in ('is', 'is_instance_of'):
+ for rtype in ('is', 'is_instance_of', 'cw_source'):
assert rtype not in mapping['dont_cross_relations'], \
'%s relation should not be in dont_cross_relations' % rtype
assert rtype not in mapping['support_relations'], \
@@ -146,17 +146,26 @@
PUBLIC_KEYS = AbstractSource.PUBLIC_KEYS + ('base-url',)
_conn = None
- def __init__(self, repo, appschema, source_config, *args, **kwargs):
- AbstractSource.__init__(self, repo, appschema, source_config,
- *args, **kwargs)
+ def __init__(self, repo, source_config, *args, **kwargs):
+ AbstractSource.__init__(self, repo, source_config, *args, **kwargs)
mappingfile = source_config['mapping-file']
if not mappingfile[0] == '/':
mappingfile = join(repo.config.apphome, mappingfile)
- mapping = load_mapping_file(mappingfile)
- self.support_entities = mapping['support_entities']
- self.support_relations = mapping['support_relations']
- self.dont_cross_relations = mapping['dont_cross_relations']
- self.cross_relations = mapping['cross_relations']
+ try:
+ mapping = load_mapping_file(mappingfile)
+ except IOError:
+ self.disabled = True
+ self.error('cant read mapping file %s, source disabled',
+ mappingfile)
+ self.support_entities = {}
+ self.support_relations = {}
+ self.dont_cross_relations = set()
+ self.cross_relations = set()
+ else:
+ self.support_entities = mapping['support_entities']
+ self.support_relations = mapping['support_relations']
+ self.dont_cross_relations = mapping['dont_cross_relations']
+ self.cross_relations = mapping['cross_relations']
baseurl = source_config.get('base-url')
if baseurl and not baseurl.endswith('/'):
source_config['base-url'] += '/'
--- a/server/test/data/sources Sat Oct 09 00:05:50 2010 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,14 +0,0 @@
-[system]
-
-db-driver = sqlite
-db-host =
-adapter = native
-db-name = tmpdb
-db-encoding = UTF-8
-db-user = admin
-db-password = gingkow
-
-[admin]
-login = admin
-password = gingkow
-
--- a/server/test/data/sources_extern Sat Oct 09 00:05:50 2010 +0200
+++ b/server/test/data/sources_extern Sat Oct 09 00:05:52 2010 +0200
@@ -1,13 +1,4 @@
[system]
-
db-driver = sqlite
-db-host =
-adapter = native
db-name = tmpdb-extern
db-encoding = UTF-8
-db-user = admin
-db-password = gingkow
-
-[admin]
-login = admin
-password = gingkow
--- a/server/test/data/sources_multi Sat Oct 09 00:05:50 2010 +0200
+++ b/server/test/data/sources_multi Sat Oct 09 00:05:52 2010 +0200
@@ -1,28 +1,5 @@
[system]
-
db-driver = sqlite
-db-host =
adapter = native
db-name = tmpdb-multi
db-encoding = UTF-8
-db-user = admin
-db-password = gingkow
-
-[extern]
-adapter = pyrorql
-pyro-ns-id = extern
-cubicweb-user = admin
-cubicweb-password = gingkow
-mapping-file = extern_mapping.py
-base-url=http://extern.org/
-
-[extern-multi]
-adapter = pyrorql
-pyro-ns-id = extern-multi
-cubicweb-user = admin
-cubicweb-password = gingkow
-mapping-file = extern_mapping.py
-
-[admin]
-login = admin
-password = gingkow
--- a/server/test/data/sourcesldap Sat Oct 09 00:05:50 2010 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,35 +0,0 @@
-[system]
-adapter=native
-# database driver (postgres or sqlite)
-db-driver=sqlite
-# database host
-db-host=
-# database name
-db-name=tmpdb
-# database user
-db-user=admin
-# database password
-db-password=gingkow
-# database encoding
-db-encoding=utf8
-
-[admin]
-login = admin
-password = gingkow
-
-[ldapuser]
-adapter=ldapuser
-# ldap host
-host=ldap1
-# base DN to lookup for usres
-user-base-dn=ou=People,dc=logilab,dc=fr
-# user search scope
-user-scope=ONELEVEL
-# classes of user
-user-classes=top,posixAccount
-# attribute used as login on authentication
-user-login-attr=uid
-# name of a group in which ldap users will be by default
-user-default-group=users
-# map from ldap user attributes to cubicweb attributes
-user-attrs-map=gecos:email,uid:login
--- a/server/test/unittest_ldapuser.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/test/unittest_ldapuser.py Sat Oct 09 00:05:52 2010 +0200
@@ -20,7 +20,6 @@
import socket
from logilab.common.testlib import TestCase, unittest_main, mock_object
-from cubicweb.devtools import TestServerConfiguration
from cubicweb.devtools.testlib import CubicWebTC
from cubicweb.devtools.repotest import RQLGeneratorTC
@@ -53,25 +52,51 @@
# don't check upassword !
return self.extid2eid(user['dn'], 'CWUser', session)
+def setup_module(*args):
+ global repo
+ LDAPUserSourceTC._init_repo()
+ repo = LDAPUserSourceTC.repo
+ add_ldap_source(LDAPUserSourceTC.cnx)
+
+def teardown_module(*args):
+ global repo
+ repo.shutdown()
+ del repo
+
+def add_ldap_source(cnx):
+ cnx.request().create_entity('CWSource', name=u'ldapuser', type=u'ldapuser',
+ config=u'''
+# ldap host
+host=ldap1
+# base DN to lookup for users
+user-base-dn=ou=People,dc=logilab,dc=fr
+# user search scope
+user-scope=ONELEVEL
+# classes of user
+user-classes=top,posixAccount
+# attribute used as login on authentication
+user-login-attr=uid
+# name of a group in which ldap users will be by default
+user-default-group=users
+# map from ldap user attributes to cubicweb attributes
+user-attrs-map=gecos:email,uid:login
+''')
+ cnx.commit()
+ # XXX: need this first query else we get 'database is locked' from
+ # sqlite since it doesn't support multiple connections on the same
+ # database
+ # so doing, ldap inserted users don't get removed between each test
+ rset = cnx.cursor().execute('CWUser X')
+ # check we get some users from ldap
+ assert len(rset) > 1
class LDAPUserSourceTC(CubicWebTC):
- config = TestServerConfiguration('data')
- config.sources_file = lambda : 'data/sourcesldap'
def patch_authenticate(self):
self._orig_authenticate = LDAPUserSource.authenticate
LDAPUserSource.authenticate = nopwd_authenticate
- def setup_database(self):
- # XXX: need this first query else we get 'database is locked' from
- # sqlite since it doesn't support multiple connections on the same
- # database
- # so doing, ldap inserted users don't get removed between each test
- rset = self.sexecute('CWUser X')
- # check we get some users from ldap
- self.assert_(len(rset) > 1)
-
def tearDown(self):
if hasattr(self, '_orig_authenticate'):
LDAPUserSource.authenticate = self._orig_authenticate
@@ -378,19 +403,10 @@
res = trfunc.apply([[1, 2], [2, 4], [3, 6], [1, 5]])
self.assertEqual(res, [[1, 5], [2, 4], [3, 6]])
-# XXX
-LDAPUserSourceTC._init_repo()
-repo = LDAPUserSourceTC.repo
-
-def teardown_module(*args):
- global repo
- del repo
- del RQL2LDAPFilterTC.schema
-
class RQL2LDAPFilterTC(RQLGeneratorTC):
- schema = repo.schema
def setUp(self):
+ self.schema = repo.schema
RQLGeneratorTC.setUp(self)
ldapsource = repo.sources[-1]
self.pool = repo._get_pool()
--- a/server/test/unittest_migractions.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/test/unittest_migractions.py Sat Oct 09 00:05:52 2010 +0200
@@ -191,7 +191,7 @@
self.failUnless(self.execute('CWRType X WHERE X name "filed_under2"'))
self.schema.rebuild_infered_relations()
self.assertEqual(sorted(str(rs) for rs in self.schema['Folder2'].subject_relations()),
- ['created_by', 'creation_date', 'cwuri',
+ ['created_by', 'creation_date', 'cw_source', 'cwuri',
'description', 'description_format',
'eid',
'filed_under2', 'has_text',
@@ -336,7 +336,7 @@
migrschema['titre'].rdefs[('Personne', 'String')].description = 'title for this person'
delete_concerne_rqlexpr = self._rrqlexpr_rset('delete', 'concerne')
add_concerne_rqlexpr = self._rrqlexpr_rset('add', 'concerne')
-
+
self.mh.cmd_sync_schema_props_perms(commit=False)
self.assertEqual(cursor.execute('Any D WHERE X name "Personne", X description D')[0][0],
--- a/server/test/unittest_msplanner.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/test/unittest_msplanner.py Sat Oct 09 00:05:52 2010 +0200
@@ -18,6 +18,8 @@
from logilab.common.decorators import clear_cache
+from rql import BadRQLQuery
+
from cubicweb.devtools import init_test_database
from cubicweb.devtools.repotest import BasePlannerTC, test_plan
@@ -59,8 +61,9 @@
{'X': 'Bookmark'}, {'X': 'CWAttribute'}, {'X': 'CWCache'},
{'X': 'CWConstraint'}, {'X': 'CWConstraintType'}, {'X': 'CWEType'},
{'X': 'CWGroup'}, {'X': 'CWPermission'}, {'X': 'CWProperty'},
- {'X': 'CWRType'}, {'X': 'CWRelation'}, {'X': 'CWUser'},
- {'X': 'CWUniqueTogetherConstraint'},
+ {'X': 'CWRType'}, {'X': 'CWRelation'},
+ {'X': 'CWSource'}, {'X': 'CWSourceHostConfig'},
+ {'X': 'CWUser'}, {'X': 'CWUniqueTogetherConstraint'},
{'X': 'Card'}, {'X': 'Comment'}, {'X': 'Division'},
{'X': 'Email'}, {'X': 'EmailAddress'}, {'X': 'EmailPart'},
{'X': 'EmailThread'}, {'X': 'ExternalUri'}, {'X': 'File'},
@@ -537,7 +540,7 @@
[self.ldap, self.system], None,
{'AA': 'table0.C1', 'X': 'table0.C0', 'X.modification_date': 'table0.C1'}, []),
('OneFetchStep',
- [('Any X,AA ORDERBY AA WHERE 5 owned_by X, X modification_date AA, X is CWUser',
+ [('Any X,AA ORDERBY AA WHERE %s owned_by X, X modification_date AA, X is CWUser' % ueid,
[{'AA': 'Datetime', 'X': 'CWUser'}])],
None, None, [self.system],
{'AA': 'table0.C1', 'X': 'table0.C0', 'X.modification_date': 'table0.C1'}, []),
@@ -687,7 +690,7 @@
def test_complex_optional(self):
ueid = self.session.user.eid
self._test('Any U WHERE WF wf_info_for X, X eid %(x)s, WF owned_by U?, WF from_state FS',
- [('OneFetchStep', [('Any U WHERE WF wf_info_for 5, WF owned_by U?, WF from_state FS',
+ [('OneFetchStep', [('Any U WHERE WF wf_info_for %s, WF owned_by U?, WF from_state FS' % ueid,
[{'WF': 'TrInfo', 'FS': 'State', 'U': 'CWUser'}])],
None, None, [self.system], {}, [])],
{'x': ueid})
@@ -695,7 +698,7 @@
def test_complex_optional(self):
ueid = self.session.user.eid
self._test('Any U WHERE WF wf_info_for X, X eid %(x)s, WF owned_by U?, WF from_state FS',
- [('OneFetchStep', [('Any U WHERE WF wf_info_for 5, WF owned_by U?, WF from_state FS',
+ [('OneFetchStep', [('Any U WHERE WF wf_info_for %s, WF owned_by U?, WF from_state FS' % ueid,
[{'WF': 'TrInfo', 'FS': 'State', 'U': 'CWUser'}])],
None, None, [self.system], {}, [])],
{'x': ueid})
@@ -751,9 +754,10 @@
])
def test_not_identity(self):
- self._test('Any X WHERE NOT X identity U, U eid %s' % self.session.user.eid,
+ ueid = self.session.user.eid
+ self._test('Any X WHERE NOT X identity U, U eid %s' % ueid,
[('OneFetchStep',
- [('Any X WHERE NOT X identity 5, X is CWUser', [{'X': 'CWUser'}])],
+ [('Any X WHERE NOT X identity %s, X is CWUser' % ueid, [{'X': 'CWUser'}])],
None, None,
[self.ldap, self.system], {}, [])
])
@@ -777,18 +781,19 @@
def test_security_has_text(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X WHERE X has_text "bla"',
[('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])],
[self.cards, self.system], None, {'E': 'table0.C0'}, []),
('UnionStep', None, None,
[('OneFetchStep',
- [(u'Any X WHERE X has_text "bla", (EXISTS(X owned_by 5)) OR ((((EXISTS(D concerne C?, C owned_by 5, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by 5, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by 5, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by 5, X identity J, E is Note, J is Affaire))), X is Affaire',
+ [(u'Any X WHERE X has_text "bla", (EXISTS(X owned_by %(ueid)s)) OR ((((EXISTS(D concerne C?, C owned_by %(ueid)s, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by %(ueid)s, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by %(ueid)s, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by %(ueid)s, X identity J, E is Note, J is Affaire))), X is Affaire' % {'ueid': ueid},
[{'C': 'Division', 'E': 'Note', 'D': 'Affaire', 'G': 'SubDivision', 'F': 'Societe', 'I': 'Affaire', 'H': 'Affaire', 'J': 'Affaire', 'X': 'Affaire'}])],
None, None, [self.system], {'E': 'table0.C0'}, []),
('OneFetchStep',
- [('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is Basket',
+ [('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is Basket' % ueid,
[{'X': 'Basket'}]),
- ('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser',
+ ('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is CWUser' % ueid,
[{'X': 'CWUser'}]),
('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Note, Personne, Societe, SubDivision, Tag)',
[{'X': 'Card'}, {'X': 'Comment'},
@@ -803,18 +808,19 @@
def test_security_has_text_limit_offset(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
# note: same as the above query but because of the subquery usage, the display differs (not printing solutions for each union)
self._test('Any X LIMIT 10 OFFSET 10 WHERE X has_text "bla"',
[('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])],
[self.cards, self.system], None, {'E': 'table1.C0'}, []),
('UnionFetchStep', [
- ('FetchStep', [('Any X WHERE X has_text "bla", (EXISTS(X owned_by 5)) OR ((((EXISTS(D concerne C?, C owned_by 5, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by 5, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by 5, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by 5, X identity J, E is Note, J is Affaire))), X is Affaire',
+ ('FetchStep', [('Any X WHERE X has_text "bla", (EXISTS(X owned_by %(ueid)s)) OR ((((EXISTS(D concerne C?, C owned_by %(ueid)s, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by %(ueid)s, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by %(ueid)s, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by %(ueid)s, X identity J, E is Note, J is Affaire))), X is Affaire' % {'ueid': ueid},
[{'C': 'Division', 'E': 'Note', 'D': 'Affaire', 'G': 'SubDivision', 'F': 'Societe', 'I': 'Affaire', 'H': 'Affaire', 'J': 'Affaire', 'X': 'Affaire'}])],
[self.system], {'E': 'table1.C0'}, {'X': 'table0.C0'}, []),
('FetchStep',
- [('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is Basket',
+ [('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is Basket' % ueid,
[{'X': 'Basket'}]),
- ('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser',
+ ('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is CWUser' % ueid,
[{'X': 'CWUser'}]),
('Any X WHERE X has_text "bla", X is IN(Card, Comment, Division, Email, EmailThread, File, Folder, Note, Personne, Societe, SubDivision, Tag)',
[{'X': 'Card'}, {'X': 'Comment'},
@@ -839,22 +845,24 @@
"""a guest user trying to see another user: EXISTS(X owned_by U) is automatically inserted"""
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X WHERE X login "bla"',
[('FetchStep',
[('Any X WHERE X login "bla", X is CWUser', [{'X': 'CWUser'}])],
[self.ldap, self.system], None, {'X': 'table0.C0'}, []),
('OneFetchStep',
- [('Any X WHERE EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])],
+ [('Any X WHERE EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])],
None, None, [self.system], {'X': 'table0.C0'}, [])])
def test_security_complex_has_text(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X WHERE X has_text "bla", X firstname "bla"',
[('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])],
[self.ldap, self.system], None, {'X': 'table0.C0'}, []),
('UnionStep', None, None, [
- ('OneFetchStep', [('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])],
+ ('OneFetchStep', [('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])],
None, None, [self.system], {'X': 'table0.C0'}, []),
('OneFetchStep', [('Any X WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])],
None, None, [self.system], {}, []),
@@ -864,11 +872,12 @@
def test_security_complex_has_text_limit_offset(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X LIMIT 10 OFFSET 10 WHERE X has_text "bla", X firstname "bla"',
[('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])],
[self.ldap, self.system], None, {'X': 'table1.C0'}, []),
('UnionFetchStep', [
- ('FetchStep', [('Any X WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])],
+ ('FetchStep', [('Any X WHERE X has_text "bla", EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])],
[self.system], {'X': 'table1.C0'}, {'X': 'table0.C0'}, []),
('FetchStep', [('Any X WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])],
[self.system], {}, {'X': 'table0.C0'}, []),
@@ -881,26 +890,30 @@
def test_security_complex_aggregat(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
+ ALL_SOLS = X_ALL_SOLS[:]
+ ALL_SOLS.remove({'X': 'CWSourceHostConfig'}) # not authorized
self._test('Any MAX(X)',
[('FetchStep', [('Any E WHERE E type "X", E is Note', [{'E': 'Note'}])],
[self.cards, self.system], None, {'E': 'table1.C0'}, []),
('FetchStep', [('Any X WHERE X is CWUser', [{'X': 'CWUser'}])],
[self.ldap, self.system], None, {'X': 'table2.C0'}, []),
('UnionFetchStep', [
- ('FetchStep', [('Any X WHERE EXISTS(X owned_by 5), X is Basket', [{'X': 'Basket'}])],
+ ('FetchStep', [('Any X WHERE EXISTS(X owned_by %s), X is Basket' % ueid, [{'X': 'Basket'}])],
[self.system], {}, {'X': 'table0.C0'}, []),
('UnionFetchStep',
[('FetchStep', [('Any X WHERE X is IN(Card, Note, State)',
[{'X': 'Card'}, {'X': 'Note'}, {'X': 'State'}])],
[self.cards, self.system], {}, {'X': 'table0.C0'}, []),
('FetchStep',
- [('Any X WHERE X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWUniqueTogetherConstraint, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)',
+ [('Any X WHERE X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWSource, CWUniqueTogetherConstraint, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)',
[{'X': 'BaseTransition'}, {'X': 'Bookmark'},
{'X': 'CWAttribute'}, {'X': 'CWCache'},
{'X': 'CWConstraint'}, {'X': 'CWConstraintType'},
{'X': 'CWEType'}, {'X': 'CWGroup'},
{'X': 'CWPermission'}, {'X': 'CWProperty'},
{'X': 'CWRType'}, {'X': 'CWRelation'},
+ {'X': 'CWSource'},
{'X': 'CWUniqueTogetherConstraint'},
{'X': 'Comment'}, {'X': 'Division'},
{'X': 'Email'}, {'X': 'EmailAddress'},
@@ -914,21 +927,24 @@
{'X': 'Workflow'}, {'X': 'WorkflowTransition'}])],
[self.system], {}, {'X': 'table0.C0'}, []),
]),
- ('FetchStep', [('Any X WHERE EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])],
+ ('FetchStep', [('Any X WHERE EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])],
[self.system], {'X': 'table2.C0'}, {'X': 'table0.C0'}, []),
- ('FetchStep', [('Any X WHERE (EXISTS(X owned_by 5)) OR ((((EXISTS(D concerne C?, C owned_by 5, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by 5, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by 5, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by 5, X identity J, E is Note, J is Affaire))), X is Affaire',
+ ('FetchStep', [('Any X WHERE (EXISTS(X owned_by %(ueid)s)) OR ((((EXISTS(D concerne C?, C owned_by %(ueid)s, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by %(ueid)s, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by %(ueid)s, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by %(ueid)s, X identity J, E is Note, J is Affaire))), X is Affaire' % {'ueid': ueid},
[{'C': 'Division', 'E': 'Note', 'D': 'Affaire', 'G': 'SubDivision', 'F': 'Societe', 'I': 'Affaire', 'H': 'Affaire', 'J': 'Affaire', 'X': 'Affaire'}])],
[self.system], {'E': 'table1.C0'}, {'X': 'table0.C0'}, []),
]),
- ('OneFetchStep', [('Any MAX(X)', X_ALL_SOLS)],
+ ('OneFetchStep', [('Any MAX(X)', ALL_SOLS)],
None, None, [self.system], {'X': 'table0.C0'}, [])
])
def test_security_complex_aggregat2(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
X_ET_ALL_SOLS = []
for s in X_ALL_SOLS:
+ if s == {'X': 'CWSourceHostConfig'}:
+ continue # not authorized
ets = {'ET': 'CWEType'}
ets.update(s)
X_ET_ALL_SOLS.append(ets)
@@ -941,28 +957,29 @@
('FetchStep', [('Any X WHERE X is CWUser', [{'X': 'CWUser'}])],
[self.ldap, self.system], None, {'X': 'table3.C0'}, []),
('UnionFetchStep',
- [('FetchStep', [('Any ET,X WHERE X is ET, EXISTS(X owned_by 5), ET is CWEType, X is Basket',
+ [('FetchStep', [('Any ET,X WHERE X is ET, EXISTS(X owned_by %s), ET is CWEType, X is Basket' % ueid,
[{'ET': 'CWEType', 'X': 'Basket'}])],
[self.system], {}, {'ET': 'table0.C0', 'X': 'table0.C1'}, []),
- ('FetchStep', [('Any ET,X WHERE X is ET, (EXISTS(X owned_by 5)) OR ((((EXISTS(D concerne C?, C owned_by 5, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by 5, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by 5, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by 5, X identity J, E is Note, J is Affaire))), ET is CWEType, X is Affaire',
+ ('FetchStep', [('Any ET,X WHERE X is ET, (EXISTS(X owned_by %(ueid)s)) OR ((((EXISTS(D concerne C?, C owned_by %(ueid)s, C type "X", X identity D, C is Division, D is Affaire)) OR (EXISTS(H concerne G?, G owned_by %(ueid)s, G type "X", X identity H, G is SubDivision, H is Affaire))) OR (EXISTS(I concerne F?, F owned_by %(ueid)s, F type "X", X identity I, F is Societe, I is Affaire))) OR (EXISTS(J concerne E?, E owned_by %(ueid)s, X identity J, E is Note, J is Affaire))), ET is CWEType, X is Affaire' % {'ueid': ueid},
[{'C': 'Division', 'E': 'Note', 'D': 'Affaire',
'G': 'SubDivision', 'F': 'Societe', 'I': 'Affaire',
'H': 'Affaire', 'J': 'Affaire', 'X': 'Affaire',
'ET': 'CWEType'}])],
[self.system], {'E': 'table2.C0'}, {'ET': 'table0.C0', 'X': 'table0.C1'},
[]),
- ('FetchStep', [('Any ET,X WHERE X is ET, EXISTS(X owned_by 5), ET is CWEType, X is CWUser',
+ ('FetchStep', [('Any ET,X WHERE X is ET, EXISTS(X owned_by %s), ET is CWEType, X is CWUser' % ueid,
[{'ET': 'CWEType', 'X': 'CWUser'}])],
[self.system], {'X': 'table3.C0'}, {'ET': 'table0.C0', 'X': 'table0.C1'}, []),
# extra UnionFetchStep could be avoided but has no cost, so don't care
('UnionFetchStep',
- [('FetchStep', [('Any ET,X WHERE X is ET, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWUniqueTogetherConstraint, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)',
+ [('FetchStep', [('Any ET,X WHERE X is ET, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWSource, CWUniqueTogetherConstraint, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)',
[{'X': 'BaseTransition', 'ET': 'CWEType'},
{'X': 'Bookmark', 'ET': 'CWEType'}, {'X': 'CWAttribute', 'ET': 'CWEType'},
{'X': 'CWCache', 'ET': 'CWEType'}, {'X': 'CWConstraint', 'ET': 'CWEType'},
{'X': 'CWConstraintType', 'ET': 'CWEType'}, {'X': 'CWEType', 'ET': 'CWEType'},
{'X': 'CWGroup', 'ET': 'CWEType'}, {'X': 'CWPermission', 'ET': 'CWEType'},
{'X': 'CWProperty', 'ET': 'CWEType'}, {'X': 'CWRType', 'ET': 'CWEType'},
+ {'X': 'CWSource', 'ET': 'CWEType'},
{'X': 'CWRelation', 'ET': 'CWEType'},
{'X': 'CWUniqueTogetherConstraint', 'ET': 'CWEType'},
{'X': 'Comment', 'ET': 'CWEType'},
@@ -993,6 +1010,7 @@
def test_security_3sources(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X, XT WHERE X is Card, X owned_by U, X title XT, U login "syt"',
[('FetchStep',
[('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])],
@@ -1001,7 +1019,7 @@
[('Any U WHERE U login "syt", U is CWUser', [{'U': 'CWUser'}])],
[self.ldap, self.system], None, {'U': 'table1.C0'}, []),
('OneFetchStep',
- [('Any X,XT WHERE X owned_by U, X title XT, EXISTS(U owned_by 5), U is CWUser, X is Card',
+ [('Any X,XT WHERE X owned_by U, X title XT, EXISTS(U owned_by %s), U is CWUser, X is Card' % ueid,
[{'X': 'Card', 'U': 'CWUser', 'XT': 'String'}])],
None, None, [self.system],
{'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1', 'U': 'table1.C0'}, [])
@@ -1011,12 +1029,13 @@
self.restore_orig_cwuser_security()
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X, XT WHERE X is Card, X owned_by U, X title XT, U login "syt"',
[('FetchStep',
[('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])],
[self.cards, self.system], None, {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, []),
('OneFetchStep',
- [('Any X,XT WHERE X owned_by U, X title XT, U login "syt", EXISTS(U identity 5), U is CWUser, X is Card',
+ [('Any X,XT WHERE X owned_by U, X title XT, U login "syt", EXISTS(U identity %s), U is CWUser, X is Card' % ueid,
[{'U': 'CWUser', 'X': 'Card', 'XT': 'String'}])],
None, None, [self.system], {'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1'}, [])
])
@@ -1025,9 +1044,10 @@
self.restore_orig_cwuser_security()
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X,XT,U WHERE X is Card, X owned_by U?, X title XT, U login L',
[('FetchStep',
- [('Any U,L WHERE U login L, EXISTS(U identity 5), U is CWUser',
+ [('Any U,L WHERE U login L, EXISTS(U identity %s), U is CWUser' % ueid,
[{'L': 'String', u'U': 'CWUser'}])],
[self.system], {}, {'L': 'table0.C1', 'U': 'table0.C0', 'U.login': 'table0.C1'}, []),
('FetchStep',
@@ -1046,6 +1066,7 @@
def test_security_3sources_limit_offset(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X, XT LIMIT 10 OFFSET 10 WHERE X is Card, X owned_by U, X title XT, U login "syt"',
[('FetchStep',
[('Any X,XT WHERE X title XT, X is Card', [{'X': 'Card', 'XT': 'String'}])],
@@ -1054,7 +1075,7 @@
[('Any U WHERE U login "syt", U is CWUser', [{'U': 'CWUser'}])],
[self.ldap, self.system], None, {'U': 'table1.C0'}, []),
('OneFetchStep',
- [('Any X,XT LIMIT 10 OFFSET 10 WHERE X owned_by U, X title XT, EXISTS(U owned_by 5), U is CWUser, X is Card',
+ [('Any X,XT LIMIT 10 OFFSET 10 WHERE X owned_by U, X title XT, EXISTS(U owned_by %s), U is CWUser, X is Card' % ueid,
[{'X': 'Card', 'U': 'CWUser', 'XT': 'String'}])],
10, 10, [self.system],
{'X': 'table0.C0', 'X.title': 'table0.C1', 'XT': 'table0.C1', 'U': 'table1.C0'}, [])
@@ -1150,7 +1171,7 @@
'X.login': 'table0.C1',
'X.modification_date': 'table0.C4',
'X.surname': 'table0.C3'}, []),
- ('OneFetchStep', [('Any X,AA,AB,AC,AD ORDERBY AA WHERE X login AA, X firstname AB, X surname AC, X modification_date AD, EXISTS(((X identity 5) OR (EXISTS(X in_group C, C name IN("managers", "staff"), C is CWGroup))) OR (EXISTS(X in_group D, 5 in_group D, NOT D name "users", D is CWGroup))), X is CWUser',
+ ('OneFetchStep', [('Any X,AA,AB,AC,AD ORDERBY AA WHERE X login AA, X firstname AB, X surname AC, X modification_date AD, EXISTS(((X identity %(ueid)s) OR (EXISTS(X in_group C, C name IN("managers", "staff"), C is CWGroup))) OR (EXISTS(X in_group D, %(ueid)s in_group D, NOT D name "users", D is CWGroup))), X is CWUser' % {'ueid': ueid},
[{'AA': 'String', 'AB': 'String', 'AC': 'String', 'AD': 'Datetime',
'C': 'CWGroup', 'D': 'CWGroup', 'X': 'CWUser'}])],
None, None, [self.system],
@@ -1227,7 +1248,7 @@
# in the source where %(x)s is not coming from and will be removed during rql
# generation for the external source
self._test('Any SN WHERE NOT X in_state S, X eid %(x)s, S name SN',
- [('OneFetchStep', [('Any SN WHERE NOT EXISTS(5 in_state S), S name SN, S is State',
+ [('OneFetchStep', [('Any SN WHERE NOT EXISTS(%s in_state S), S name SN, S is State' % ueid,
[{'S': 'State', 'SN': 'String'}])],
None, None, [self.cards, self.system], {}, [])],
{'x': ueid})
@@ -1280,12 +1301,13 @@
def test_simplified_var(self):
+ ueid = self.session.user.eid
repo._type_source_cache[999999] = ('Note', 'cards', 999999)
self._test('Any U WHERE U in_group G, (G name IN ("managers", "logilab") OR (X require_permission P?, P name "bla", P require_group G)), X eid %(x)s, U eid %(u)s',
- [('OneFetchStep', [('Any 5 WHERE 5 in_group G, (G name IN("managers", "logilab")) OR (X require_permission P?, P name "bla", P require_group G), X eid 999999',
+ [('OneFetchStep', [('Any %s WHERE %s in_group G, (G name IN("managers", "logilab")) OR (X require_permission P?, P name "bla", P require_group G), X eid 999999' % (ueid, ueid),
[{'X': 'Note', 'G': 'CWGroup', 'P': 'CWPermission'}])],
None, None, [self.system], {}, [])],
- {'x': 999999, 'u': self.session.user.eid})
+ {'x': 999999, 'u': ueid})
def test_has_text(self):
self._test('Card X WHERE X has_text "toto"',
@@ -1325,13 +1347,14 @@
def test_security_has_text_orderby_rank(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla", X firstname "bla"',
[('FetchStep', [('Any X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])],
[self.ldap, self.system], None, {'X': 'table1.C0'}, []),
('UnionFetchStep',
[('FetchStep', [('Any X WHERE X firstname "bla", X is Personne', [{'X': 'Personne'}])],
[self.system], {}, {'X': 'table0.C0'}, []),
- ('FetchStep', [('Any X WHERE EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])],
+ ('FetchStep', [('Any X WHERE EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])],
[self.system], {'X': 'table1.C0'}, {'X': 'table0.C0'}, [])]),
('OneFetchStep', [('Any X ORDERBY FTIRANK(X) WHERE X has_text "bla"',
[{'X': 'CWUser'}, {'X': 'Personne'}])],
@@ -1354,11 +1377,12 @@
def test_security_has_text_select_rank(self):
# use a guest user
self.session = self.user_groups_session('guests')
+ ueid = self.session.user.eid
self._test('Any X, FTIRANK(X) WHERE X has_text "bla", X firstname "bla"',
[('FetchStep', [('Any X,X WHERE X firstname "bla", X is CWUser', [{'X': 'CWUser'}])],
[self.ldap, self.system], None, {'X': 'table0.C1'}, []),
('UnionStep', None, None, [
- ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", EXISTS(X owned_by 5), X is CWUser', [{'X': 'CWUser'}])],
+ ('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", EXISTS(X owned_by %s), X is CWUser' % ueid, [{'X': 'CWUser'}])],
None, None, [self.system], {'X': 'table0.C1'}, []),
('OneFetchStep', [('Any X,FTIRANK(X) WHERE X has_text "bla", X firstname "bla", X is Personne', [{'X': 'Personne'}])],
None, None, [self.system], {}, []),
@@ -1436,6 +1460,7 @@
])
def test_subquery_1(self):
+ ueid = self.session.user.eid
self._test('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by D), D eid %(E)s '
'WITH A,N BEING ((Any X,N WHERE X is Tag, X name N) UNION (Any X,T WHERE X is Bookmark, X title T))',
[('FetchStep', [('Any X,N WHERE X is Tag, X name N', [{'N': 'String', 'X': 'Tag'}]),
@@ -1445,7 +1470,7 @@
('FetchStep',
[('Any B,C WHERE B login C, B is CWUser', [{'B': 'CWUser', 'C': 'String'}])],
[self.ldap, self.system], None, {'B': 'table1.C0', 'B.login': 'table1.C1', 'C': 'table1.C1'}, []),
- ('OneFetchStep', [('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by 5), B is CWUser, A is IN(Bookmark, Tag)',
+ ('OneFetchStep', [('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by %s), B is CWUser, A is IN(Bookmark, Tag)' % ueid,
[{'A': 'Bookmark', 'B': 'CWUser', 'C': 'String'},
{'A': 'Tag', 'B': 'CWUser', 'C': 'String'}])],
None, None, [self.system],
@@ -1454,9 +1479,10 @@
'C': 'table1.C1',
'N': 'table0.C1'},
[])],
- {'E': self.session.user.eid})
+ {'E': ueid})
def test_subquery_2(self):
+ ueid = self.session.user.eid
self._test('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by D), D eid %(E)s '
'WITH A,N BEING ((Any X,N WHERE X is Tag, X name N) UNION (Any X,T WHERE X is Card, X title T))',
[('UnionFetchStep',
@@ -1479,7 +1505,7 @@
('FetchStep',
[('Any B,C WHERE B login C, B is CWUser', [{'B': 'CWUser', 'C': 'String'}])],
[self.ldap, self.system], None, {'B': 'table1.C0', 'B.login': 'table1.C1', 'C': 'table1.C1'}, []),
- ('OneFetchStep', [('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by 5), B is CWUser, A is IN(Card, Tag)',
+ ('OneFetchStep', [('DISTINCT Any B,C ORDERBY C WHERE A created_by B, B login C, EXISTS(B owned_by %s), B is CWUser, A is IN(Card, Tag)' % ueid,
[{'A': 'Card', 'B': 'CWUser', 'C': 'String'},
{'A': 'Tag', 'B': 'CWUser', 'C': 'String'}])],
None, None, [self.system],
@@ -1488,7 +1514,7 @@
'C': 'table1.C1',
'N': 'table0.C1'},
[])],
- {'E': self.session.user.eid})
+ {'E': ueid})
def test_eid_dont_cross_relation_1(self):
repo._type_source_cache[999999] = ('Personne', 'system', 999999)
@@ -1662,7 +1688,7 @@
ueid = self.session.user.eid
self._test('DELETE X created_by Y WHERE X eid %(x)s, NOT Y eid %(y)s',
[('DeleteRelationsStep', [
- ('OneFetchStep', [('Any 5,Y WHERE %s created_by Y, NOT Y eid %s, Y is CWUser'%(ueid, ueid),
+ ('OneFetchStep', [('Any %s,Y WHERE %s created_by Y, NOT Y eid %s, Y is CWUser' % (ueid, ueid, ueid),
[{'Y': 'CWUser'}])],
None, None, [self.system], {}, []),
]),
@@ -1805,6 +1831,120 @@
del self.cards.support_relations['see_also']
self.cards.cross_relations.remove('see_also')
+ def test_state_of_cross(self):
+ self._test('DELETE State X WHERE NOT X state_of Y',
+ [('DeleteEntitiesStep',
+ [('OneFetchStep',
+ [('Any X WHERE NOT X state_of Y, X is State, Y is Workflow',
+ [{'X': 'State', 'Y': 'Workflow'}])],
+ None, None, [self.system], {}, [])])]
+ )
+
+
+ def test_source_specified_0_0(self):
+ self._test('Card X WHERE X cw_source S, S eid 1',
+ [('OneFetchStep', [('Any X WHERE X is Card',
+ [{'X': 'Card'}])],
+ None, None,
+ [self.system],{}, [])
+ ])
+
+ def test_source_specified_0_1(self):
+ self._test('Any X, S WHERE X is Card, X cw_source S, S eid 1',
+ [('OneFetchStep', [('Any X,1 WHERE X is Card',
+ [{'X': 'Card'}])],
+ None, None,
+ [self.system],{}, [])
+ ])
+
+ def test_source_specified_1_0(self):
+ self._test('Card X WHERE X cw_source S, S name "system"',
+ [('OneFetchStep', [('Any X WHERE X is Card',
+ [{'X': 'Card'}])],
+ None, None,
+ [self.system],{}, [])
+ ])
+
+ def test_source_specified_1_1(self):
+ self._test('Any X, SN WHERE X is Card, X cw_source S, S name "system", S name SN',
+ [('OneFetchStep', [('Any X,SN WHERE X is Card, X cw_source S, S name "system", '
+ 'S name SN',
+ [{'S': 'CWSource', 'SN': 'String', 'X': 'Card'}])],
+ None, None, [self.system], {}, [])
+ ])
+
+ def test_source_specified_2_0(self):
+ self._test('Card X WHERE X cw_source S, NOT S eid 1',
+ [('OneFetchStep', [('Any X WHERE X is Card',
+ [{'X': 'Card'}])],
+ None, None,
+ [self.cards],{}, [])
+ ])
+ self._test('Card X WHERE NOT X cw_source S, S eid 1',
+ [('OneFetchStep', [('Any X WHERE X is Card',
+ [{'X': 'Card'}])],
+ None, None,
+ [self.cards],{}, [])
+ ])
+
+ def test_source_specified_2_1(self):
+ self._test('Card X WHERE X cw_source S, NOT S name "system"',
+ [('OneFetchStep', [('Any X WHERE X is Card',
+ [{'X': 'Card'}])],
+ None, None,
+ [self.cards],{}, [])
+ ])
+ self._test('Card X WHERE NOT X cw_source S, S name "system"',
+ [('OneFetchStep', [('Any X WHERE X is Card',
+ [{'X': 'Card'}])],
+ None, None,
+ [self.cards],{}, [])
+ ])
+
+ def test_source_conflict_1(self):
+ self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+ ex = self.assertRaises(BadRQLQuery,
+ self._test, 'Any X WHERE X cw_source S, S name "system", X eid %(x)s',
+ [], {'x': 999999})
+ self.assertEqual(str(ex), 'source conflict for term %(x)s')
+
+ def test_source_conflict_2(self):
+ ex = self.assertRaises(BadRQLQuery,
+ self._test, 'Card X WHERE X cw_source S, S name "systeme"', [])
+ self.assertEqual(str(ex), 'source conflict for term X')
+
+ def test_source_conflict_3(self):
+ ex = self.assertRaises(BadRQLQuery,
+ self._test, 'CWSource X WHERE X cw_source S, S name "cards"', [])
+ self.assertEqual(str(ex), 'source conflict for term X')
+
+ def test_ambigous_cross_relation_source_specified(self):
+ self.repo._type_source_cache[999999] = ('Note', 'cards', 999999)
+ self.cards.support_relations['see_also'] = True
+ self.cards.cross_relations.add('see_also')
+ try:
+ self._test('Any X,AA ORDERBY AA WHERE E eid %(x)s, E see_also X, X modification_date AA',
+ [('AggrStep',
+ 'SELECT table0.C0, table0.C1 FROM table0 ORDER BY table0.C1',
+ None,
+ [('FetchStep',
+ [('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Note',
+ [{'AA': 'Datetime', 'X': 'Note'}])], [self.cards, self.system], {},
+ {'AA': 'table0.C1', 'X': 'table0.C0',
+ 'X.modification_date': 'table0.C1'},
+ []),
+ ('FetchStep',
+ [('Any X,AA WHERE 999999 see_also X, X modification_date AA, X is Bookmark',
+ [{'AA': 'Datetime', 'X': 'Bookmark'}])],
+ [self.system], {},
+ {'AA': 'table0.C1', 'X': 'table0.C0',
+ 'X.modification_date': 'table0.C1'},
+ [])])],
+ {'x': 999999})
+ finally:
+ del self.cards.support_relations['see_also']
+ self.cards.cross_relations.remove('see_also')
+
# non regression tests ####################################################
def test_nonregr1(self):
@@ -1864,15 +2004,16 @@
)
def test_nonregr4(self):
+ ueid = self.session.user.eid
self._test('Any U ORDERBY D DESC WHERE WF wf_info_for X, WF creation_date D, WF from_state FS, '
'WF owned_by U?, X eid %(x)s',
[#('FetchStep', [('Any U WHERE U is CWUser', [{'U': 'CWUser'}])],
# [self.ldap, self.system], None, {'U': 'table0.C0'}, []),
- ('OneFetchStep', [('Any U ORDERBY D DESC WHERE WF wf_info_for 5, WF creation_date D, WF from_state FS, WF owned_by U?',
+ ('OneFetchStep', [('Any U ORDERBY D DESC WHERE WF wf_info_for %s, WF creation_date D, WF from_state FS, WF owned_by U?' % ueid,
[{'WF': 'TrInfo', 'FS': 'State', 'U': 'CWUser', 'D': 'Datetime'}])],
None, None,
[self.system], {}, [])],
- {'x': self.session.user.eid})
+ {'x': ueid})
def test_nonregr5(self):
# original jpl query:
@@ -1997,6 +2138,7 @@
{'x': 999999})
def test_nonregr13_1(self):
+ ueid = self.session.user.eid
# identity wrapped into exists:
# shouldn't propagate constraint that U is in the same source as ME
self._test('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File '
@@ -2008,7 +2150,7 @@
[self.ldap, self.system], None,
{'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'},
[]),
- ('FetchStep', [('Any U,UL WHERE ((EXISTS(U identity 5)) OR (EXISTS(U in_group G, G name IN("managers", "staff"), G is CWGroup))) OR (EXISTS(U in_group H, 5 in_group H, NOT H name "users", H is CWGroup)), U login UL, U is CWUser',
+ ('FetchStep', [('Any U,UL WHERE ((EXISTS(U identity %s)) OR (EXISTS(U in_group G, G name IN("managers", "staff"), G is CWGroup))) OR (EXISTS(U in_group H, %s in_group H, NOT H name "users", H is CWGroup)), U login UL, U is CWUser' % (ueid, ueid),
[{'G': 'CWGroup', 'H': 'CWGroup', 'U': 'CWUser', 'UL': 'String'}])],
[self.system],
{'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'},
@@ -2019,7 +2161,7 @@
None, None, [self.system],
{'U': 'table1.C0', 'UL': 'table1.C1'},
[])],
- {'x': self.session.user.eid})
+ {'x': ueid})
def test_nonregr13_2(self):
# identity *not* wrapped into exists.
@@ -2033,6 +2175,7 @@
# explain constraint propagation rules, and so why this should be
# wrapped in exists() if used in multi-source
self.skipTest('take a look at me if you wish')
+ ueid = self.session.user.eid
self._test('Any B,U,UL GROUPBY B,U,UL WHERE B created_by U?, B is File '
'WITH U,UL BEING (Any U,UL WHERE ME eid %(x)s, (U identity ME '
'OR (EXISTS(U in_group G, G name IN("managers", "staff")))) '
@@ -2042,7 +2185,7 @@
[self.ldap, self.system], None,
{'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'},
[]),
- ('FetchStep', [('Any U,UL WHERE ((U identity 5) OR (EXISTS(U in_group G, G name IN("managers", "staff"), G is CWGroup))) OR (EXISTS(U in_group H, 5 in_group H, NOT H name "users", H is CWGroup)), U login UL, U is CWUser',
+ ('FetchStep', [('Any U,UL WHERE ((U identity %s) OR (EXISTS(U in_group G, G name IN("managers", "staff"), G is CWGroup))) OR (EXISTS(U in_group H, %s in_group H, NOT H name "users", H is CWGroup)), U login UL, U is CWUser' % (ueid, ueid),
[{'G': 'CWGroup', 'H': 'CWGroup', 'U': 'CWUser', 'UL': 'String'}])],
[self.system],
{'U': 'table0.C0', 'U.login': 'table0.C1', 'UL': 'table0.C1'},
@@ -2094,14 +2237,6 @@
None, None, [self.system], {}, [])],
{'x': 999999, 'u': 999998})
- def test_state_of_cross(self):
- self._test('DELETE State X WHERE NOT X state_of Y',
- [('DeleteEntitiesStep',
- [('OneFetchStep',
- [('Any X WHERE NOT X state_of Y, X is State, Y is Workflow',
- [{'X': 'State', 'Y': 'Workflow'}])],
- None, None, [self.system], {}, [])])]
- )
class MSPlannerTwoSameExternalSourcesTC(BasePlannerTC):
"""test planner related feature on a 3-sources repository:
@@ -2257,7 +2392,7 @@
None, {'X': 'table0.C0'}, []),
('UnionStep', None, None,
[('OneFetchStep',
- [(u'Any X WHERE X owned_by U, U login "anon", U is CWUser, X is IN(Affaire, BaseTransition, Basket, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWUniqueTogetherConstraint, CWUser, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)',
+ [(u'Any X WHERE X owned_by U, U login "anon", U is CWUser, X is IN(Affaire, BaseTransition, Basket, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWSource, CWSourceHostConfig, CWUniqueTogetherConstraint, CWUser, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Personne, RQLExpression, Societe, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)',
[{'U': 'CWUser', 'X': 'Affaire'},
{'U': 'CWUser', 'X': 'BaseTransition'},
{'U': 'CWUser', 'X': 'Basket'},
@@ -2272,6 +2407,8 @@
{'U': 'CWUser', 'X': 'CWProperty'},
{'U': 'CWUser', 'X': 'CWRType'},
{'U': 'CWUser', 'X': 'CWRelation'},
+ {'U': 'CWUser', 'X': 'CWSource'},
+ {'U': 'CWUser', 'X': 'CWSourceHostConfig'},
{'U': 'CWUser', 'X': 'CWUniqueTogetherConstraint'},
{'U': 'CWUser', 'X': 'CWUser'},
{'U': 'CWUser', 'X': 'Division'},
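The test_source_specified_* additions above exercise the planner side of the new cw_source relation: naming (or excluding) a source in plain RQL is enough to pin a query to that source. A minimal sketch of the same idea from a regular test, assuming the usual CubicWebTC setup (class and method names below are hypothetical):

from cubicweb.devtools.testlib import CubicWebTC

class SourceFilteringTC(CubicWebTC):
    """hypothetical sketch: restrict plain RQL queries to a given data source"""
    def test_filter_cards_by_source(self):
        # entities held by the system source only...
        insystem = self.execute('Any X WHERE X is Card, X cw_source S, S name "system"')
        # ...or, conversely, entities coming from any other (external) source
        external = self.execute('Any X WHERE X is Card, X cw_source S, NOT S name "system"')
        allcards = self.execute('Any X WHERE X is Card')
        self.assertEqual(len(insystem) + len(external), len(allcards))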
--- a/server/test/unittest_multisources.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/test/unittest_multisources.py Sat Oct 09 00:05:52 2010 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+ # copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -15,29 +15,29 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
-from os.path import dirname, join, abspath
+
from datetime import datetime, timedelta
-from logilab.common.decorators import cached
-
from cubicweb.devtools import TestServerConfiguration, init_test_database
from cubicweb.devtools.testlib import CubicWebTC, refresh_repo
from cubicweb.devtools.repotest import do_monkey_patch, undo_monkey_patch
-class TwoSourcesConfiguration(TestServerConfiguration):
- sourcefile = 'sources_multi'
-
-
class ExternalSource1Configuration(TestServerConfiguration):
sourcefile = 'sources_extern'
class ExternalSource2Configuration(TestServerConfiguration):
- sourcefile = 'sources_multi2'
+ sourcefile = 'sources_multi'
MTIME = datetime.now() - timedelta(0, 10)
-repo2, cnx2 = init_test_database(config=ExternalSource1Configuration('data'))
-repo3, cnx3 = init_test_database(config=ExternalSource2Configuration('data'))
+
+EXTERN_SOURCE_CFG = u'''
+pyro-ns-id = extern
+cubicweb-user = admin
+cubicweb-password = gingkow
+mapping-file = extern_mapping.py
+base-url=http://extern.org/
+'''
# hi-jacking
from cubicweb.server.sources.pyrorql import PyroRQLSource
@@ -47,6 +47,13 @@
Connection_close = Connection.close
def setup_module(*args):
+ global repo2, cnx2, repo3, cnx3
+ repo2, cnx2 = init_test_database(config=ExternalSource1Configuration('data'))
+ repo3, cnx3 = init_test_database(config=ExternalSource2Configuration('data'))
+ cnx3.request().create_entity('CWSource', name=u'extern', type=u'pyrorql',
+ config=EXTERN_SOURCE_CFG)
+ cnx3.commit()
+
TestServerConfiguration.no_sqlite_wrap = True
# hi-jack PyroRQLSource.get_connection to access existing connection (no
# pyro connection)
@@ -67,7 +74,6 @@
TestServerConfiguration.no_sqlite_wrap = False
class TwoSourcesTC(CubicWebTC):
- config = TwoSourcesConfiguration('data')
@classmethod
def _refresh_repo(cls):
@@ -82,6 +88,8 @@
do_monkey_patch()
def tearDown(self):
+ for source in self.repo.sources[1:]:
+ self.repo.remove_source(source.uri)
CubicWebTC.tearDown(self)
undo_monkey_patch()
@@ -91,6 +99,17 @@
cu.execute('INSERT Card X: X title "C4: Ze external card", X wikiid "zzz"')
self.aff1 = cu.execute('INSERT Affaire X: X ref "AFFREF"')[0][0]
cnx2.commit()
+ for uri, config in [('extern', EXTERN_SOURCE_CFG),
+ ('extern-multi', '''
+pyro-ns-id = extern-multi
+cubicweb-user = admin
+cubicweb-password = gingkow
+mapping-file = extern_mapping.py
+''')]:
+ self.request().create_entity('CWSource', name=unicode(uri),
+ type=u'pyrorql',
+ config=unicode(config))
+ self.commit()
# trigger discovery
self.sexecute('Card X')
self.sexecute('Affaire X')
@@ -112,11 +131,11 @@
# since they are ordered by eid, we know the first 3 come from the system source
# and the others from external source
self.assertEqual(rset.get_entity(0, 0).cw_metainformation(),
- {'source': {'adapter': 'native', 'uri': 'system'},
+ {'source': {'type': 'native', 'uri': 'system'},
'type': u'Card', 'extid': None})
externent = rset.get_entity(3, 0)
metainf = externent.cw_metainformation()
- self.assertEqual(metainf['source'], {'adapter': 'pyrorql', 'base-url': 'http://extern.org/', 'uri': 'extern'})
+ self.assertEqual(metainf['source'], {'type': 'pyrorql', 'base-url': 'http://extern.org/', 'uri': 'extern'})
self.assertEqual(metainf['type'], 'Card')
self.assert_(metainf['extid'])
etype = self.sexecute('Any ETN WHERE X is ET, ET name ETN, X eid %(x)s',
@@ -184,7 +203,7 @@
def test_simplifiable_var_2(self):
affeid = self.sexecute('Affaire X WHERE X ref "AFFREF"')[0][0]
rset = self.sexecute('Any E WHERE E eid %(x)s, E in_state S, NOT S name "moved"',
- {'x': affeid, 'u': self.session.user.eid})
+ {'x': affeid, 'u': self.session.user.eid})
self.assertEqual(len(rset), 1)
def test_sort_func(self):
@@ -270,7 +289,6 @@
def test_not_relation(self):
states = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN'))
- self.session.user.clear_all_caches()
userstate = self.session.user.in_state[0]
states.remove((userstate.eid, userstate.name))
notstates = set(tuple(x) for x in self.sexecute('Any S,SN WHERE S is State, S name SN, NOT X in_state S, X eid %(x)s',
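As the reworked setup_module / setup_database above shows, external sources are no longer described in a sources_multi file: tests (and instances) declare them by creating CWSource entities. A condensed sketch of that pattern, reusing the config payload from EXTERN_SOURCE_CFG (variable names are otherwise illustrative):

from cubicweb.devtools import TestServerConfiguration, init_test_database

repo, cnx = init_test_database(config=TestServerConfiguration('data'))
# declare a pyrorql source through the schema, no sources file involved
cnx.request().create_entity('CWSource', name=u'extern', type=u'pyrorql',
                            config=u'''
pyro-ns-id = extern
cubicweb-user = admin
cubicweb-password = gingkow
mapping-file = extern_mapping.py
base-url=http://extern.org/
''')
cnx.commit()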
--- a/server/test/unittest_querier.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/test/unittest_querier.py Sat Oct 09 00:05:52 2010 +0200
@@ -130,7 +130,7 @@
'X': 'Affaire',
'ET': 'CWEType', 'ETN': 'String'}])
rql, solutions = partrqls[1]
- self.assertEqual(rql, 'Any ETN,X WHERE X is ET, ET name ETN, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWUniqueTogetherConstraint, CWUser, Card, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Note, Personne, RQLExpression, Societe, State, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)')
+ self.assertEqual(rql, 'Any ETN,X WHERE X is ET, ET name ETN, ET is CWEType, X is IN(BaseTransition, Bookmark, CWAttribute, CWCache, CWConstraint, CWConstraintType, CWEType, CWGroup, CWPermission, CWProperty, CWRType, CWRelation, CWSource, CWUniqueTogetherConstraint, CWUser, Card, Comment, Division, Email, EmailAddress, EmailPart, EmailThread, ExternalUri, File, Folder, Note, Personne, RQLExpression, Societe, State, SubDivision, SubWorkflowExitPoint, Tag, TrInfo, Transition, Workflow, WorkflowTransition)')
self.assertListEqual(sorted(solutions),
sorted([{'X': 'BaseTransition', 'ETN': 'String', 'ET': 'CWEType'},
{'X': 'Bookmark', 'ETN': 'String', 'ET': 'CWEType'},
@@ -147,6 +147,7 @@
{'X': 'CWPermission', 'ETN': 'String', 'ET': 'CWEType'},
{'X': 'CWProperty', 'ETN': 'String', 'ET': 'CWEType'},
{'X': 'CWRType', 'ETN': 'String', 'ET': 'CWEType'},
+ {'X': 'CWSource', 'ETN': 'String', 'ET': 'CWEType'},
{'X': 'CWUniqueTogetherConstraint', 'ETN': 'String', 'ET': 'CWEType'},
{'X': 'CWUser', 'ETN': 'String', 'ET': 'CWEType'},
{'X': 'Email', 'ETN': 'String', 'ET': 'CWEType'},
@@ -251,15 +252,15 @@
def test_select_1(self):
rset = self.execute('Any X ORDERBY X WHERE X is CWGroup')
result, descr = rset.rows, rset.description
- self.assertEqual(tuplify(result), [(1,), (2,), (3,), (4,)])
+ self.assertEqual(tuplify(result), [(2,), (3,), (4,), (5,)])
self.assertEqual(descr, [('CWGroup',), ('CWGroup',), ('CWGroup',), ('CWGroup',)])
def test_select_2(self):
rset = self.execute('Any X ORDERBY N WHERE X is CWGroup, X name N')
- self.assertEqual(tuplify(rset.rows), [(1,), (2,), (3,), (4,)])
+ self.assertEqual(tuplify(rset.rows), [(2,), (3,), (4,), (5,)])
self.assertEqual(rset.description, [('CWGroup',), ('CWGroup',), ('CWGroup',), ('CWGroup',)])
rset = self.execute('Any X ORDERBY N DESC WHERE X is CWGroup, X name N')
- self.assertEqual(tuplify(rset.rows), [(4,), (3,), (2,), (1,)])
+ self.assertEqual(tuplify(rset.rows), [(5,), (4,), (3,), (2,)])
def test_select_3(self):
rset = self.execute('Any N GROUPBY N WHERE X is CWGroup, X name N')
@@ -302,7 +303,7 @@
def test_select_5(self):
rset = self.execute('Any X, TMP ORDERBY TMP WHERE X name TMP, X is CWGroup')
- self.assertEqual(tuplify(rset.rows), [(1, 'guests',), (2, 'managers',), (3, 'owners',), (4, 'users',)])
+ self.assertEqual(tuplify(rset.rows), [(2, 'guests',), (3, 'managers',), (4, 'owners',), (5, 'users',)])
self.assertEqual(rset.description, [('CWGroup', 'String',), ('CWGroup', 'String',), ('CWGroup', 'String',), ('CWGroup', 'String',)])
def test_select_6(self):
@@ -350,11 +351,11 @@
self.assertEqual(len(rset.rows), 0)
def test_select_nonregr_edition_not(self):
- groupeids = set((1, 2, 3))
- groupreadperms = set(r[0] for r in self.execute('Any Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), X read_permission Y'))
- rset = self.execute('DISTINCT Any Y WHERE X is CWEType, X name "CWGroup", Y eid IN(1, 2, 3), NOT X read_permission Y')
+ groupeids = set((2, 3, 4))
+ groupreadperms = set(r[0] for r in self.execute('Any Y WHERE X name "CWGroup", Y eid IN(2, 3, 4), X read_permission Y'))
+ rset = self.execute('DISTINCT Any Y WHERE X is CWEType, X name "CWGroup", Y eid IN(2, 3, 4), NOT X read_permission Y')
self.assertEqual(sorted(r[0] for r in rset.rows), sorted(groupeids - groupreadperms))
- rset = self.execute('DISTINCT Any Y WHERE X name "CWGroup", Y eid IN(1, 2, 3), NOT X read_permission Y')
+ rset = self.execute('DISTINCT Any Y WHERE X name "CWGroup", Y eid IN(2, 3, 4), NOT X read_permission Y')
self.assertEqual(sorted(r[0] for r in rset.rows), sorted(groupeids - groupreadperms))
def test_select_outer_join(self):
@@ -493,15 +494,16 @@
self.assertListEqual(rset.rows,
[[u'description_format', 12],
[u'description', 13],
- [u'name', 14],
- [u'created_by', 38],
- [u'creation_date', 38],
- [u'cwuri', 38],
- [u'in_basket', 38],
- [u'is', 38],
- [u'is_instance_of', 38],
- [u'modification_date', 38],
- [u'owned_by', 38]])
+ [u'name', 15],
+ [u'created_by', 40],
+ [u'creation_date', 40],
+ [u'cw_source', 40],
+ [u'cwuri', 40],
+ [u'in_basket', 40],
+ [u'is', 40],
+ [u'is_instance_of', 40],
+ [u'modification_date', 40],
+ [u'owned_by', 40]])
def test_select_aggregat_having_dumb(self):
# dumb but should not raise an error
@@ -617,15 +619,15 @@
def test_select_no_descr(self):
rset = self.execute('Any X WHERE X is CWGroup', build_descr=0)
rset.rows.sort()
- self.assertEqual(tuplify(rset.rows), [(1,), (2,), (3,), (4,)])
+ self.assertEqual(tuplify(rset.rows), [(2,), (3,), (4,), (5,)])
self.assertEqual(rset.description, ())
def test_select_limit_offset(self):
rset = self.execute('CWGroup X ORDERBY N LIMIT 2 WHERE X name N')
- self.assertEqual(tuplify(rset.rows), [(1,), (2,)])
+ self.assertEqual(tuplify(rset.rows), [(2,), (3,)])
self.assertEqual(rset.description, [('CWGroup',), ('CWGroup',)])
rset = self.execute('CWGroup X ORDERBY N LIMIT 2 OFFSET 2 WHERE X name N')
- self.assertEqual(tuplify(rset.rows), [(3,), (4,)])
+ self.assertEqual(tuplify(rset.rows), [(4,), (5,)])
def test_select_symmetric(self):
self.execute("INSERT Personne X: X nom 'machin'")
@@ -766,14 +768,14 @@
def test_select_constant(self):
rset = self.execute('Any X, "toto" ORDERBY X WHERE X is CWGroup')
self.assertEqual(rset.rows,
- map(list, zip((1,2,3,4), ('toto','toto','toto','toto',))))
+ map(list, zip((2,3,4,5), ('toto','toto','toto','toto',))))
self.assertIsInstance(rset[0][1], unicode)
self.assertEqual(rset.description,
zip(('CWGroup', 'CWGroup', 'CWGroup', 'CWGroup'),
('String', 'String', 'String', 'String',)))
rset = self.execute('Any X, %(value)s ORDERBY X WHERE X is CWGroup', {'value': 'toto'})
self.assertEqual(rset.rows,
- map(list, zip((1,2,3,4), ('toto','toto','toto','toto',))))
+ map(list, zip((2,3,4,5), ('toto','toto','toto','toto',))))
self.assertIsInstance(rset[0][1], unicode)
self.assertEqual(rset.description,
zip(('CWGroup', 'CWGroup', 'CWGroup', 'CWGroup'),
--- a/server/test/unittest_repository.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/test/unittest_repository.py Sat Oct 09 00:05:52 2010 +0200
@@ -95,15 +95,14 @@
self.assertItemsEqual(person._unique_together[0],
('nom', 'prenom', 'inline2'))
- def test_schema_has_owner(self):
- repo = self.repo
- cnxid = repo.connect(self.admlogin, password=self.admpassword)
- self.failIf(repo.execute(cnxid, 'CWEType X WHERE NOT X owned_by U'))
- self.failIf(repo.execute(cnxid, 'CWRType X WHERE NOT X owned_by U'))
- self.failIf(repo.execute(cnxid, 'CWAttribute X WHERE NOT X owned_by U'))
- self.failIf(repo.execute(cnxid, 'CWRelation X WHERE NOT X owned_by U'))
- self.failIf(repo.execute(cnxid, 'CWConstraint X WHERE NOT X owned_by U'))
- self.failIf(repo.execute(cnxid, 'CWConstraintType X WHERE NOT X owned_by U'))
+ def test_all_entities_have_owner(self):
+ self.failIf(self.execute('Any X WHERE NOT X owned_by U'))
+
+ def test_all_entities_have_is(self):
+ self.failIf(self.execute('Any X WHERE NOT X is ET'))
+
+ def test_all_entities_have_cw_source(self):
+ self.failIf(self.execute('Any X WHERE NOT X cw_source S'))
def test_connect(self):
self.assert_(self.repo.connect(self.admlogin, password=self.admpassword))
@@ -288,7 +287,7 @@
self.assertListEqual([r.type for r in schema.eschema('CWAttribute').ordered_relations()
if not r.type in ('eid', 'is', 'is_instance_of', 'identity',
'creation_date', 'modification_date', 'cwuri',
- 'owned_by', 'created_by',
+ 'owned_by', 'created_by', 'cw_source',
'update_permission', 'read_permission',
'in_basket')],
['relation_type',
@@ -369,25 +368,25 @@
repo = self.repo
cnxid = repo.connect(self.admlogin, password=self.admpassword)
session = repo._get_session(cnxid, setpool=True)
- self.assertEqual(repo.type_and_source_from_eid(1, session),
- ('CWGroup', 'system', None))
- self.assertEqual(repo.type_from_eid(1, session), 'CWGroup')
- self.assertEqual(repo.source_from_eid(1, session).uri, 'system')
- self.assertEqual(repo.eid2extid(repo.system_source, 1, session), None)
+ self.assertEqual(repo.type_and_source_from_eid(2, session),
+ ('CWGroup', 'system', None))
+ self.assertEqual(repo.type_from_eid(2, session), 'CWGroup')
+ self.assertEqual(repo.source_from_eid(2, session).uri, 'system')
+ self.assertEqual(repo.eid2extid(repo.system_source, 2, session), None)
class dummysource: uri = 'toto'
- self.assertRaises(UnknownEid, repo.eid2extid, dummysource, 1, session)
+ self.assertRaises(UnknownEid, repo.eid2extid, dummysource, 2, session)
def test_public_api(self):
self.assertEqual(self.repo.get_schema(), self.repo.schema)
- self.assertEqual(self.repo.source_defs(), {'system': {'adapter': 'native', 'uri': 'system'}})
+ self.assertEqual(self.repo.source_defs(), {'system': {'type': 'native', 'uri': 'system'}})
# .properties() return a result set
self.assertEqual(self.repo.properties().rql, 'Any K,V WHERE P is CWProperty,P pkey K, P value V, NOT P for_user U')
def test_session_api(self):
repo = self.repo
cnxid = repo.connect(self.admlogin, password=self.admpassword)
- self.assertEqual(repo.user_info(cnxid), (5, 'admin', set([u'managers']), {}))
- self.assertEqual(repo.describe(cnxid, 1), (u'CWGroup', u'system', None))
+ self.assertEqual(repo.user_info(cnxid), (6, 'admin', set([u'managers']), {}))
+ self.assertEqual(repo.describe(cnxid, 2), (u'CWGroup', u'system', None))
repo.close(cnxid)
self.assertRaises(BadConnectionId, repo.user_info, cnxid)
self.assertRaises(BadConnectionId, repo.describe, cnxid, 1)
@@ -518,7 +517,7 @@
def test_type_from_eid(self):
self.session.set_pool()
- self.assertEqual(self.repo.type_from_eid(1, self.session), 'CWGroup')
+ self.assertEqual(self.repo.type_from_eid(2, self.session), 'CWGroup')
def test_type_from_eid_raise(self):
self.session.set_pool()
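The assertion updates above boil down to two visible API changes: source descriptions now carry a 'type' key where they used to carry 'adapter', and the built-in eids shift by one because the system CWSource entity is created before anything else. A hedged sanity-check sketch (function name invented; repo and session come from a connected test repository as in the tests above):

def check_source_api(repo, session):
    """hypothetical helper mirroring the updated assertions"""
    # 'adapter' was renamed to 'type' in source descriptions
    assert repo.source_defs()['system'] == {'type': 'native', 'uri': 'system'}
    # the first CWGroup now gets eid 2, eid 1 being the system CWSource entity
    assert repo.type_from_eid(2, session) == 'CWGroup'
    assert repo.source_from_eid(2, session).uri == 'system'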
--- a/server/utils.py Sat Oct 09 00:05:50 2010 +0200
+++ b/server/utils.py Sat Oct 09 00:05:52 2010 +0200
@@ -24,8 +24,6 @@
from getpass import getpass
from random import choice
-from logilab.common.configuration import Configuration
-
from cubicweb.server import SOURCE_TYPES
try:
@@ -111,12 +109,6 @@
return user, passwd
-def ask_source_config(sourcetype, inputlevel=0):
- sconfig = Configuration(options=SOURCE_TYPES[sourcetype].options)
- sconfig.adapter = sourcetype
- sconfig.input_config(inputlevel=inputlevel)
- return sconfig
-
_MARKER=object()
def func_name(func):
name = getattr(func, '__name__', _MARKER)
--- a/test/unittest_dbapi.py Sat Oct 09 00:05:50 2010 +0200
+++ b/test/unittest_dbapi.py Sat Oct 09 00:05:52 2010 +0200
@@ -29,7 +29,7 @@
def test_public_repo_api(self):
cnx = self.login('anon')
self.assertEqual(cnx.get_schema(), self.repo.schema)
- self.assertEqual(cnx.source_defs(), {'system': {'adapter': 'native', 'uri': 'system'}})
+ self.assertEqual(cnx.source_defs(), {'system': {'type': 'native', 'uri': 'system'}})
self.restore_connection() # proper way to close cnx
self.assertRaises(ProgrammingError, cnx.get_schema)
self.assertRaises(ProgrammingError, cnx.source_defs)
@@ -47,7 +47,7 @@
def test_api(self):
cnx = self.login('anon')
self.assertEqual(cnx.user(None).login, 'anon')
- self.assertEqual(cnx.describe(1), (u'CWGroup', u'system', None))
+ self.assertEqual(cnx.describe(1), (u'CWSource', u'system', None))
self.restore_connection() # proper way to close cnx
self.assertRaises(ProgrammingError, cnx.user, None)
self.assertRaises(ProgrammingError, cnx.describe, 1)
--- a/test/unittest_entity.py Sat Oct 09 00:05:50 2010 +0200
+++ b/test/unittest_entity.py Sat Oct 09 00:05:52 2010 +0200
@@ -319,7 +319,7 @@
e = self.request().create_entity('Card', title=u'rest test', content=u'du :eid:`1:*ReST*`',
content_format=u'text/rest')
self.assertEqual(e.printable_value('content'),
- '<p>du <a class="reference" href="http://testing.fr/cubicweb/cwgroup/guests">*ReST*</a></p>\n')
+ '<p>du <a class="reference" href="http://testing.fr/cubicweb/cwsource/system">*ReST*</a></p>\n')
e.cw_attr_cache['content'] = 'du <em>html</em> <ref rql="CWUser X">users</ref>'
e.cw_attr_cache['content_format'] = 'text/html'
self.assertEqual(e.printable_value('content'),
@@ -513,7 +513,7 @@
req = self.request()
note = req.create_entity('Note', type=u'z')
metainf = note.cw_metainformation()
- self.assertEqual(metainf, {'source': {'adapter': 'native', 'uri': 'system'}, 'type': u'Note', 'extid': None})
+ self.assertEqual(metainf, {'source': {'type': 'native', 'uri': 'system'}, 'type': u'Note', 'extid': None})
self.assertEqual(note.absolute_url(), 'http://testing.fr/cubicweb/note/%s' % note.eid)
metainf['source'] = metainf['source'].copy()
metainf['source']['base-url'] = 'http://cubicweb2.com/'
--- a/test/unittest_schema.py Sat Oct 09 00:05:50 2010 +0200
+++ b/test/unittest_schema.py Sat Oct 09 00:05:52 2010 +0200
@@ -173,6 +173,7 @@
'CWCache', 'CWConstraint', 'CWConstraintType', 'CWEType',
'CWAttribute', 'CWGroup', 'EmailAddress', 'CWRelation',
'CWPermission', 'CWProperty', 'CWRType',
+ 'CWSource', 'CWSourceHostConfig',
'CWUniqueTogetherConstraint', 'CWUser',
'ExternalUri', 'File', 'Float', 'Int', 'Interval', 'Note',
'Password', 'Personne',
@@ -186,10 +187,12 @@
'bookmarked_by', 'by_transition',
'cardinality', 'comment', 'comment_format',
- 'composite', 'condition', 'connait',
+ 'composite', 'condition', 'config', 'connait',
'constrained_by', 'constraint_of',
'content', 'content_format',
- 'created_by', 'creation_date', 'cstrtype', 'custom_workflow', 'cwuri',
+ 'created_by', 'creation_date', 'cstrtype', 'custom_workflow',
+ 'cwuri', 'cw_source', 'cw_host_config_of',
+ 'cw_support', 'cw_dont_cross', 'cw_may_cross',
'data', 'data_encoding', 'data_format', 'data_name', 'default_workflow', 'defaultval', 'delete_permission',
'description', 'description_format', 'destination_state',
@@ -205,7 +208,7 @@
'label', 'last_login_time', 'login',
- 'mainvars', 'modification_date',
+ 'mainvars', 'match_host', 'modification_date',
'name', 'nom',
@@ -225,11 +228,12 @@
'wf_info_for', 'wikiid', 'workflow_of']
- self.assertListEqual(relations, expected_relations)
+ self.assertListEqual(relations, sorted(expected_relations))
eschema = schema.eschema('CWUser')
rels = sorted(str(r) for r in eschema.subject_relations())
- self.assertListEqual(rels, ['created_by', 'creation_date', 'custom_workflow', 'cwuri', 'eid',
+ self.assertListEqual(rels, ['created_by', 'creation_date', 'custom_workflow',
+ 'cw_source', 'cwuri', 'eid',
'evaluee', 'firstname', 'has_text', 'identity',
'in_group', 'in_state', 'is',
'is_instance_of', 'last_login_time',
--- a/web/application.py Sat Oct 09 00:05:50 2010 +0200
+++ b/web/application.py Sat Oct 09 00:05:52 2010 +0200
@@ -224,7 +224,7 @@
def _update_last_login_time(self, req):
# XXX should properly detect missing permission / non writeable source
# and avoid "except (RepositoryError, Unauthorized)" below
- if req.user.cw_metainformation()['source']['adapter'] == 'ldapuser':
+ if req.user.cw_metainformation()['source']['type'] == 'ldapuser':
return
try:
req.execute('SET X last_login_time NOW WHERE X eid %(x)s',
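The same 'adapter' to 'type' renaming applies on the web side: code inspecting cw_metainformation() now switches on source['type']. A small illustrative helper (hypothetical name), assuming a web request as in the method above:

def user_from_readonly_ldap(req):
    """hypothetical helper: detect users coming from a read-only ldapuser source"""
    source = req.user.cw_metainformation()['source']
    # e.g. {'type': 'native', 'uri': 'system'}; formerly keyed on 'adapter'
    return source['type'] == 'ldapuser'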
--- a/web/test/unittest_views_editforms.py Sat Oct 09 00:05:50 2010 +0200
+++ b/web/test/unittest_views_editforms.py Sat Oct 09 00:05:52 2010 +0200
@@ -64,10 +64,10 @@
])
self.assertListEqual(rbc(e, 'main', 'metadata'),
[('last_login_time', 'subject'),
+ ('creation_date', 'subject'),
+ ('cwuri', 'subject'),
('modification_date', 'subject'),
('created_by', 'subject'),
- ('creation_date', 'subject'),
- ('cwuri', 'subject'),
('owned_by', 'subject'),
('bookmarked_by', 'object'),
])
@@ -77,8 +77,8 @@
self.assertListEqual([x for x in rbc(e, 'main', 'relations')
if x != ('tags', 'object')],
[('primary_email', 'subject'),
+ ('connait', 'subject'),
('custom_workflow', 'subject'),
- ('connait', 'subject'),
('checked_by', 'object'),
])
self.assertListEqual(rbc(e, 'main', 'inlined'),
--- a/web/views/autoform.py Sat Oct 09 00:05:50 2010 +0200
+++ b/web/views/autoform.py Sat Oct 09 00:05:52 2010 +0200
@@ -901,13 +901,12 @@
_AFS.tag_attribute(('*', 'eid'), 'main', 'attributes')
_AFS.tag_attribute(('*', 'eid'), 'muledit', 'attributes')
_AFS.tag_attribute(('*', 'description'), 'main', 'attributes')
-_AFS.tag_attribute(('*', 'creation_date'), 'main', 'metadata')
-_AFS.tag_attribute(('*', 'modification_date'), 'main', 'metadata')
-_AFS.tag_attribute(('*', 'cwuri'), 'main', 'metadata')
_AFS.tag_attribute(('*', 'has_text'), 'main', 'hidden')
_AFS.tag_subject_of(('*', 'in_state', '*'), 'main', 'hidden')
-_AFS.tag_subject_of(('*', 'owned_by', '*'), 'main', 'metadata')
-_AFS.tag_subject_of(('*', 'created_by', '*'), 'main', 'metadata')
+for rtype in ('creation_date', 'modification_date', 'cwuri',
+ 'owned_by', 'created_by', 'cw_source'):
+ _AFS.tag_subject_of(('*', rtype, '*'), 'main', 'metadata')
+
_AFS.tag_subject_of(('*', 'require_permission', '*'), 'main', 'hidden')
_AFS.tag_subject_of(('*', 'by_transition', '*'), 'main', 'attributes')
_AFS.tag_subject_of(('*', 'by_transition', '*'), 'muledit', 'attributes')
--- a/web/views/facets.py Sat Oct 09 00:05:50 2010 +0200
+++ b/web/views/facets.py Sat Oct 09 00:05:52 2010 +0200
@@ -137,6 +137,11 @@
# facets ######################################################################
+class CWSourceFacet(RelationFacet):
+ __regid__ = 'cw_source-facet'
+ rtype = 'cw_source'
+ target_attr = 'name'
+
class CreatedByFacet(RelationFacet):
__regid__ = 'created_by-facet'
rtype = 'created_by'
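CWSourceFacet is the facet announced in the changeset summary: it lets result lists be filtered by data source name. Any cube can reuse the same three-attribute pattern for its own relations; a hedged sketch (class, regid and attribute choices are purely illustrative):

from cubicweb.web.views.facets import RelationFacet

class OwnerLoginFacet(RelationFacet):
    """hypothetical sketch: filter entities by the login of their owner"""
    __regid__ = 'owned_by_login-facet'
    rtype = 'owned_by'
    target_attr = 'login'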