# HG changeset patch
# User David Douard
# Date 1371219985 -7200
# Node ID b4bcabf55e77efde2f4b63a0045c837525253ab4
# Parent 2cf127d4f5fdd27efb3f1c24e556f6204ea74681
# Parent 1f3d4d829e6318782d148ec8ca98b734dfa0b54d
3.17 is the new stable
diff -r 2cf127d4f5fd -r b4bcabf55e77 .hgtags
--- a/.hgtags Fri Jun 14 16:13:24 2013 +0200
+++ b/.hgtags Fri Jun 14 16:26:25 2013 +0200
@@ -291,9 +291,19 @@
ee860c51f56bd65c4f6ea363462c02700d1dab5a cubicweb-version-3.16.3
ee860c51f56bd65c4f6ea363462c02700d1dab5a cubicweb-debian-version-3.16.3-1
ee860c51f56bd65c4f6ea363462c02700d1dab5a cubicweb-centos-version-3.16.3-1
+cc1a0aad580cf93d26959f97d8d6638e786c1082 cubicweb-version-3.17.0
+22be40c492e9034483bfec379ca11462ea97825b cubicweb-debian-version-3.17.0-1
+09a0c7ea6c3cb97bbbeed3795b3c3715ceb9566b cubicweb-debian-version-3.17.0-2
041804bc48e91e440a5b573ceb0df5bf22863b80 cubicweb-version-3.16.4
041804bc48e91e440a5b573ceb0df5bf22863b80 cubicweb-debian-version-3.16.4-1
041804bc48e91e440a5b573ceb0df5bf22863b80 cubicweb-centos-version-3.16.4-1
810a05fba1a46ab893b6cadac109097a047f8355 cubicweb-version-3.16.5
810a05fba1a46ab893b6cadac109097a047f8355 cubicweb-debian-version-3.16.5-1
810a05fba1a46ab893b6cadac109097a047f8355 cubicweb-centos-version-3.16.5-1
+f98d1c46ed9fd5db5262cf5be1c8e159c90efc8b cubicweb-version-3.17.1
+f98d1c46ed9fd5db5262cf5be1c8e159c90efc8b cubicweb-version-3.17.1
+73f2ad404716cd211b735e67ee16875f1fff7374 cubicweb-debian-version-3.17.1-1
+f98d1c46ed9fd5db5262cf5be1c8e159c90efc8b cubicweb-debian-version-3.17.1-1
+f98d1c46ed9fd5db5262cf5be1c8e159c90efc8b cubicweb-centos-version-3.17.1-1
+195e519fe97c8d1a5ab5ccb21bf7c88e5801b657 cubicweb-version-3.17.2
+195e519fe97c8d1a5ab5ccb21bf7c88e5801b657 cubicweb-debian-version-3.17.2-1
diff -r 2cf127d4f5fd -r b4bcabf55e77 __init__.py
--- a/__init__.py Fri Jun 14 16:13:24 2013 +0200
+++ b/__init__.py Fri Jun 14 16:26:25 2013 +0200
@@ -38,10 +38,10 @@
import sys, os, logging
from StringIO import StringIO
+from logilab.common.deprecation import deprecated
from logilab.common.logging_ext import set_log_methods
from yams.constraints import BASE_CONVERTERS
-
if os.environ.get('APYCOT_ROOT'):
logging.basicConfig(level=logging.CRITICAL)
else:
@@ -57,8 +57,9 @@
from logilab.common.registry import ObjectNotFound, NoSelectableObject, RegistryNotFound
# convert eid to the right type, raise ValueError if it's not a valid eid
-typed_eid = int
-
+@deprecated('[3.17] typed_eid() was removed. replace it with int() when needed.')
+def typed_eid(eid):
+ return int(eid)
#def log_thread(f, w, a):
# print f.f_code.co_filename, f.f_code.co_name
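A minimal sketch of what the shim above means for callers (illustrative, not part of
the changeset):

    from cubicweb import typed_eid

    eid = typed_eid('1234')   # still works, but now emits the '[3.17]' DeprecationWarning
    assert eid == 1234        # new code should simply call int() instead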
diff -r 2cf127d4f5fd -r b4bcabf55e77 __pkginfo__.py
--- a/__pkginfo__.py Fri Jun 14 16:13:24 2013 +0200
+++ b/__pkginfo__.py Fri Jun 14 16:26:25 2013 +0200
@@ -22,7 +22,7 @@
modname = distname = "cubicweb"
-numversion = (3, 16, 5)
+numversion = (3, 17, 2)
version = '.'.join(str(num) for num in numversion)
description = "a repository of entities / relations for knowledge management"
@@ -43,7 +43,7 @@
'logilab-common': '>= 0.59.0',
'logilab-mtconverter': '>= 0.8.0',
'rql': '>= 0.31.2',
- 'yams': '>= 0.36.0',
+ 'yams': '>= 0.37.0',
#gettext # for xgettext, msgcat, etc...
# web dependancies
'simplejson': '>= 2.0.9',
@@ -51,7 +51,7 @@
'Twisted': '',
# XXX graphviz
# server dependencies
- 'logilab-database': '>= 1.8.2',
+ 'logilab-database': '>= 1.10',
'pysqlite': '>= 2.5.5', # XXX install pysqlite2
'passlib': '',
}
diff -r 2cf127d4f5fd -r b4bcabf55e77 appobject.py
--- a/appobject.py Fri Jun 14 16:13:24 2013 +0200
+++ b/appobject.py Fri Jun 14 16:26:25 2013 +0200
@@ -34,7 +34,6 @@
from logging import getLogger
from logilab.common.deprecation import deprecated, class_renamed
-from logilab.common.decorators import classproperty
from logilab.common.logging_ext import set_log_methods
# first line imports for bw compat
diff -r 2cf127d4f5fd -r b4bcabf55e77 cubicweb.spec
--- a/cubicweb.spec Fri Jun 14 16:13:24 2013 +0200
+++ b/cubicweb.spec Fri Jun 14 16:26:25 2013 +0200
@@ -7,7 +7,7 @@
%endif
Name: cubicweb
-Version: 3.16.5
+Version: 3.17.2
Release: logilab.1%{?dist}
Summary: CubicWeb is a semantic web application framework
Source0: http://download.logilab.org/pub/cubicweb/cubicweb-%{version}.tar.gz
@@ -23,8 +23,8 @@
Requires: %{python}-logilab-common >= 0.59.0
Requires: %{python}-logilab-mtconverter >= 0.8.0
Requires: %{python}-rql >= 0.31.2
-Requires: %{python}-yams >= 0.36.0
-Requires: %{python}-logilab-database >= 1.9.0
+Requires: %{python}-yams >= 0.37.0
+Requires: %{python}-logilab-database >= 1.10.0
Requires: %{python}-passlib
Requires: %{python}-lxml
Requires: %{python}-twisted-web
diff -r 2cf127d4f5fd -r b4bcabf55e77 cwconfig.py
--- a/cwconfig.py Fri Jun 14 16:13:24 2013 +0200
+++ b/cwconfig.py Fri Jun 14 16:26:25 2013 +0200
@@ -565,19 +565,27 @@
todo.append(depcube)
return cubes
- @classmethod
- def reorder_cubes(cls, cubes):
+ def reorder_cubes(self, cubes):
"""reorder cubes from the top level cubes to inner dependencies
cubes
"""
from logilab.common.graph import ordered_nodes, UnorderableGraph
+ # See help string for 'ui-cube' in web/webconfig.py for the reasons
+ # behind this hack.
+ uicube = self.get('ui-cube', None)
graph = {}
+ if uicube:
+ graph[uicube] = set()
for cube in cubes:
cube = CW_MIGRATION_MAP.get(cube, cube)
- graph[cube] = set(dep for dep in cls.cube_dependencies(cube)
+ graph[cube] = set(dep for dep in self.cube_dependencies(cube)
if dep in cubes)
- graph[cube] |= set(dep for dep in cls.cube_recommends(cube)
+ graph[cube] |= set(dep for dep in self.cube_recommends(cube)
if dep in cubes)
+ if uicube and cube != uicube \
+ and cube not in self.cube_dependencies(uicube) \
+ and cube not in self.cube_recommends(uicube):
+ graph[cube].add(uicube)
try:
return ordered_nodes(graph)
except UnorderableGraph as ex:
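For reference, a toy version of the graph shape that reorder_cubes() hands to
ordered_nodes (cube names are made up):

    from logilab.common.graph import ordered_nodes, UnorderableGraph

    # cube -> set of the cubes it depends on, as built above
    graph = {'blog': set(['comment']),
             'forge': set(['comment']),
             'comment': set()}
    try:
        print ordered_nodes(graph)   # top level cubes first, inner dependencies last
    except UnorderableGraph as ex:
        print 'cannot order cubes:', ex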
diff -r 2cf127d4f5fd -r b4bcabf55e77 cwctl.py
--- a/cwctl.py Fri Jun 14 16:13:24 2013 +0200
+++ b/cwctl.py Fri Jun 14 16:26:25 2013 +0200
@@ -38,10 +38,9 @@
def getpgid():
"""win32 getpgid implementation"""
-
-
from logilab.common.clcommands import CommandLine
from logilab.common.shellutils import ASK
+from logilab.common.configuration import merge_options
from cubicweb import ConfigurationError, ExecutionError, BadCommandUsage
from cubicweb.utils import support_args
@@ -205,9 +204,12 @@
class ListCommand(Command):
"""List configurations, cubes and instances.
- list available configurations, installed cubes, and registered instances
+ List available configurations, installed cubes, and registered instances.
+
+ If given, the optional argument restricts the listing to a single category of items.
"""
name = 'list'
+ arguments = '[all|cubes|configurations|instances]'
options = (
('verbose',
{'short': 'v', 'action' : 'store_true',
@@ -216,92 +218,107 @@
def run(self, args):
"""run the command with its specific arguments"""
- if args:
+ if not args:
+ mode = 'all'
+ elif len(args) == 1:
+ mode = args[0]
+ else:
raise BadCommandUsage('Too many arguments')
+
from cubicweb.migration import ConfigurationProblem
- print 'CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode)
- print
- print 'Available configurations:'
- for config in CONFIGURATIONS:
- print '*', config.name
- for line in config.__doc__.splitlines():
- line = line.strip()
- if not line:
- continue
- print ' ', line
- print
- cfgpb = ConfigurationProblem(cwcfg)
- try:
- cubesdir = pathsep.join(cwcfg.cubes_search_path())
- namesize = max(len(x) for x in cwcfg.available_cubes())
- except ConfigurationError as ex:
- print 'No cubes available:', ex
- except ValueError:
- print 'No cubes available in %s' % cubesdir
- else:
- print 'Available cubes (%s):' % cubesdir
- for cube in cwcfg.available_cubes():
- try:
- tinfo = cwcfg.cube_pkginfo(cube)
- tversion = tinfo.version
- cfgpb.add_cube(cube, tversion)
- except (ConfigurationError, AttributeError) as ex:
- tinfo = None
- tversion = '[missing cube information: %s]' % ex
- print '* %s %s' % (cube.ljust(namesize), tversion)
- if self.config.verbose:
- if tinfo:
- descr = getattr(tinfo, 'description', '')
- if not descr:
- descr = getattr(tinfo, 'short_desc', '')
+
+ if mode == 'all':
+ print 'CubicWeb %s (%s mode)' % (cwcfg.cubicweb_version(), cwcfg.mode)
+ print
+
+ if mode in ('all', 'config', 'configurations'):
+ print 'Available configurations:'
+ for config in CONFIGURATIONS:
+ print '*', config.name
+ for line in config.__doc__.splitlines():
+ line = line.strip()
+ if not line:
+ continue
+ print ' ', line
+ print
+
+ if mode in ('all', 'cubes'):
+ cfgpb = ConfigurationProblem(cwcfg)
+ try:
+ cubesdir = pathsep.join(cwcfg.cubes_search_path())
+ namesize = max(len(x) for x in cwcfg.available_cubes())
+ except ConfigurationError as ex:
+ print 'No cubes available:', ex
+ except ValueError:
+ print 'No cubes available in %s' % cubesdir
+ else:
+ print 'Available cubes (%s):' % cubesdir
+ for cube in cwcfg.available_cubes():
+ try:
+ tinfo = cwcfg.cube_pkginfo(cube)
+ tversion = tinfo.version
+ cfgpb.add_cube(cube, tversion)
+ except (ConfigurationError, AttributeError) as ex:
+ tinfo = None
+ tversion = '[missing cube information: %s]' % ex
+ print '* %s %s' % (cube.ljust(namesize), tversion)
+ if self.config.verbose:
+ if tinfo:
+ descr = getattr(tinfo, 'description', '')
+ if not descr:
+ descr = getattr(tinfo, 'short_desc', '')
+ if descr:
+ warn('[3.8] short_desc is deprecated, update %s'
+ ' pkginfo' % cube, DeprecationWarning)
+ else:
+ descr = tinfo.__doc__
if descr:
- warn('[3.8] short_desc is deprecated, update %s'
- ' pkginfo' % cube, DeprecationWarning)
- else:
- descr = tinfo.__doc__
- if descr:
- print ' '+ ' \n'.join(descr.splitlines())
- modes = detect_available_modes(cwcfg.cube_dir(cube))
- print ' available modes: %s' % ', '.join(modes)
- print
- try:
- regdir = cwcfg.instances_dir()
- except ConfigurationError as ex:
- print 'No instance available:', ex
+ print ' '+ ' \n'.join(descr.splitlines())
+ modes = detect_available_modes(cwcfg.cube_dir(cube))
+ print ' available modes: %s' % ', '.join(modes)
print
- return
- instances = list_instances(regdir)
- if instances:
- print 'Available instances (%s):' % regdir
- for appid in instances:
- modes = cwcfg.possible_configurations(appid)
- if not modes:
- print '* %s (BROKEN instance, no configuration found)' % appid
- continue
- print '* %s (%s)' % (appid, ', '.join(modes))
- try:
- config = cwcfg.config_for(appid, modes[0])
- except Exception as exc:
- print ' (BROKEN instance, %s)' % exc
- continue
- else:
- print 'No instance available in %s' % regdir
- print
- # configuration management problem solving
- cfgpb.solve()
- if cfgpb.warnings:
- print 'Warnings:\n', '\n'.join('* '+txt for txt in cfgpb.warnings)
- if cfgpb.errors:
- print 'Errors:'
- for op, cube, version, src in cfgpb.errors:
- if op == 'add':
- print '* cube', cube,
- if version:
- print ' version', version,
- print 'is not installed, but required by %s' % src
- else:
- print '* cube %s version %s is installed, but version %s is required by %s' % (
- cube, cfgpb.cubes[cube], version, src)
+
+ if mode in ('all', 'instances'):
+ try:
+ regdir = cwcfg.instances_dir()
+ except ConfigurationError as ex:
+ print 'No instance available:', ex
+ print
+ return
+ instances = list_instances(regdir)
+ if instances:
+ print 'Available instances (%s):' % regdir
+ for appid in instances:
+ modes = cwcfg.possible_configurations(appid)
+ if not modes:
+ print '* %s (BROKEN instance, no configuration found)' % appid
+ continue
+ print '* %s (%s)' % (appid, ', '.join(modes))
+ try:
+ config = cwcfg.config_for(appid, modes[0])
+ except Exception as exc:
+ print ' (BROKEN instance, %s)' % exc
+ continue
+ else:
+ print 'No instance available in %s' % regdir
+ print
+
+ if mode == 'all':
+ # configuration management problem solving
+ cfgpb.solve()
+ if cfgpb.warnings:
+ print 'Warnings:\n', '\n'.join('* '+txt for txt in cfgpb.warnings)
+ if cfgpb.errors:
+ print 'Errors:'
+ for op, cube, version, src in cfgpb.errors:
+ if op == 'add':
+ print '* cube', cube,
+ if version:
+ print ' version', version,
+ print 'is not installed, but required by %s' % src
+ else:
+ print '* cube %s version %s is installed, but version %s is required by %s' % (
+ cube, cfgpb.cubes[cube], version, src)
def check_options_consistency(config):
if config.automatic and config.config_level > 0:
@@ -347,6 +364,12 @@
' "list" command. Default to "all-in-one", e.g. an installation '
'embedding both the RQL repository and the web server.',
}),
+ ('no-db-create',
+ {'short': 'S',
+ 'action': 'store_true',
+ 'default': False,
+ 'help': 'stop after creation and do not continue with db-create',
+ }),
)
def run(self, args):
@@ -415,7 +438,8 @@
print 'set %s as owner of the data directory' % config['uid']
chown(config.appdatahome, config['uid'])
print '\n-> creation done for %s\n' % repr(config.apphome)[1:-1]
- helper.postcreate(self.config.automatic, self.config.config_level)
+ if not self.config.no_db_create:
+ helper.postcreate(self.config.automatic, self.config.config_level)
def _handle_win32(self, config, appid):
if sys.platform != 'win32':
@@ -811,7 +835,6 @@
name = 'versions'
def versions_instance(self, appid):
- from logilab.common.changelog import Version
config = cwcfg.config_for(appid)
# should not raise error if db versions don't match fs versions
config.repairing = True
@@ -822,7 +845,6 @@
for key in sorted(vcconf):
print key+': %s.%s.%s' % vcconf[key]
-
class ShellCommand(Command):
"""Run an interactive migration shell on an instance. This is a python shell
with enhanced migration commands predefined in the namespace. An additional
@@ -989,6 +1011,33 @@
for cube in cwcfg.available_cubes():
print cube
+class ConfigureInstanceCommand(InstanceCommand):
+ """Configure instance.
+
+ <instance>...
+ identifier of the instance to configure.
+ """
+ name = 'configure'
+ actionverb = 'configured'
+
+ options = merge_options(InstanceCommand.options +
+ (('param',
+ {'short': 'p', 'type' : 'named', 'metavar' : 'key1:value1,key2:value2',
+ 'default': None,
+ 'help': 'set <key> to <value> in configuration file.',
+ }),
+ ))
+
+ def configure_instance(self, appid):
+ if self.config.param is not None:
+ appcfg = cwcfg.config_for(appid)
+ for key, value in self.config.param.iteritems():
+ try:
+ appcfg.global_set_option(key, value)
+ except KeyError:
+ raise ConfigurationError('unknown configuration key "%s" for mode %s' % (key, appcfg.name))
+ appcfg.save()
+
for cmdcls in (ListCommand,
CreateInstanceCommand, DeleteInstanceCommand,
StartInstanceCommand, StopInstanceCommand, RestartInstanceCommand,
@@ -998,10 +1047,10 @@
ShellCommand,
RecompileInstanceCatalogsCommand,
ListInstancesCommand, ListCubesCommand,
+ ConfigureInstanceCommand,
):
CWCTL.register(cmdcls)
-
def run(args):
"""command line tool"""
import os
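The new 'configure' action boils down to the following per-option logic; a rough
standalone sketch (instance id, option name and import path are assumptions):

    from cubicweb import ConfigurationError
    from cubicweb.cwconfig import CubicWebConfiguration as cwcfg

    appcfg = cwcfg.config_for('myinstance')
    try:
        appcfg.global_set_option('anonymous-user', 'anon')
    except KeyError:
        raise ConfigurationError('unknown configuration key "anonymous-user" for mode %s'
                                 % appcfg.name)
    appcfg.save()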
diff -r 2cf127d4f5fd -r b4bcabf55e77 cwvreg.py
--- a/cwvreg.py Fri Jun 14 16:13:24 2013 +0200
+++ b/cwvreg.py Fri Jun 14 16:26:25 2013 +0200
@@ -197,14 +197,14 @@
from os.path import join, dirname, realpath
from warnings import warn
from datetime import datetime, date, time, timedelta
-from functools import partial, reduce
+from functools import reduce
from logilab.common.decorators import cached, clear_cache
from logilab.common.deprecation import deprecated, class_deprecated
from logilab.common.modutils import cleanup_sys_modules
from logilab.common.registry import (
RegistryStore, Registry, obj_registries,
- ObjectNotFound, NoSelectableObject, RegistryNotFound)
+ ObjectNotFound, RegistryNotFound)
from rql import RQLHelper
from yams.constraints import BASE_CONVERTERS
diff -r 2cf127d4f5fd -r b4bcabf55e77 dataimport.py
--- a/dataimport.py Fri Jun 14 16:13:24 2013 +0200
+++ b/dataimport.py Fri Jun 14 16:26:25 2013 +0200
@@ -70,10 +70,11 @@
import sys
import threading
import traceback
+import warnings
import cPickle
import os.path as osp
+import inspect
from collections import defaultdict
-from contextlib import contextmanager
from copy import copy
from datetime import date, datetime
from time import asctime
@@ -323,7 +324,6 @@
return [(k, len(v)) for k, v in buckets.items()
if k is not None and len(v) > 1]
-
# sql generator utility functions #############################################
@@ -396,7 +396,7 @@
columns = list(data[0])
execmany_func(cu, statement, data, table, columns, encoding)
except Exception:
- print 'unable to copy data into table %s', table
+ print 'unable to copy data into table %s' % table
# Error in import statement, save data in dump_output_dir
if dump_output_dir is not None:
pdata = {'data': data, 'statement': statement,
@@ -431,7 +431,16 @@
# If an error is raised, do not continue.
formatted_row = []
for col in columns:
- value = row[col]
+ try:
+ value = row[col]
+ except KeyError:
+ warnings.warn(u"Column %s is not accessible in row %s"
+ % (col, row), RuntimeWarning)
+ # XXX 'value' set to None so that the import does not end in
+ # error.
+ # Instead, the extra keys are set to NULL from the
+ # database point of view.
+ value = None
if value is None:
value = 'NULL'
elif isinstance(value, (long, int, float)):
@@ -506,7 +515,7 @@
item['eid'] = data['eid']
return item
- def relate(self, eid_from, rtype, eid_to, inlined=False):
+ def relate(self, eid_from, rtype, eid_to, **kwargs):
"""Add new relation"""
relation = eid_from, rtype, eid_to
self.relations.add(relation)
@@ -523,6 +532,18 @@
"""
pass
+ def flush(self):
+ """The method is provided so that all stores share a common API.
+ It just tries to call the commit method.
+ """
+ print 'starting flush'
+ try:
+ self.commit()
+ except:
+ print 'failed to flush'
+ else:
+ print 'flush done'
+
def rql(self, *args):
if self._rql is not None:
return self._rql(*args)
@@ -538,76 +559,6 @@
def nb_inserted_relations(self):
return len(self.relations)
- @deprecated("[3.7] index support will disappear")
- def build_index(self, name, type, func=None, can_be_empty=False):
- """build internal index for further search"""
- index = {}
- if func is None or not callable(func):
- func = lambda x: x['eid']
- for eid in self.types[type]:
- index.setdefault(func(self.eids[eid]), []).append(eid)
- if not can_be_empty:
- assert index, "new index '%s' cannot be empty" % name
- self.indexes[name] = index
-
- @deprecated("[3.7] index support will disappear")
- def build_rqlindex(self, name, type, key, rql, rql_params=False,
- func=None, can_be_empty=False):
- """build an index by rql query
-
- rql should return eid in first column
- ctl.store.build_index('index_name', 'users', 'login', 'Any U WHERE U is CWUser')
- """
- self.types[type] = []
- rset = self.rql(rql, rql_params or {})
- if not can_be_empty:
- assert rset, "new index type '%s' cannot be empty (0 record found)" % type
- for entity in rset.entities():
- getattr(entity, key) # autopopulate entity with key attribute
- self.eids[entity.eid] = dict(entity)
- if entity.eid not in self.types[type]:
- self.types[type].append(entity.eid)
-
- # Build index with specified key
- func = lambda x: x[key]
- self.build_index(name, type, func, can_be_empty=can_be_empty)
-
- @deprecated("[3.7] index support will disappear")
- def fetch(self, name, key, unique=False, decorator=None):
- """index fetcher method
-
- decorator is a callable method or an iterator of callable methods (usually a lambda function)
- decorator=lambda x: x[:1] (first value is returned)
- decorator=lambda x: x.lower (lowercased value is returned)
-
- decorator is handy when you want to improve index keys but without
- changing the original field
-
- Same check functions can be reused here.
- """
- eids = self.indexes[name].get(key, [])
- if decorator is not None:
- if not hasattr(decorator, '__iter__'):
- decorator = (decorator,)
- for f in decorator:
- eids = f(eids)
- if unique:
- assert len(eids) == 1, u'expected a single one value for key "%s" in index "%s". Got %i' % (key, name, len(eids))
- eids = eids[0]
- return eids
-
- @deprecated("[3.7] index support will disappear")
- def find(self, type, key, value):
- for idx in self.types[type]:
- item = self.items[idx]
- if item[key] == value:
- yield item
-
- @deprecated("[3.7] checkpoint() deprecated. use commit() instead")
- def checkpoint(self):
- self.commit()
-
-
class RQLObjectStore(ObjectStore):
"""ObjectStore that works with an actual RQL repository (production mode)"""
_rql = None # bw compat
@@ -630,10 +581,6 @@
self.session = session
self._commit = commit or session.commit
- @deprecated("[3.7] checkpoint() deprecated. use commit() instead")
- def checkpoint(self):
- self.commit()
-
def commit(self):
txuuid = self._commit()
self.session.set_cnxset()
@@ -657,9 +604,9 @@
for k in item)
return self.rql(query, item)[0][0]
- def relate(self, eid_from, rtype, eid_to, inlined=False):
+ def relate(self, eid_from, rtype, eid_to, **kwargs):
eid_from, rtype, eid_to = super(RQLObjectStore, self).relate(
- eid_from, rtype, eid_to)
+ eid_from, rtype, eid_to, **kwargs)
self.rql('SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype,
{'x': int(eid_from), 'y': int(eid_to)})
@@ -809,8 +756,8 @@
self._nb_inserted_relations = 0
self.rql = session.execute
# deactivate security
- session.set_read_security(False)
- session.set_write_security(False)
+ session.read_security = False
+ session.write_security = False
def create_entity(self, etype, **kwargs):
for k, v in kwargs.iteritems():
@@ -825,20 +772,23 @@
session = self.session
self.source.add_entity(session, entity)
self.source.add_info(session, entity, self.source, None, complete=False)
+ kwargs = dict()
+ if inspect.getargspec(self.add_relation).keywords:
+ kwargs['subjtype'] = entity.cw_etype
for rtype, targeteids in rels.iteritems():
# targeteids may be a single eid or a list of eids
inlined = self.rschema(rtype).inlined
try:
for targeteid in targeteids:
self.add_relation(session, entity.eid, rtype, targeteid,
- inlined)
+ inlined, **kwargs)
except TypeError:
self.add_relation(session, entity.eid, rtype, targeteids,
- inlined)
+ inlined, **kwargs)
self._nb_inserted_entities += 1
return entity
- def relate(self, eid_from, rtype, eid_to):
+ def relate(self, eid_from, rtype, eid_to, **kwargs):
assert not rtype.startswith('reverse_')
self.add_relation(self.session, eid_from, rtype, eid_to,
self.rschema(rtype).inlined)
@@ -962,12 +912,12 @@
"""Flush data to the database"""
self.source.flush()
- def relate(self, subj_eid, rtype, obj_eid, subjtype=None):
+ def relate(self, subj_eid, rtype, obj_eid, **kwargs):
if subj_eid is None or obj_eid is None:
return
# XXX Could subjtype be inferred ?
self.source.add_relation(self.session, subj_eid, rtype, obj_eid,
- self.rschema(rtype).inlined, subjtype)
+ self.rschema(rtype).inlined, **kwargs)
def drop_indexes(self, etype):
"""Drop indexes for a given entity type"""
@@ -1081,18 +1031,20 @@
encoding=self.dbencoding)
except:
print 'failed to flush'
+ else:
+ print 'flush done'
finally:
_entities_sql.clear()
_relations_sql.clear()
_insertdicts.clear()
_inlined_relations_sql.clear()
- print 'flush done'
def add_relation(self, session, subject, rtype, object,
- inlined=False, subjtype=None):
+ inlined=False, **kwargs):
if inlined:
_sql = self._sql.inlined_relations
data = {'cw_eid': subject, SQL_PREFIX + rtype: object}
+ subjtype = kwargs.get('subjtype')
if subjtype is None:
# Try to infer it
targets = [t.type for t in
@@ -1102,7 +1054,9 @@
else:
raise ValueError('You should give the subject etype for '
'inlined relation %s'
- ', as it cannot be inferred' % rtype)
+ ', as it cannot be inferred: '
+ 'this type is given as keyword argument '
+ '``subjtype``'% rtype)
statement = self.sqlgen.update(SQL_PREFIX + subjtype,
data, ['cw_eid'])
else:
@@ -1117,13 +1071,13 @@
def add_entity(self, session, entity):
with self._storage_handler(entity, 'added'):
attrs = self.preprocess_entity(entity)
- rtypes = self._inlined_rtypes_cache.get(entity.__regid__, ())
+ rtypes = self._inlined_rtypes_cache.get(entity.cw_etype, ())
if isinstance(rtypes, str):
rtypes = (rtypes,)
for rtype in rtypes:
if rtype not in attrs:
attrs[rtype] = None
- sql = self.sqlgen.insert(SQL_PREFIX + entity.__regid__, attrs)
+ sql = self.sqlgen.insert(SQL_PREFIX + entity.cw_etype, attrs)
self._sql.eid_insertdicts[entity.eid] = attrs
self._append_to_entities(sql, attrs)
@@ -1156,7 +1110,7 @@
assert isinstance(extid, str)
extid = b64encode(extid)
uri = 'system' if source.copy_based_source else source.uri
- attrs = {'type': entity.__regid__, 'eid': entity.eid, 'extid': extid,
+ attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': extid,
'source': uri, 'asource': source.uri, 'mtime': datetime.utcnow()}
self._handle_insert_entity_sql(session, self.sqlgen.insert('entities', attrs), attrs)
# insert core relations: is, is_instance_of and cw_source
@@ -1175,7 +1129,7 @@
self._handle_is_relation_sql(session, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)',
(entity.eid, source.eid))
# now we can update the full text index
- if self.do_fti and self.need_fti_indexation(entity.__regid__):
+ if self.do_fti and self.need_fti_indexation(entity.cw_etype):
if complete:
entity.complete(entity.e_schema.indexable_attributes())
self.index_entity(session, entity=entity)
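A short sketch of how the reworked store API is meant to be driven (entity and
relation names are made up; store may be any of the stores above):

    def link_disease_to_genes(store, disease_eid, gene_eids, location_eid):
        for gene_eid in gene_eids:
            store.relate(disease_eid, 'associated_genes', gene_eid)
        # the new **kwargs on relate() lets callers pass subjtype, which
        # SQLGenObjectStore needs for inlined relations it cannot infer
        store.relate(disease_eid, 'chromosomal_location', location_eid,
                     subjtype='Disease')
        # flush() is now part of the common store API; for ObjectStore it simply
        # delegates to commit()
        store.flush()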
diff -r 2cf127d4f5fd -r b4bcabf55e77 dbapi.py
--- a/dbapi.py Fri Jun 14 16:13:24 2013 +0200
+++ b/dbapi.py Fri Jun 14 16:26:25 2013 +0200
@@ -415,6 +415,7 @@
"""return the definition of sources used by the repository."""
return self.cnx.source_defs()
+ @deprecated('[3.17] do not use hijack_user. create new Session object')
def hijack_user(self, user):
"""return a fake request/session using specified user"""
req = DBAPIRequest(self.vreg)
diff -r 2cf127d4f5fd -r b4bcabf55e77 debian/changelog
--- a/debian/changelog Fri Jun 14 16:13:24 2013 +0200
+++ b/debian/changelog Fri Jun 14 16:26:25 2013 +0200
@@ -1,3 +1,27 @@
+cubicweb (3.17.2-1) unstable; urgency=low
+
+ * new upstream release
+
+ -- David Douard Thu, 13 Jun 2013 17:32:18 +0200
+
+cubicweb (3.17.1-1) unstable; urgency=low
+
+ * new upstream release
+
+ -- David Douard Thu, 06 Jun 2013 12:28:49 +0200
+
+cubicweb (3.17.0-2) unstable; urgency=low
+
+ * fix yams Depends on cubicweb-common
+
+ -- Pierre-Yves David Fri, 03 May 2013 16:26:50 +0200
+
+cubicweb (3.17.0-1) unstable; urgency=low
+
+ * new upstream release
+
+ -- Pierre-Yves David Mon, 29 Apr 2013 11:20:56 +0200
+
cubicweb (3.16.5-1) unstable; urgency=low
* new upstream release
diff -r 2cf127d4f5fd -r b4bcabf55e77 debian/control
--- a/debian/control Fri Jun 14 16:13:24 2013 +0200
+++ b/debian/control Fri Jun 14 16:26:25 2013 +0200
@@ -16,7 +16,7 @@
python-unittest2,
python-logilab-mtconverter,
python-rql,
- python-yams,
+ python-yams (>= 0.37),
python-lxml,
Standards-Version: 3.9.1
Homepage: http://www.cubicweb.org
@@ -25,8 +25,15 @@
Package: cubicweb
Architecture: all
XB-Python-Version: ${python:Versions}
-Depends: ${misc:Depends}, ${python:Depends}, cubicweb-server (= ${source:Version}), cubicweb-twisted (= ${source:Version})
-Recommends: postgresql | mysql | sqlite3
+Depends:
+ ${misc:Depends},
+ ${python:Depends},
+ cubicweb-server (= ${source:Version}),
+ cubicweb-twisted (= ${source:Version})
+Recommends:
+ postgresql
+ | mysql
+ | sqlite3
Description: the complete CubicWeb framework
CubicWeb is a semantic web application framework.
.
@@ -42,9 +49,21 @@
Conflicts: cubicweb-multisources
Replaces: cubicweb-multisources
Provides: cubicweb-multisources
-Depends: ${misc:Depends}, ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-logilab-database (>= 1.8.2), cubicweb-postgresql-support | cubicweb-mysql-support | python-pysqlite2, python-passlib
-Recommends: pyro (<< 4.0.0), cubicweb-documentation (= ${source:Version})
-Suggests: python-zmq
+Depends:
+ ${misc:Depends},
+ ${python:Depends},
+ cubicweb-common (= ${source:Version}),
+ cubicweb-ctl (= ${source:Version}),
+ python-logilab-database (>= 1.10.0),
+ cubicweb-postgresql-support
+ | cubicweb-mysql-support
+ | python-pysqlite2,
+ python-passlib
+Recommends:
+ pyro (<< 4.0.0),
+ cubicweb-documentation (= ${source:Version})
+Suggests:
+ python-zmq
Description: server part of the CubicWeb framework
CubicWeb is a semantic web application framework.
.
@@ -56,7 +75,10 @@
Package: cubicweb-postgresql-support
Architecture: all
# postgresql-client packages for backup/restore of non local database
-Depends: ${misc:Depends}, python-psycopg2, postgresql-client
+Depends:
+ ${misc:Depends},
+ python-psycopg2,
+ postgresql-client
Description: postgres support for the CubicWeb framework
CubicWeb is a semantic web application framework.
.
@@ -66,7 +88,10 @@
Package: cubicweb-mysql-support
Architecture: all
# mysql-client packages for backup/restore of non local database
-Depends: ${misc:Depends}, python-mysqldb, mysql-client
+Depends:
+ ${misc:Depends},
+ python-mysqldb,
+ mysql-client
Description: mysql support for the CubicWeb framework
CubicWeb is a semantic web application framework.
.
@@ -78,8 +103,15 @@
Architecture: all
XB-Python-Version: ${python:Versions}
Provides: cubicweb-web-frontend
-Depends: ${misc:Depends}, ${python:Depends}, cubicweb-web (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-twisted-web
-Recommends: pyro (<< 4.0.0), cubicweb-documentation (= ${source:Version})
+Depends:
+ ${misc:Depends},
+ ${python:Depends},
+ cubicweb-web (= ${source:Version}),
+ cubicweb-ctl (= ${source:Version}),
+ python-twisted-web
+Recommends:
+ pyro (<< 4.0.0),
+ cubicweb-documentation (= ${source:Version})
Description: twisted-based web interface for the CubicWeb framework
CubicWeb is a semantic web application framework.
.
@@ -92,8 +124,18 @@
Package: cubicweb-web
Architecture: all
XB-Python-Version: ${python:Versions}
-Depends: ${misc:Depends}, ${python:Depends}, cubicweb-common (= ${source:Version}), python-simplejson (>= 2.0.9)
-Recommends: python-docutils (>= 0.6), python-vobject, fckeditor, python-fyzz, python-imaging, python-rdflib
+Depends:
+ ${misc:Depends},
+ ${python:Depends},
+ cubicweb-common (= ${source:Version}),
+ python-simplejson (>= 2.0.9)
+Recommends:
+ python-docutils (>= 0.6),
+ python-vobject,
+ fckeditor,
+ python-fyzz,
+ python-imaging,
+ python-rdflib
Description: web interface library for the CubicWeb framework
CubicWeb is a semantic web application framework.
.
@@ -107,8 +149,19 @@
Package: cubicweb-common
Architecture: all
XB-Python-Version: ${python:Versions}
-Depends: ${misc:Depends}, ${python:Depends}, graphviz, gettext, python-logilab-mtconverter (>= 0.8.0), python-logilab-common (>= 0.59.0), python-yams (>= 0.36.0), python-rql (>= 0.31.2), python-lxml
-Recommends: python-simpletal (>= 4.0), python-crypto
+Depends:
+ ${misc:Depends},
+ ${python:Depends},
+ graphviz,
+ gettext,
+ python-logilab-mtconverter (>= 0.8.0),
+ python-logilab-common (>= 0.59.0),
+ python-yams (>= 0.37.0),
+ python-rql (>= 0.31.2),
+ python-lxml
+Recommends:
+ python-simpletal (>= 4.0),
+ python-crypto
Conflicts: cubicweb-core
Replaces: cubicweb-core
Description: common library for the CubicWeb framework
@@ -121,7 +174,10 @@
Package: cubicweb-ctl
Architecture: all
XB-Python-Version: ${python:Versions}
-Depends: ${misc:Depends}, ${python:Depends}, cubicweb-common (= ${source:Version})
+Depends:
+ ${misc:Depends},
+ ${python:Depends},
+ cubicweb-common (= ${source:Version})
Description: tool to manage the CubicWeb framework
CubicWeb is a semantic web application framework.
.
@@ -133,8 +189,15 @@
Package: cubicweb-dev
Architecture: all
XB-Python-Version: ${python:Versions}
-Depends: ${misc:Depends}, ${python:Depends}, cubicweb-server (= ${source:Version}), cubicweb-web (= ${source:Version}), python-pysqlite2
-Suggests: w3c-dtd-xhtml, xvfb
+Depends:
+ ${misc:Depends},
+ ${python:Depends},
+ cubicweb-server (= ${source:Version}),
+ cubicweb-web (= ${source:Version}),
+ python-pysqlite2
+Suggests:
+ w3c-dtd-xhtml,
+ xvfb
Description: tests suite and development tools for the CubicWeb framework
CubicWeb is a semantic web application framework.
.
@@ -144,7 +207,8 @@
Package: cubicweb-documentation
Architecture: all
-Recommends: doc-base
+Recommends:
+ doc-base
Description: documentation for the CubicWeb framework
CubicWeb is a semantic web application framework.
.
diff -r 2cf127d4f5fd -r b4bcabf55e77 devtools/__init__.py
--- a/devtools/__init__.py Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/__init__.py Fri Jun 14 16:26:25 2013 +0200
@@ -28,15 +28,14 @@
import warnings
from hashlib import sha1 # pylint: disable=E0611
from datetime import timedelta
-from os.path import (abspath, join, exists, basename, dirname, normpath, split,
- isfile, isabs, splitext, isdir, expanduser)
+from os.path import (abspath, join, exists, split, isabs, isdir)
from functools import partial
from logilab.common.date import strptime
from logilab.common.decorators import cached, clear_cache
-from cubicweb import ConfigurationError, ExecutionError, BadConnectionId
-from cubicweb import CW_SOFTWARE_ROOT, schema, cwconfig
+from cubicweb import ExecutionError, BadConnectionId
+from cubicweb import schema, cwconfig
from cubicweb.server.serverconfig import ServerConfiguration
from cubicweb.etwist.twconfig import TwistedConfiguration
diff -r 2cf127d4f5fd -r b4bcabf55e77 devtools/devctl.py
--- a/devtools/devctl.py Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/devctl.py Fri Jun 14 16:26:25 2013 +0200
@@ -122,7 +122,6 @@
def _generate_schema_pot(w, vreg, schema, libconfig=None):
- from copy import deepcopy
from cubicweb.i18n import add_msg
from cubicweb.schema import NO_I18NCONTEXT, CONSTRAINTS
w('# schema pot file, generated on %s\n'
diff -r 2cf127d4f5fd -r b4bcabf55e77 devtools/fake.py
--- a/devtools/fake.py Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/fake.py Fri Jun 14 16:26:25 2013 +0200
@@ -25,7 +25,6 @@
from cubicweb.req import RequestSessionBase
from cubicweb.cwvreg import CWRegistryStore
from cubicweb.web.request import CubicWebRequestBase
-from cubicweb.web.http_headers import Headers
from cubicweb.devtools import BASE_URL, BaseApptestConfiguration
@@ -163,10 +162,6 @@
# for use with enabled_security context manager
read_security = write_security = True
- def init_security(self, *args):
- return None, None
- def reset_security(self, *args):
- return
class FakeRepo(object):
querier = None
diff -r 2cf127d4f5fd -r b4bcabf55e77 devtools/htmlparser.py
--- a/devtools/htmlparser.py Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/htmlparser.py Fri Jun 14 16:26:25 2013 +0200
@@ -1,4 +1,4 @@
-# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of CubicWeb.
@@ -19,10 +19,12 @@
import re
import sys
+from xml import sax
+from cStringIO import StringIO
from lxml import etree
-from logilab.common.deprecation import class_deprecated
+from logilab.common.deprecation import class_deprecated, class_renamed
from cubicweb.view import STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE
@@ -31,22 +33,67 @@
ERR_COUNT = 0
-class Validator(object):
+_REM_SCRIPT_RGX = re.compile(r"<script[^>]*>.*?</script>", re.U|re.M|re.I|re.S)
+def _remove_script_tags(data):
+ """Remove the script (usually javascript) tags to help the lxml
+ XMLParser / HTMLParser do their job. Without that, they choke on
+ tags embedded in JS strings.
+ """
+ # Notice we may want to use lxml cleaner, but it's far too intrusive:
+ #
+ # cleaner = Cleaner(scripts=True,
+ # javascript=False,
+ # comments=False,
+ # style=False,
+ # links=False,
+ # meta=False,
+ # page_structure=False,
+ # processing_instructions=False,
+ # embedded=False,
+ # frames=False,
+ # forms=False,
+ # annoying_tags=False,
+ # remove_tags=(),
+ # remove_unknown_tags=False,
+ # safe_attrs_only=False,
+ # add_nofollow=False)
+ # >>> cleaner.clean_html('')
+ # ''
+ # >>> cleaner.clean_html('')
+ # ''
+ # >>> cleaner.clean_html('')
+ # ''
+ # >>> cleaner.clean_html(' ')
+ # ' '
+ # >>> cleaner.clean_html(' ')
+ # ' '
+ #
+ # using that, we'll miss most actual validation error we want to
+ # catch. For now, use dumb regexp
+ return _REM_SCRIPT_RGX.sub('', data)
- def parse_string(self, data, sysid=None):
+
+class Validator(object):
+ """ base validator API """
+ parser = None
+
+ def parse_string(self, source):
+ etree = self._parse(self.preprocess_data(source))
+ return PageInfo(source, etree)
+
+ def preprocess_data(self, data):
+ return data
+
+ def _parse(self, pdata):
try:
- data = self.preprocess_data(data)
- return PageInfo(data, etree.fromstring(data, self.parser))
+ return etree.fromstring(pdata, self.parser)
except etree.XMLSyntaxError as exc:
def save_in(fname=''):
file(fname, 'w').write(data)
- new_exc = AssertionError(u'invalid xml %s' % exc)
+ new_exc = AssertionError(u'invalid document: %s' % exc)
new_exc.position = exc.position
raise new_exc
- def preprocess_data(self, data):
- return data
-
class DTDValidator(Validator):
def __init__(self):
@@ -60,7 +107,7 @@
return data
# parse using transitional DTD
data = data.replace(STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE)
- tree = etree.fromstring(data, self.parser)
+ tree = self._parse(data)
namespace = tree.nsmap.get(None)
# this is the list of authorized child tags for <blockquote> nodes
expected = 'p h1 h2 h3 h4 h5 h6 div ul ol dl pre hr blockquote address ' \
@@ -79,20 +126,64 @@
STRICT_DOCTYPE, data)
-class SaxOnlyValidator(Validator):
+class XMLValidator(Validator):
+ """XML validator, checks that XML is well-formed and used XMLNS are defined"""
def __init__(self):
Validator.__init__(self)
self.parser = etree.XMLParser()
+SaxOnlyValidator = class_renamed('SaxOnlyValidator',
+ XMLValidator,
+ '[3.17] you should use the '
+ 'XMLValidator class instead')
-class XMLDemotingValidator(SaxOnlyValidator):
+
+class XMLSyntaxValidator(Validator):
+ """XML syntax validator, check XML is well-formed"""
+
+ class MySaxErrorHandler(sax.ErrorHandler):
+ """override default handler to avoid choking because of unknown entity"""
+ def fatalError(self, exception):
+ # XXX check entity in htmlentitydefs
+ if not str(exception).endswith('undefined entity'):
+ raise exception
+ _parser = sax.make_parser()
+ _parser.setContentHandler(sax.handler.ContentHandler())
+ _parser.setErrorHandler(MySaxErrorHandler())
+
+ def __init__(self):
+ super(XMLSyntaxValidator, self).__init__()
+ # XMLParser() wants xml namespaces defined
+ # XMLParser(recover=True) will accept almost anything
+ #
+ # -> use the later but preprocess will check xml well-formness using a
+ # dumb SAX parser
+ self.parser = etree.XMLParser(recover=True)
+
+ def preprocess_data(self, data):
+ return _remove_script_tags(data)
+
+ def _parse(self, data):
+ inpsrc = sax.InputSource()
+ inpsrc.setByteStream(StringIO(data))
+ try:
+ self._parser.parse(inpsrc)
+ except sax.SAXParseException, exc:
+ new_exc = AssertionError(u'invalid document: %s' % exc)
+ new_exc.position = (exc._linenum, exc._colnum)
+ raise new_exc
+ return super(XMLSyntaxValidator, self)._parse(data)
+
+
+class XMLDemotingValidator(XMLValidator):
""" some views produce html instead of xhtml, using demote_to_html
this is typically related to the use of external dependencies
which do not produce valid xhtml (google maps, ...)
"""
__metaclass__ = class_deprecated
+ __deprecation_warning__ = '[3.10] this is now handled in testlib.py'
def preprocess_data(self, data):
if data.startswith('<?xml'):
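A quick sketch of the new validators in use (the markup and assertions are
illustrative only):

    from cubicweb.devtools import htmlparser

    validator = htmlparser.XMLSyntaxValidator()
    page = validator.parse_string('<!DOCTYPE html>\n'
                                  '<html><body><h1>hello</h1></body></html>')
    # the old name still resolves through the class_renamed shim, but warns
    legacy = htmlparser.SaxOnlyValidator()
    assert isinstance(legacy, htmlparser.XMLValidator)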
diff -r 2cf127d4f5fd -r b4bcabf55e77 devtools/test/unittest_httptest.py
--- a/devtools/test/unittest_httptest.py Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/test/unittest_httptest.py Fri Jun 14 16:26:25 2013 +0200
@@ -20,7 +20,7 @@
import httplib
from logilab.common.testlib import Tags
-from cubicweb.devtools.httptest import CubicWebServerTC, CubicWebServerConfig
+from cubicweb.devtools.httptest import CubicWebServerTC
class TwistedCWAnonTC(CubicWebServerTC):
diff -r 2cf127d4f5fd -r b4bcabf55e77 devtools/test/unittest_qunit.py
--- a/devtools/test/unittest_qunit.py Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/test/unittest_qunit.py Fri Jun 14 16:26:25 2013 +0200
@@ -1,5 +1,5 @@
from logilab.common.testlib import unittest_main
-from cubicweb.devtools.qunit import make_qunit_html, QUnitTestCase
+from cubicweb.devtools.qunit import QUnitTestCase
from os import path as osp
diff -r 2cf127d4f5fd -r b4bcabf55e77 devtools/test/unittest_testlib.py
--- a/devtools/test/unittest_testlib.py Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/test/unittest_testlib.py Fri Jun 14 16:26:25 2013 +0200
@@ -97,7 +97,9 @@
class HTMLPageInfoTC(TestCase):
"""test cases for PageInfo"""
def setUp(self):
- parser = htmlparser.DTDValidator()
+ parser = htmlparser.HTMLValidator()
+ # disable cleanup that would remove doctype
+ parser.preprocess_data = lambda data: data
self.page_info = parser.parse_string(HTML_PAGE2)
def test_source1(self):
diff -r 2cf127d4f5fd -r b4bcabf55e77 devtools/testlib.py
--- a/devtools/testlib.py Fri Jun 14 16:13:24 2013 +0200
+++ b/devtools/testlib.py Fri Jun 14 16:26:25 2013 +0200
@@ -18,7 +18,6 @@
"""this module contains base classes and utilities for cubicweb tests"""
__docformat__ = "restructuredtext en"
-import os
import sys
import re
import urlparse
@@ -40,15 +39,14 @@
from logilab.common.deprecation import deprecated, class_deprecated
from logilab.common.shellutils import getlogin
-from cubicweb import ValidationError, NoSelectableObject, AuthenticationError
+from cubicweb import ValidationError, NoSelectableObject
from cubicweb import cwconfig, dbapi, devtools, web, server
from cubicweb.utils import json
from cubicweb.sobjects import notification
from cubicweb.web import Redirect, application
-from cubicweb.server.session import Session
from cubicweb.server.hook import SendMailOp
from cubicweb.devtools import SYSTEM_ENTITIES, SYSTEM_RELATIONS, VIEW_VALIDATORS
-from cubicweb.devtools import BASE_URL, fake, htmlparser, DEFAULT_EMPTY_DB_ID
+from cubicweb.devtools import fake, htmlparser, DEFAULT_EMPTY_DB_ID
from cubicweb.utils import json
# low-level utilities ##########################################################
@@ -812,8 +810,8 @@
# snippets
#'text/html': DTDValidator,
#'application/xhtml+xml': DTDValidator,
- 'application/xml': htmlparser.SaxOnlyValidator,
- 'text/xml': htmlparser.SaxOnlyValidator,
+ 'application/xml': htmlparser.XMLValidator,
+ 'text/xml': htmlparser.XMLValidator,
'application/json': JsonValidator,
'text/plain': None,
'text/comma-separated-values': None,
@@ -891,8 +889,12 @@
content_type = view.content_type
if content_type is None:
content_type = 'text/html'
- if content_type in ('text/html', 'application/xhtml+xml'):
- if output and output.startswith('<?xml'):
+ # only check XML well-formness since HTMLValidator isn't html5
+ # compatible and won't like various other extensions
+ default_validator = htmlparser.XMLSyntaxValidator
+ elif output.startswith('<?xml'):
+
+* Add CubicWebRequestBase.content with the content of the HTTP request (see #2742453)
+ (see `#2742453 `_)
+
+* Add directive bookmark to ReST rendering
+ (see `#2545595 `_)
+
+* Allow user defined final type
+ (see `#124342 `_)
+
+
+API changes
+-----------
+
+* drop typed_eid() in favour of int() (see `#2742462 `_)
+
+* The SIOC views and adapters have been removed from CubicWeb and moved to the
+ `sioc` cube.
+
+* The web page embedding views and adapters have been removed from CubicWeb and
+ moved to the `embed` cube.
+
+* The email sending views and controllers have been removed from CubicWeb and
+ moved to the `massmailing` cube.
+
+* ``RenderAndSendNotificationView`` is deprecated in favor of
+ ``ActualNotificationOp``; the new operation uses the more efficient *data*
+ idiom.
+
+* Looping tasks can now have an interval <= ``0``. A negative interval disables the
+ looping task entirely.
+
+* We now serve html instead of xhtml.
+ (see `#2065651 `_)
+
+
+Deprecation
+---------------------
+
+* ``ldapuser`` has been deprecated. It'll be fully dropped in the next
+ version. If you are still using ldapuser, switch to ``ldapfeed`` **NOW**!
+
+* ``hijack_user`` has been deprecated. It will be dropped soon.
+
+Deprecated Code Drops
+----------------------
+
+* The progress views and adapters have been removed from CubicWeb. These
+ classes were deprecated since 3.14.0. They are still available in the
+ `iprogress` cube.
+
+* API deprecated since 3.7 have been dropped.
diff -r 2cf127d4f5fd -r b4bcabf55e77 doc/book/en/admin/pyro.rst
--- a/doc/book/en/admin/pyro.rst Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/book/en/admin/pyro.rst Fri Jun 14 16:26:25 2013 +0200
@@ -53,7 +53,7 @@
cnx.commit()
Calling :meth:`cubicweb.dbapi.load_appobjects`, will populate the
-cubicweb registrires (see :ref:`VRegistryIntro`) with the application
+cubicweb registries (see :ref:`VRegistryIntro`) with the application
objects installed on the host where the script runs. You'll then be
allowed to use the ORM goodies and custom entity methods and views. Of
course this is optional, without it you can still get the repository
diff -r 2cf127d4f5fd -r b4bcabf55e77 doc/book/en/conf.py
--- a/doc/book/en/conf.py Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/book/en/conf.py Fri Jun 14 16:26:25 2013 +0200
@@ -31,7 +31,6 @@
# All configuration values have a default value; values that are commented out
# serve to show the default value.
-import sys, os
from os import path as osp
path = __file__
diff -r 2cf127d4f5fd -r b4bcabf55e77 doc/book/en/devrepo/repo/notifications.rst
--- a/doc/book/en/devrepo/repo/notifications.rst Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/book/en/devrepo/repo/notifications.rst Fri Jun 14 16:26:25 2013 +0200
@@ -3,4 +3,27 @@
Notifications management
========================
-.. XXX FILLME
+CubicWeb provides a machinery to ease notifications handling. To use it for a
+notification:
+
+* write a view inheriting from
+ :class:`~cubicweb.sobjects.notification.NotificationView`. The usual view api
+ is used to generate the email (plain text) content, and additional
+ :meth:`~cubicweb.sobjects.notification.NotificationView.subject` and
+ :meth:`~cubicweb.sobjects.notification.NotificationView.recipients` methods
+ are used to build the email's subject and
+ recipients. :class:`NotificationView` provides default implementations for both
+ methods.
+
+* write a hook for the event that should trigger this notification, select the view
+ (without rendering it), and give it to
+ :func:`cubicweb.hooks.notification.notify_on_commit` so that the notification
+ will be sent if the transaction succeeds.
+
+
+.. XXX explain recipient finder and provide example
+
+API details
+~~~~~~~~~~~
+.. autoclass:: cubicweb.sobjects.notification.NotificationView
+.. autofunction:: cubicweb.hooks.notification.notify_on_commit
diff -r 2cf127d4f5fd -r b4bcabf55e77 doc/book/en/devrepo/repo/sessions.rst
--- a/doc/book/en/devrepo/repo/sessions.rst Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/book/en/devrepo/repo/sessions.rst Fri Jun 14 16:26:25 2013 +0200
@@ -199,3 +199,8 @@
if hasattr(req.cnx, 'foo_user') and req.foo_user:
return 1
return 0
+
+Full API Session
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. autoclass:: cubicweb.server.session.Session
diff -r 2cf127d4f5fd -r b4bcabf55e77 doc/book/en/intro/concepts.rst
--- a/doc/book/en/intro/concepts.rst Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/book/en/intro/concepts.rst Fri Jun 14 16:26:25 2013 +0200
@@ -29,8 +29,7 @@
.. note::
- The command :command:`cubicweb-ctl list` displays the list of cubes
- installed on your system.
+ The command :command:`cubicweb-ctl list` displays the list of available cubes.
.. _`CubicWeb.org Forge`: http://www.cubicweb.org/project/
.. _`cubicweb-blog`: http://www.cubicweb.org/project/cubicweb-blog
@@ -89,7 +88,7 @@
state of an object changes. See :ref:`HookIntro` below.
.. [1] not to be confused with a Mercurial repository or a Debian repository.
-.. _`Python Remote Objects`: http://pyro.sourceforge.net/
+.. _`Python Remote Objects`: http://pythonhosted.org/Pyro4/
.. _WebEngineIntro:
diff -r 2cf127d4f5fd -r b4bcabf55e77 doc/book/en/intro/history.rst
--- a/doc/book/en/intro/history.rst Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/book/en/intro/history.rst Fri Jun 14 16:26:25 2013 +0200
@@ -28,5 +28,5 @@
and energy originally put in the design of the framework.
-.. _Narval: http://www.logilab.org/project/narval
+.. _Narval: http://www.logilab.org/project/narval-moved
.. _Logilab: http://www.logilab.fr/
diff -r 2cf127d4f5fd -r b4bcabf55e77 doc/book/en/tutorials/base/conclusion.rst
--- a/doc/book/en/tutorials/base/conclusion.rst Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/book/en/tutorials/base/conclusion.rst Fri Jun 14 16:26:25 2013 +0200
@@ -3,7 +3,7 @@
What's next?
------------
-In this tutorial, we have seen have you can, right after the installation of
+In this tutorial, we have seen that you can, right after the installation of
|cubicweb|, build a web application in a few minutes by defining a data model as
assembling cubes. You get a working application that you can then customize there
and there while keeping something that works. This is important in agile
diff -r 2cf127d4f5fd -r b4bcabf55e77 doc/book/en/tutorials/index.rst
--- a/doc/book/en/tutorials/index.rst Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/book/en/tutorials/index.rst Fri Jun 14 16:26:25 2013 +0200
@@ -18,3 +18,4 @@
base/index
advanced/index
tools/windmill.rst
+ textreports/index
diff -r 2cf127d4f5fd -r b4bcabf55e77 doc/book/en/tutorials/textreports/index.rst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/book/en/tutorials/textreports/index.rst Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,13 @@
+.. -*- coding: utf-8 -*-
+
+Writing text reports with RestructuredText
+==========================================
+
+|cubicweb| offers several text formats for the RichString type used in schemas,
+including restructuredtext.
+
+Three additional restructuredtext roles are defined by |cubicweb|:
+
+.. autodocfunction:: cubicweb.ext.rest.eid_reference_role
+.. autodocfunction:: cubicweb.ext.rest.rql_role
+.. autodocfunction:: cubicweb.ext.rest.bookmark_role
diff -r 2cf127d4f5fd -r b4bcabf55e77 doc/tools/generate_modules.py
--- a/doc/tools/generate_modules.py Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/tools/generate_modules.py Fri Jun 14 16:26:25 2013 +0200
@@ -19,8 +19,6 @@
"""
-import sys
-
EXCLUDE_DIRS = ('test', 'tests', 'examples', 'data', 'doc', 'dist',
'.hg', 'migration')
if __name__ == '__main__':
diff -r 2cf127d4f5fd -r b4bcabf55e77 doc/tools/pyjsrest.py
--- a/doc/tools/pyjsrest.py Fri Jun 14 16:13:24 2013 +0200
+++ b/doc/tools/pyjsrest.py Fri Jun 14 16:26:25 2013 +0200
@@ -136,9 +136,7 @@
'cubicweb.preferences',
'cubicweb.edition',
'cubicweb.reledit',
- 'cubicweb.iprogress',
'cubicweb.rhythm',
- 'cubicweb.gmap',
'cubicweb.timeline-ext',
]
diff -r 2cf127d4f5fd -r b4bcabf55e77 doc/tutorials/dataimport/data_import_tutorial.rst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/tutorials/dataimport/data_import_tutorial.rst Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,646 @@
+Importing relational data into a CubicWeb instance
+==================================================
+
+Introduction
+~~~~~~~~~~~~
+
+This tutorial explains how to import data from an external source (e.g. a collection of files)
+into a CubicWeb cube instance.
+
+First, once we know the format of the data we wish to import, we devise a
+*data model*, that is, a CubicWeb (Yams) schema which reflects the way the data
+is structured. This schema is implemented in the ``schema.py`` file.
+In this tutorial, we will describe such a schema for a particular data set,
+the Diseasome data (see below).
+
+Once the schema is defined, we create a cube and an instance.
+The cube is a specification of an application, whereas an instance
+is the application per se.
+
+Once the schema is defined and the instance is created, the import can be performed, via
+the following steps:
+
+1. Build a custom parser for the data to be imported. Thus, one obtains a Python
+ memory representation of the data.
+
+2. Map the parsed data to the data model defined in ``schema.py``.
+
+3. Perform the actual import of the data. This comes down to "populating"
+ the data model with the memory representation obtained at 1, according to
+ the mapping defined at 2.
+
+This tutorial illustrates all the above steps in the context of relational data
+stored in the RDF format.
+
+More specifically, we describe the import of Diseasome_ RDF/OWL data.
+
+.. _Diseasome: http://datahub.io/dataset/fu-berlin-diseasome
+
+Building a data model
+~~~~~~~~~~~~~~~~~~~~~
+
+The first thing to do when using CubicWeb for creating an application from scratch
+is to devise a *data model*, that is, a relational representation of the problem to be
+modeled or of the structure of the data to be imported.
+
+In such a schema, we define
+an entity type (``EntityType`` objects) for each type of entity to import. Each such type
+has several attributes. If the attributes are of known CubicWeb (Yams) types, viz. numbers,
+strings or characters, then they are defined as attributes, as e.g. ``attribute = Int()``
+for an attribute named ``attribute`` which is an integer.
+
+Each such type also has a set of
+relations, which are defined like the attributes, except that they represent, in fact,
+relations between the entities of the type under discussion and the objects of a type which
+is specified in the relation definition.
+
+For example, for the Diseasome data, we have two types of entities, genes and diseases.
+Thus, we create two classes which inherit from ``EntityType``::
+
+ class Disease(EntityType):
+ # Corresponds to http://www.w3.org/2000/01/rdf-schema#label
+ label = String(maxsize=512, fulltextindexed=True)
+ ...
+
+ #Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/diseasome/associatedGene
+ associated_genes = SubjectRelation('Gene', cardinality='**')
+ ...
+
+ #Corresponds to 'http://www4.wiwiss.fu-berlin.de/diseasome/resource/diseasome/chromosomalLocation'
+ chromosomal_location = SubjectRelation('ExternalUri', cardinality='?*', inlined=True)
+
+
+ class Gene(EntityType):
+ ...
+
+In this schema, there are attributes whose values are numbers or strings. Thus, they are
+defined by using the CubicWeb / Yams primitive types, e.g., ``label = String(maxsize=12)``.
+These types can have several constraints or attributes, such as ``maxsize``.
+There are also relations, either between the entity types themselves, or between them
+and a CubicWeb type, ``ExternalUri``. The latter defines a class of URI objects in
+CubicWeb. For instance, the ``chromosomal_location`` attribute is a relation between
+a ``Disease`` entity and an ``ExternalUri`` entity. The relation is marked by the CubicWeb /
+Yams ``SubjectRelation`` method. The latter can have several optional keyword arguments, such as
+``cardinality`` which specifies the number of subjects and objects related by the relation type
+specified. For example, the ``'?*'`` cardinality in the ``chromosomal_location`` relation type says
+that zero or more ``Disease`` entities are related to zero or one ``ExternalUri`` entities.
+In other words, a ``Disease`` entity is related to at most one ``ExternalUri`` entity via the
+``chromosomal_location`` relation type, and that we can have zero or more ``Disease`` entities in the
+data base.
+For a relation between the entity types themselves, the ``associated_genes`` between a ``Disease``
+entity and a ``Gene`` entity is defined, so that any number of ``Gene`` entities can be associated
+to a ``Disease``, and there can be any number of ``Disease`` s if a ``Gene`` exists.
+
+Of course, before being able to use the CubicWeb / Yams built-in objects, we need to import them::
+
+
+ from yams.buildobjs import EntityType, SubjectRelation, String, Int
+ from cubicweb.schemas.base import ExternalUri
+
+Building a custom data parser
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The data we wish to import is structured in the RDF format,
+as a text file containing a set of lines.
+On each line, there are three fields.
+The first two fields are URIs ("Universal Resource Identifiers").
+The third field is either an URI or a string. Each field bears a particular meaning:
+
+- the leftmost field is an URI that holds the entity to be imported.
+ Note that the entities defined in the data model (i.e., in ``schema.py``) should
+ correspond to the entities whose URIs are specified in the import file.
+
+- the middle field is an URI that holds a relation whose subject is the entity
+ defined by the leftmost field. Note that this should also correspond
+ to the definitions in the data model.
+
+- the rightmost field is either an URI or a string. When this field is an URI,
+ it gives the object of the relation defined by the middle field.
+ When the rightmost field is a string, the middle field is interpreted as an attribute
+ of the subject (introduced by the leftmost field) and the rightmost field is
+ interpreted as the value of the attribute.
+
+Note however that some attributes (i.e. relations whose objects are strings)
+have their objects defined as strings followed by ``^^`` and by another URI;
+we ignore this part.
+
+Let us show some examples:
+
+- a line holding an attribute definition::
+
+    <http://www4.wiwiss.fu-berlin.de/diseasome/resource/genes/CYP17A1> <http://www.w3.org/2000/01/rdf-schema#label> "CYP17A1" .
+
+  This line defines the ``label`` attribute of an
+  entity of type ``gene``; the value of ``label`` is ``CYP17A1``.
+
+- a line holding a relation definition::
+
+    <http://www4.wiwiss.fu-berlin.de/diseasome/resource/diseases/1> <http://www4.wiwiss.fu-berlin.de/diseasome/resource/diseasome/associatedGene> <http://www4.wiwiss.fu-berlin.de/diseasome/resource/genes/HADH2> .
+
+  This line defines the ``associatedGene`` relation between
+  a ``disease`` subject entity identified by ``1`` and a ``gene`` object
+  entity defined by ``HADH2``.
+Thus, to parse the data, we can (see the ``diseasome_parser`` module and the sketch after this list):
+
+1. define a couple of regular expressions for parsing the two kinds of lines,
+ ``RE_ATTS`` for parsing the attribute definitions, and ``RE_RELS`` for parsing
+ the relation definitions.
+
+2. define a function that iterates through the lines of the file and retrieves
+ (``yields``) a (subject, relation, object) tuple for each line.
+ We called it ``_retrieve_structure`` in the ``diseasome_parser`` module.
+ The function needs the file name and the types for which information
+ should be retrieved.
+
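+As an illustration, a stripped-down version of such a parser could look like the sketch
+below; it reuses the two regular expressions of the ``diseasome_parser`` module added by
+this patch, but omits the entity type filtering and the relation name normalization::
+
+    import re
+
+    # Relation lines: three URIs between angle brackets, terminated by a dot.
+    RE_RELS = re.compile(r'^<(.*?)>\s<(.*?)>\s<(.*?)>\s*\.')
+    # Attribute lines: two URIs, then a quoted value, optionally typed with ^^<uri>.
+    RE_ATTS = re.compile(r'^<(.*?)>\s<(.*?)>\s"(.*)"(\^\^<(.*?)>|)\s*\.')
+
+    def _retrieve_structure(filename):
+        """Yield a (subject, relation, object) tuple for each line of the file."""
+        with open(filename) as stream:
+            for line in stream:
+                match = RE_RELS.match(line) or RE_ATTS.match(line)
+                if match is None:
+                    continue
+                yield match.group(1), match.group(2), match.group(3)
+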
+Alternatively, instead of hand-making the parser, one could use the RDF parser provided
+in the ``dataio`` cube.
+
+.. XXX To further study and detail the ``dataio`` cube usage.
+
+Once we have the (subject, relation, object) triples, we need to map them onto
+the data model.
+
+
+Mapping the data to the schema
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In the case of diseasome data, we can just define two dictionaries for mapping
+the names of the relations as extracted by the parser, to the names of the relations
+as defined in the ``schema.py`` data model. In the ``diseasome_parser`` module
+they are called ``MAPPING_ATTS`` and ``MAPPING_RELS``.
+Since the relation and attribute names are in CamelCase in the original data, these
+mappings are necessary if we follow PEP 8 when naming the attributes in the data model.
+For example, the RDF relation ``chromosomalLocation`` is mapped into the schema relation
+``chromosomal_location``.
+
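+For instance, restricted to the relations mentioned so far, such mapping dictionaries
+could be as simple as the following sketch (the complete dictionaries are in the
+``diseasome_parser`` module)::
+
+    MAPPING_ATTS = {'label': 'label',
+                    'name': 'name'}
+
+    MAPPING_RELS = {'associatedGene': 'associated_genes',
+                    'chromosomalLocation': 'chromosomal_location'}
+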
+Once these mappings have been defined, we just iterate over the (subject, relation, object)
+tuples provided by the parser and we extract the entities, with their attributes and relations.
+For each entity, we thus have a dictionary with two keys, ``attributes`` and ``relations``.
+The value associated to the ``attributes`` key is a dictionary containing (attribute: value)
+pairs, where "value" is a string, plus the ``cwuri`` key / attribute holding the URI of
+the entity itself.
+The value associated to the ``relations`` key is a dictionary containing (relation: value)
+pairs, where "value" is an URI.
+This is implemented in the ``entities_from_rdf`` interface function of the module
+``diseasome_parser``. This function provides an iterator on the dictionaries containing
+the ``attributes`` and ``relations`` keys for all entities.
+
+However, this is a simple case. In real life, things can get much more complicated, and the
+mapping can be far from trivial, especially when several data sources (which can follow
+different formatting and even structuring conventions) must be mapped into the same data model.
+
+Importing the data
+~~~~~~~~~~~~~~~~~~
+
+The data import code should be placed in a Python module. Let us call it
+``diseasome_import.py``. Then, this module should be called via
+``cubicweb-ctl``, as follows::
+
+ cubicweb-ctl shell <instance> diseasome_import.py -- <script arguments>
+
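+For example, assuming an instance named ``diseasome_instance`` (the name is purely
+illustrative) and the command-line options defined at the end of the import module
+shown below (``-df``/``--datafile`` and ``-st``/``--store``), the import could be
+launched as::
+
+    cubicweb-ctl shell diseasome_instance diseasome_import.py -- -df ../data/diseasome_dump.nt -st SQLGenObjectStore
+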
+In the import module, we should use a *store* for doing the import.
+A store is an object which provides three kinds of methods for
+importing data:
+
+- a method for importing the entities, along with the values
+ of their attributes.
+- a method for importing the relations between the entities.
+- a method for committing the imports to the database.
+
+In CubicWeb, we have four stores:
+
+1. ``ObjectStore``: the base class for the stores in CubicWeb.
+ It only provides a skeleton for all other stores and
+ the means for creating the in-memory structures
+ (dictionaries) that hold the entities and the relations
+ between them.
+
+2. ``RQLObjectStore``: store which uses the RQL language for performing
+ database insertions and updates. It relies on all the CubicWeb hooks
+ machinery, especially for dealing with security issues (database access
+ permissions).
+
+3. ``NoHookRQLObjectStore``: store which uses the RQL language for
+ performing database insertions and updates, but for which
+ all hooks are deactivated. This implies that
+ certain checks with respect to the CubicWeb / Yams schema
+ (data model) are not performed. However, all SQL queries
+ obtained from the RQL ones are executed in a sequential
+ manner, one query per inserted entity.
+
+4. ``SQLGenObjectStore``: store which uses the SQL language directly.
+ It inserts entities either sequentially, by executing an SQL query
+ for each entity, or directly by using one PostgreSQL ``COPY FROM``
+ query for a set of similarly structured entities.
+
+For really massive imports (millions or billions of entities), there
+is a cube ``dataio`` which contains another store, called
+``MassiveObjectStore``. This store is similar to ``SQLGenObjectStore``,
+except that anything related to CubicWeb is bypassed. That is, even the
+CubicWeb EID entity identifiers are not handled. This store is the fastest,
+but has a slightly different API from the other four stores mentioned above.
+Moreover, it has an important limitation, in that it doesn't insert inlined [#]_
+relations in the database.
+
+.. [#] An inlined relation is a relation defined in the schema
+ with the keyword argument ``inlined=True``. Such a relation
+ is inserted in the database as an attribute of the entity
+ whose subject it is.
+
+In the following section we will see how to import data by using the stores
+in CubicWeb's ``dataimport`` module.
+
+Using the stores in ``dataimport``
+++++++++++++++++++++++++++++++++++
+
+``ObjectStore`` is seldom used in real life for importing data, since it is
+only the base store for the other stores and it doesn't perform an actual
+import of the data. Nevertheless, the other three stores, which import data,
+are based on ``ObjectStore`` and provide the same API.
+
+All three stores, ``RQLObjectStore``, ``NoHookRQLObjectStore`` and
+``SQLGenObjectStore``, provide exactly the same API for importing data, that is,
+entities and relations, into an SQL database.
+
+Before using a store, one must import the ``dataimport`` module and then initialize
+the store, with the current ``session`` as a parameter::
+
+ import cubicweb.dataimport as cwdi
+ ...
+
+ store = cwdi.RQLObjectStore(session)
+
+Each such store provides three methods for data import:
+
+#. ``create_entity(Etype, **attributes)``, which allows us to add
+ an entity of the Yams type ``Etype`` to the database. This entity's attributes
+ are specified in the ``attributes`` dictionary. The method returns the entity
+ created in the database. For example, we add two entities,
+ a person, of ``Person`` type, and a location, of ``Location`` type::
+
+ person = store.create_entity('Person', name='Toto', age='18', height='190')
+
+ location = store.create_entity('Location', town='Paris', arrondissement='13')
+
+#. ``relate(subject_eid, r_type, object_eid)``, which allows us to add a relation
+ of the Yams type ``r_type`` to the database. The relation's subject is an entity
+ whose EID is ``subject_eid``; its object is another entity, whose EID is
+ ``object_eid``. For example [#]_::
+
+ store.relate(person.eid, 'lives_in', location.eid, **kwargs)
+
+ ``kwargs`` is only used by the ``SQLGenObjectStore``'s ``relate`` method and is here
+ to allow us to specify the type of the subject of the relation, when the relation is
+ defined as inlined in the schema.
+
+.. [#] The ``eid`` attribute of an entity created via ``create_entity`` holds
+ the entity identifier assigned by CubicWeb when creating the entity.
+ This only works for entities created via the stores in CubicWeb's
+ ``dataimport`` module.
+
+ The keyword argument that is understood by ``SQLGenObjectStore`` is called
+ ``subjtype`` and holds the type of the subject entity. For the example considered here,
+ this comes to having [#]_::
+
+ store.relate(person.eid, 'lives_in', location.eid, subjtype=person.cw_etype)
+
+ If ``subjtype`` is not specified, then the store tries to infer the type of the subject.
+ However, this doesn't always work, e.g. when there are several possible subject types
+ for a given relation type.
+
+.. [#] The ``cw_etype`` attribute of an entity defined via ``create_entity`` holds
+ the type of the entity just created. This only works for entities defined via
+ the stores in the CubicWeb's ``dataimport`` module. In the example considered
+ here, ``person.cw_etype`` holds ``'Person'``.
+
+ All the other stores but ``SQLGenObjectStore`` ignore the ``kwargs`` parameters.
+
+#. ``flush()``, which allows us to perform the actual commit into the database, along
+ with some cleanup operations. Ideally, this method should be called as often as
+ possible, that is after each insertion in the database, so that database sessions
+ are kept as atomic as possible. In practice, we usually call this method twice:
+ first, after all the entities have been created, second, after all relations have
+ been created.
+
+ Note however that before each commit the database insertions
+ have to be consistent with the schema. Thus, if, for instance,
+ an entity has an attribute defined through a relation (viz.
+ a ``SubjectRelation``) with a ``"1"`` or ``"+"`` object
+ cardinality, we have to create the entity under discussion,
+ the object entity of the relation under discussion, and the
+ relation itself, before committing the additions to the database.
+
+ The ``flush`` method is simply called as::
+
+ store.flush()
+
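+Putting the three methods together, a minimal import script using one of these stores
+might look like the sketch below; the ``Person`` / ``Location`` entity types, their
+attributes and the ``lives_in`` relation are the illustrative examples used above, not
+part of the Diseasome data::
+
+    import cubicweb.dataimport as cwdi
+
+    store = cwdi.RQLObjectStore(session)
+
+    # First pass: create the entities and remember their EIDs.
+    eids = []
+    for name, town in [(u'Toto', u'Paris'), (u'Tata', u'Lyon')]:
+        person = store.create_entity('Person', name=name)
+        location = store.create_entity('Location', town=town)
+        eids.append((person.eid, location.eid))
+    # First commit: the entities.
+    store.flush()
+
+    # Second pass: create the relations between the entities created above.
+    for person_eid, location_eid in eids:
+        store.relate(person_eid, 'lives_in', location_eid)
+    # Second commit: the relations.
+    store.flush()
+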
+
+Using the ``MassiveObjectStore`` in the ``dataio`` cube
++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+This store, available in the ``dataio`` cube, allows us to
+fully dispense with the CubicWeb import mechanisms and hence
+to interact directly with the database server, via SQL queries.
+
+Moreover, these queries rely on PostgreSQL's ``COPY FROM`` instruction
+to create several entities in a single query. This brings tremendous
+performance improvements with respect to the RQL-based data insertion
+procedures.
+
+However, the API of this store is slightly different from the API of
+the stores in CubicWeb's ``dataimport`` module.
+
+Before using the store, one has to import the ``dataio`` cube's
+``dataimport`` module, then initialize the store by giving it the
+``session`` parameter::
+
+ from cubes.dataio import dataimport as mcwdi
+ ...
+
+ store = mcwdi.MassiveObjectStore(session)
+
+The ``MassiveObjectStore`` provides six methods for inserting data
+into the database:
+
+#. ``init_rtype_table(SubjEtype, r_type, ObjEtype)``, which specifies the
+ creation of the tables associated to the relation types in the database.
+ Each such table has three columns: the type of the subject entity, the
+ type of the relation (that is, the name of the attribute in the subject
+ entity which is defined via the relation), and the type of the object
+ entity. For example::
+
+ store.init_rtype_table('Person', 'lives_in', 'Location')
+
+ Please note that these tables can be created before the entities, since
+ they only specify their types, not their unique identifiers.
+
+#. ``create_entity(Etype, **attributes)``, which allows us to add new entities,
+ whose attributes are given in the ``attributes`` dictionary.
+ Please note however that, by default, this method does *not* return
+ the created entity. The method is called, for example, as in::
+
+ store.create_entity('Person', name='Toto', age='18', height='190',
+ uri='http://link/to/person/toto_18_190')
+ store.create_entity('Location', town='Paris', arrondissement='13',
+ uri='http://link/to/location/paris_13')
+
+ In order to be able to link these entities via the relations when needed,
+ we must provide ourselves a means for uniquely identifying the entities.
+ In general, this is done via URIs, stored in attributes like ``uri`` or
+ ``cwuri``. The name of the attribute is irrelevant as long as its value is
+ unique for each entity.
+
+#. ``relate_by_iid(subject_iid, r_type, object_iid)`` allows us to actually
+ relate the entities uniquely identified by ``subject_iid`` and
+ ``object_iid`` via a relation of type ``r_type``. For example::
+
+ store.relate_by_iid('http://link/to/person/toto_18_190',
+ 'lives_in',
+ 'http://link/to/location/paris_13')
+
+ Please note that this method does *not* work for inlined relations!
+
+#. ``convert_relations(SubjEtype, r_type, ObjEtype, subj_iid_attribute,
+ obj_iid_attribute)``
+ allows us to actually insert
+ the relations in the database. One call of this method inserts
+ all the relations of type ``r_type`` between entities of the given types.
+ ``subj_iid_attribute`` and ``obj_iid_attribute`` are the names
+ of the attributes which store the unique identifiers of the entities,
+ as assigned by the user. These names can be identical, as long as
+ their values are unique. For example, to insert all relations
+ of type ``lives_in`` between ``Person`` and ``Location`` entities,
+ we write::
+
+ store.convert_relations('Person', 'lives_in', 'Location', 'uri', 'uri')
+
+#. ``flush()`` performs the actual commit in the database. It only needs
+ to be called after ``create_entity`` and ``relate_by_iid`` calls.
+ Please note that ``relate_by_iid`` does *not* perform insertions into
+ the database, hence calling ``flush()`` for it would have no effect.
+
+#. ``cleanup()`` performs database cleanups, by removing temporary tables.
+ It should only be called at the end of the import.
+
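+Putting these methods together, a minimal import with the ``MassiveObjectStore`` might
+look like the following sketch; it reuses the illustrative ``Person`` / ``Location`` /
+``lives_in`` example from above and follows the same sequence of calls as the Diseasome
+import script shown later in this tutorial::
+
+    from cubes.dataio import dataimport as mcwdi
+
+    store = mcwdi.MassiveObjectStore(session)
+
+    # Declare the table used for the 'lives_in' relation type.
+    store.init_rtype_table('Person', 'lives_in', 'Location')
+
+    # Create the entities; each one carries its own unique identifier in 'uri'.
+    store.create_entity('Person', name=u'Toto',
+                        uri=u'http://link/to/person/toto_18_190')
+    store.create_entity('Location', town=u'Paris',
+                        uri=u'http://link/to/location/paris_13')
+    store.flush()
+
+    # Record the relation between the two entities via their unique identifiers.
+    store.relate_by_iid(u'http://link/to/person/toto_18_190',
+                        'lives_in',
+                        u'http://link/to/location/paris_13')
+
+    # Push entity metadata (creation time, author), as the Diseasome script does.
+    store.flush_meta_data()
+
+    # Actually insert the relations, then clean up the temporary tables.
+    store.convert_relations('Person', 'lives_in', 'Location', 'uri', 'uri')
+    store.cleanup()
+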
+
+
+.. XXX to add smth on the store's parameter initialization.
+
+
+
+Application to the Diseasome data
++++++++++++++++++++++++++++++++++
+
+Import setup
+############
+
+We define an import function, ``diseasome_import``, which does basically four things:
+
+#. creates and initializes the store to be used, via a line such as::
+
+ store = cwdi.SQLGenObjectStore(session)
+
+ where ``cwdi`` is the imported ``cubicweb.dataimport`` or
+ ``cubes.dataio.dataimport``.
+
+#. calls the diseasome parser, that is, the ``entities_from_rdf`` function in the
+ ``diseasome_parser`` module and iterates on its result, in a line such as::
+
+ for entity, relations in parser.entities_from_rdf(filename, ('gene', 'disease')):
+
+ where ``parser`` is the imported ``diseasome_parser`` module, and ``filename`` is the
+ name of the file containing the data (with its path), e.g. ``../data/diseasome_dump.nt``.
+
+#. creates the entities to be inserted in the database; for Diseasome, there are two
+ kinds of entities:
+
+ #. entities defined in the data model, viz. ``Gene`` and ``Disease`` in our case.
+ #. entities which are built into CubicWeb / Yams, viz. ``ExternalUri``, which defines
+ URIs.
+
+ As we are working with RDF data, each entity is defined through a series of URIs. Hence,
+ each "relational attribute" [#]_ of an entity is defined via an URI, that is, in CubicWeb
+ terms, via an ``ExternalUri`` entity. The entities are created, in the loop presented above,
+ as such::
+
+ ent = store.create_entity(etype, **entity)
+
+ where ``etype`` is the appropriate entity type, either ``Gene`` or ``Disease``.
+
+.. [#] By "relational attribute" we denote an attribute (of an entity) which
+ is defined through a relation, e.g. the ``chromosomal_location`` attribute
+ of ``Disease`` entities, which is defined through a relation between a
+ ``Disease`` and an ``ExternalUri``.
+
+ There are as many ``ExternalUri`` entities as there are distinct URIs in the data file.
+ For each of them, we define a single attribute, ``uri``, which holds the URI under discussion::
+
+ extu = store.create_entity('ExternalUri', uri="http://path/of/the/uri")
+
+#. creates the relations between the entities. We have relations between:
+
+ #. entities defined in the schema, e.g. between ``Disease`` and ``Gene``
+ entities, such as the ``associated_genes`` relation defined for
+ ``Disease`` entities.
+ #. entities defined in the schema and ``ExternalUri`` entities, such as ``gene_id``.
+
+ The way relations are added to the database depends on the store:
+
+ - for the stores in the CubicWeb ``dataimport`` module, we simply call
+ ``store.relate`` in another loop over the relations (that is, a
+ loop nested inside the one mentioned at step 2)::
+
+ for rtype, rels in relations.iteritems():
+ ...
+
+ store.relate(ent.eid, rtype, extu.eid, **kwargs)
+
+ where ``kwargs`` is a dictionary designed to accommodate the need for specifying
+ the type of the subject entity of the relation, when the relation is inlined and
+ ``SQLGenObjectStore`` is used. For example::
+
+ ...
+ store.relate(ent.eid, 'chromosomal_location', extu.eid, subjtype='Disease')
+
+ - for the ``MassiveObjectStore`` in the ``dataio`` cube's ``dataimport`` module,
+ the relations are created in three steps:
+
+ #. first, a table is created for each relation type, as in::
+
+ ...
+ store.init_rtype_table(ent.cw_etype, rtype, extu.cw_etype)
+
+ which comes down to lines such as::
+
+ store.init_rtype_table('Disease', 'associated_genes', 'Gene')
+ store.init_rtype_table('Gene', 'gene_id', 'ExternalUri')
+
+ #. second, the URI of each entity will be used as its identifier, in the
+ ``relate_by_iid`` method, such as::
+
+ disease_uri = 'http://www4.wiwiss.fu-berlin.de/diseasome/resource/diseases/3'
+ gene_uri = 'http://www4.wiwiss.fu-berlin.de/diseasome/resource/genes/HADH2'
+
+ store.relate_by_iid(disease_uri, 'associated_genes', gene_uri)
+
+"""This module imports the Diseasome data into a CubicWeb instance.
+"""
+
+# Python imports
+import sys
+import argparse
+
+# Logilab import, for timing
+from logilab.common.decorators import timed
+
+# CubicWeb imports
+import cubicweb.dataimport as cwdi
+from cubes.dataio import dataimport as mcwdi
+
+# Diseasome parser import
+import diseasome_parser as parser
+
+def _is_of_class(instance, class_name):
+ """Helper function to determine whether an instance is
+ of a specified class or not.
+ Returns True if this is the case and False otherwise.
+ """
+ return instance.__class__.__name__ == class_name
+
+@timed
+def diseasome_import(session, file_name, store):
+ """Main function for importing Diseasome data.
+
+ It uses the Diseasome data parser to get the contents of the
+ data from a file, then uses a store for importing the data
+ into a CubicWeb instance.
+
+ >>> diseasome_import(session, 'file_name', Store)
+
+ """
+ exturis = dict(session.execute('Any U, X WHERE X is ExternalUri, X uri U'))
+ uri_to_eid = {}
+ uri_to_etype = {}
+ all_relations = {}
+ etypes = {('http://www4.wiwiss.fu-berlin.de/'
+ 'diseasome/resource/diseasome/genes'): 'Gene',
+ ('http://www4.wiwiss.fu-berlin.de/'
+ 'diseasome/resource/diseasome/diseases'): 'Disease'}
+ # Read the parsed data
+ for entity, relations in parser.entities_from_rdf(file_name,
+ ('gene', 'disease')):
+ uri = entity.get('cwuri', None)
+ types = list(relations.get('types', []))
+ if not types:
+ continue
+ etype = etypes.get(types[0])
+ if not etype:
+ sys.stderr.write('Entity type %s not recognized.\n' % types[0])
+ sys.stderr.flush()
+ # Skip entities whose type cannot be mapped to the schema
+ continue
+ if _is_of_class(store, 'MassiveObjectStore'):
+ for relation in (set(relations).intersection(('classes',
+ 'possible_drugs', 'omim', 'omim_page',
+ 'chromosomal_location', 'same_as', 'gene_id',
+ 'hgnc_id', 'hgnc_page'))):
+ store.init_rtype_table(etype, relation, 'ExternalUri')
+ for relation in set(relations).intersection(('subtype_of',)):
+ store.init_rtype_table(etype, relation, 'Disease')
+ for relation in set(relations).intersection(('associated_genes',)):
+ store.init_rtype_table(etype, relation, 'Gene')
+ # Create the entities
+ ent = store.create_entity(etype, **entity)
+ if not _is_of_class(store, 'MassiveObjectStore'):
+ uri_to_eid[uri] = ent.eid
+ uri_to_etype[uri] = ent.cw_etype
+ else:
+ uri_to_eid[uri] = uri
+ uri_to_etype[uri] = etype
+ # Store relations for after
+ all_relations[uri] = relations
+ # Perform a first commit, of the entities
+ store.flush()
+ kwargs = {}
+ for uri, relations in all_relations.iteritems():
+ from_eid = uri_to_eid.get(uri)
+ # ``subjtype`` should be initialized if ``SQLGenObjectStore`` is used
+ # and there are inlined relations in the schema.
+ # If ``subjtype`` is not given, while ``SQLGenObjectStore`` is used
+ # and there are inlined relations in the schema, the store
+ # tries to infer the type of the subject, but this does not always
+ # work, e.g. when there are several object types for the relation.
+ # ``subjtype`` is ignored for other stores, or if there are no
+ # inlined relations in the schema.
+ kwargs['subjtype'] = uri_to_etype.get(uri)
+ if not from_eid:
+ continue
+ for rtype, rels in relations.iteritems():
+ if rtype in ('classes', 'possible_drugs', 'omim', 'omim_page',
+ 'chromosomal_location', 'same_as', 'gene_id',
+ 'hgnc_id', 'hgnc_page'):
+ for rel in list(rels):
+ if rel not in exturis:
+ # Create the "ExternalUri" entities, which are the
+ # objects of the relations
+ extu = store.create_entity('ExternalUri', uri=rel)
+ if not _is_of_class(store, 'MassiveObjectStore'):
+ rel_eid = extu.eid
+ else:
+ # For the "MassiveObjectStore", the EIDs are
+ # in fact the URIs.
+ rel_eid = rel
+ exturis[rel] = rel_eid
+ else:
+ rel_eid = exturis[rel]
+ # Create the relations that have "ExternalUri"s as objects
+ if not _is_of_class(store, 'MassiveObjectStore'):
+ store.relate(from_eid, rtype, rel_eid, **kwargs)
+ else:
+ store.relate_by_iid(from_eid, rtype, rel_eid)
+ elif rtype in ('subtype_of', 'associated_genes'):
+ for rel in list(rels):
+ to_eid = uri_to_eid.get(rel)
+ if to_eid:
+ # Create relations that have objects of other type
+ # than "ExternalUri"
+ if not _is_of_class(store, 'MassiveObjectStore'):
+ store.relate(from_eid, rtype, to_eid, **kwargs)
+ else:
+ store.relate_by_iid(from_eid, rtype, to_eid)
+ else:
+ sys.stderr.write('Missing entity with URI %s '
+ 'for relation %s' % (rel, rtype))
+ sys.stderr.flush()
+ # Perform a second commit, of the "ExternalUri" entities.
+ # when the stores in the CubicWeb ``dataimport`` module are used,
+ # relations are also committed.
+ store.flush()
+ # If the ``MassiveObjectStore`` is used, then entity and relation metadata
+ # are pushed as well. By metadata we mean information on the creation
+ # time and author.
+ if _is_of_class(store, 'MassiveObjectStore'):
+ store.flush_meta_data()
+ for relation in ('classes', 'possible_drugs', 'omim', 'omim_page',
+ 'chromosomal_location', 'same_as'):
+ # Afterwards, relations are actually created in the database.
+ store.convert_relations('Disease', relation, 'ExternalUri',
+ 'cwuri', 'uri')
+ store.convert_relations('Disease', 'subtype_of', 'Disease',
+ 'cwuri', 'cwuri')
+ store.convert_relations('Disease', 'associated_genes', 'Gene',
+ 'cwuri', 'cwuri')
+ for relation in ('gene_id', 'hgnc_id', 'hgnc_page', 'same_as'):
+ store.convert_relations('Gene', relation, 'ExternalUri',
+ 'cwuri', 'uri')
+ # Clean up temporary tables in the database
+ store.cleanup()
+
+if __name__ == '__main__':
+ # Change sys.argv so that ``cubicweb-ctl shell`` can work out the options
+ # we give to our ``diseasome_import.py`` script.
+ sys.argv = [arg for
+ arg in sys.argv[sys.argv.index("--") - 1:] if arg != "--"]
+ PARSER = argparse.ArgumentParser(description="Import Diseasome data")
+ PARSER.add_argument("-df", "--datafile", type=str,
+ help="RDF data file name")
+ PARSER.add_argument("-st", "--store", type=str,
+ default="RQLObjectStore",
+ help="data import store")
+ ARGS = PARSER.parse_args()
+ if ARGS.datafile:
+ FILENAME = ARGS.datafile
+ if ARGS.store in (st + "ObjectStore" for
+ st in ("RQL", "NoHookRQL", "SQLGen")):
+ IMPORT_STORE = getattr(cwdi, ARGS.store)(session)
+ elif ARGS.store == "MassiveObjectStore":
+ IMPORT_STORE = mcwdi.MassiveObjectStore(session)
+ else:
+ sys.exit("Import store unknown")
+ diseasome_import(session, FILENAME, IMPORT_STORE)
+ else:
+ sys.exit("Data file not found or not specified")
diff -r 2cf127d4f5fd -r b4bcabf55e77 doc/tutorials/dataimport/diseasome_parser.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/tutorials/dataimport/diseasome_parser.py Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,100 @@
+# -*- coding: utf-8 -*-
+
+"""
+Diseasome data import module.
+Its interface is the ``entities_from_rdf`` function.
+"""
+
+import re
+RE_RELS = re.compile(r'^<(.*?)>\s<(.*?)>\s<(.*?)>\s*\.')
+RE_ATTS = re.compile(r'^<(.*?)>\s<(.*?)>\s"(.*)"(\^\^<(.*?)>|)\s*\.')
+
+MAPPING_ATTS = {'bio2rdfSymbol': 'bio2rdf_symbol',
+ 'label': 'label',
+ 'name': 'name',
+ 'classDegree': 'class_degree',
+ 'degree': 'degree',
+ 'size': 'size'}
+
+MAPPING_RELS = {'geneId': 'gene_id',
+ 'hgncId': 'hgnc_id',
+ 'hgncIdPage': 'hgnc_page',
+ 'sameAs': 'same_as',
+ 'class': 'classes',
+ 'diseaseSubtypeOf': 'subtype_of',
+ 'associatedGene': 'associated_genes',
+ 'possibleDrug': 'possible_drugs',
+ 'type': 'types',
+ 'omim': 'omim',
+ 'omimPage': 'omim_page',
+ 'chromosomalLocation': 'chromosomal_location'}
+
+def _retrieve_reltype(uri):
+ """
+ Retrieve a relation type from an URI.
+
+ Internal function which takes an URI containing a relation type as input
+ and returns the name of the relation.
+ If no URI string is given, then the function returns None.
+ """
+ if uri:
+ return uri.rsplit('/', 1)[-1].rsplit('#', 1)[-1]
+
+def _retrieve_etype(tri_uri):
+ """
+ Retrieve entity type from a triple of URIs.
+
+ Internal function which takes as input a line containing three URIs
+ and returns the type of the entity, as obtained from the
+ first URI, that is, the subject of the triple.
+ """
+ if tri_uri:
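+ # The penultimate path segment of the subject URI ('diseases' or 'genes'),
+ # with its trailing 's' stripped, gives the entity type ('disease', 'gene').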
+ return tri_uri.split('> <')[0].rsplit('/', 2)[-2].rstrip('s')
+
+def _retrieve_structure(filename, etypes):
+ """
+ Retrieve a (subject, relation, object) tuples iterator from a file.
+
+ Internal function which takes as input a file name and a tuple of
+ entity types, and returns an iterator of (subject, relation, object)
+ tuples.
+ """
+ with open(filename) as fil:
+ for line in fil:
+ if _retrieve_etype(line) not in etypes:
+ continue
+ match = RE_RELS.match(line)
+ if not match:
+ match = RE_ATTS.match(line)
+ subj = match.group(1)
+ relation = _retrieve_reltype(match.group(2))
+ obj = match.group(3)
+ yield subj, relation, obj
+
+def entities_from_rdf(filename, etypes):
+ """
+ Return entities from an RDF file.
+
+ Module interface function which takes as input a file name and
+ a tuple of entity types, and returns an iterator on the
+ attributes and relations of each entity. The attributes
+ and relations are retrieved as dictionaries.
+
+ >>> for entities, relations in entities_from_rdf('data_file',
+ ('type_1', 'type_2')):
+ ...
+ """
+ entities = {}
+ for subj, rel, obj in _retrieve_structure(filename, etypes):
+ entities.setdefault(subj, {})
+ entities[subj].setdefault('attributes', {})
+ entities[subj].setdefault('relations', {})
+ entities[subj]['attributes'].setdefault('cwuri', unicode(subj))
+ if rel in MAPPING_ATTS:
+ entities[subj]['attributes'].setdefault(MAPPING_ATTS[rel],
+ unicode(obj))
+ if rel in MAPPING_RELS:
+ entities[subj]['relations'].setdefault(MAPPING_RELS[rel], set())
+ entities[subj]['relations'][MAPPING_RELS[rel]].add(unicode(obj))
+ return ((ent.get('attributes'), ent.get('relations'))
+ for ent in entities.itervalues())
diff -r 2cf127d4f5fd -r b4bcabf55e77 doc/tutorials/dataimport/schema.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/tutorials/dataimport/schema.py Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,135 @@
+# -*- coding: utf-8 -*-
+# copyright 2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr -- mailto:contact@logilab.fr
+#
+# This program is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""cubicweb-diseasome schema"""
+
+from yams.buildobjs import EntityType, SubjectRelation, String, Int
+
+
+class Disease(EntityType):
+ """Disease entity definition.
+
+ A Disease entity is characterized by several attributes which are
+ defined by URIs:
+
+ - a name, which we define as a CubicWeb / Yams String object
+ - a label, also defined as a Yams String
+ - a class degree, defined as a Yams Int (that is, an integer)
+ - a degree, also defined as a Yams Int
+ - size, also defined as an Int
+ - classes, defined as a set containing zero, one or several objects
+ identified by their URIs, that is, objects of type ``ExternalUri``
+ - subtype_of, defined as a set containing zero, one or several
+ objects of type ``Disease``
+ - associated_genes, defined as a set containing zero, one or several
+ objects of type ``Gene``, that is, of genes associated to the
+ disease
+ - possible_drugs, defined as a set containing zero, one or several
+ objects, identified by their URIs, that is, of type ``ExternalUri``
+ - omim and omim_page are identifiers in the OMIM (Online Mendelian
+ Inheritance in Man) database, which contains an inventory of "human
+ genes and genetic phenotypes"
+ (see http://www.ncbi.nlm.nih.gov/omim). Given that a disease
+ only has unique omim and omim_page identifiers, when it has them,
+ these attributes have been defined through relations such that
+ for each disease there is at most one omim and one omim_page.
+ Each such identifier is defined through an URI, that is, through
+ an ``ExternalUri`` entity.
+ That is, these relations are of cardinality "?*". For optimization
+ purposes, one might be tempted to define them as inlined, by setting
+ the ``inlined`` keyword argument to ``True``.
+ - chromosomal_location is also defined through a relation of
+ cardinality "?*", since any disease has at most one chromosomal
+ location associated to it.
+ - same_as is also defined through an URI, and hence through a
+ relation having ``ExternalUri`` entities as objects.
+
+ For more information on this data set and the data set itself,
+ please consult http://datahub.io/dataset/fu-berlin-diseasome.
+ """
+ # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+ # diseasome/name
+ name = String(maxsize=256, fulltextindexed=True)
+ # Corresponds to http://www.w3.org/2000/01/rdf-schema#label
+ label = String(maxsize=512, fulltextindexed=True)
+ # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+ # diseasome/classDegree
+ class_degree = Int()
+ # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+ # diseasome/degree
+ degree = Int()
+ # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+ # diseasome/size
+ size = Int()
+ #Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+ # diseasome/class
+ classes = SubjectRelation('ExternalUri', cardinality='**')
+ # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+ # diseasome/diseaseSubtypeOf
+ subtype_of = SubjectRelation('Disease', cardinality='**')
+ # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+ # diseasome/associatedGene
+ associated_genes = SubjectRelation('Gene', cardinality='**')
+ #Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+ # diseasome/possibleDrug
+ possible_drugs = SubjectRelation('ExternalUri', cardinality='**')
+ #Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+ # diseasome/omim
+ omim = SubjectRelation('ExternalUri', cardinality='?*', inlined=True)
+ #Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+ # diseasome/omimPage
+ omim_page = SubjectRelation('ExternalUri', cardinality='?*', inlined=True)
+ #Corresponds to 'http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+ # diseasome/chromosomalLocation'
+ chromosomal_location = SubjectRelation('ExternalUri', cardinality='?*',
+ inlined=True)
+ #Corresponds to http://www.w3.org/2002/07/owl#sameAs
+ same_as = SubjectRelation('ExternalUri', cardinality='**')
+
+
+class Gene(EntityType):
+ """Gene entity defintion.
+
+ A gene is characterized by the following attributes:
+
+ - label, defined through a Yams String.
+ - bio2rdf_symbol, also defined as a Yams String, since it is
+ just an identifier.
+ - gene_id is an URI identifying a gene, hence it is defined
+ as a relation with an ``ExternalUri`` object.
+ - a pair of unique identifiers in the HUGO Gene Nomenclature
+ Committee (http://www.genenames.org/). They are defined
+ as ``ExternalUri`` entities as well.
+ - same_as is also defined through an URI, and hence through a
+ relation having ``ExternalUri`` entities as objects.
+ """
+ # Corresponds to http://www.w3.org/2000/01/rdf-schema#label
+ label = String(maxsize=512, fulltextindexed=True)
+ # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+ # diseasome/geneId
+ gene_id = SubjectRelation('ExternalUri', cardinality='**')
+ # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+ # diseasome/hgncId
+ hgnc_id = SubjectRelation('ExternalUri', cardinality='**')
+ # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+ # diseasome/hgncIdPage
+ hgnc_page = SubjectRelation('ExternalUri', cardinality='**')
+ # Corresponds to http://www4.wiwiss.fu-berlin.de/diseasome/resource/
+ # diseasome/bio2rdfSymbol
+ bio2rdf_symbol = String(maxsize=64, fulltextindexed=True)
+ #Corresponds to http://www.w3.org/2002/07/owl#sameAs
+ same_as = SubjectRelation('ExternalUri', cardinality='**')
diff -r 2cf127d4f5fd -r b4bcabf55e77 entities/__init__.py
--- a/entities/__init__.py Fri Jun 14 16:13:24 2013 +0200
+++ b/entities/__init__.py Fri Jun 14 16:26:25 2013 +0200
@@ -19,12 +19,10 @@
__docformat__ = "restructuredtext en"
-from warnings import warn
-from logilab.common.deprecation import deprecated
-from logilab.common.decorators import cached
+from logilab.common.decorators import classproperty
-from cubicweb import Unauthorized, typed_eid
+from cubicweb import Unauthorized
from cubicweb.entity import Entity
@@ -60,6 +58,11 @@
# meta data api ###########################################################
+ @classproperty
+ def cw_etype(self):
+ """entity Etype as a string"""
+ return self.__regid__
+
def dc_title(self):
"""return a suitable *unicode* title for this entity"""
for rschema, attrschema in self.e_schema.attribute_definitions():
diff -r 2cf127d4f5fd -r b4bcabf55e77 entities/adapters.py
--- a/entities/adapters.py Fri Jun 14 16:13:24 2013 +0200
+++ b/entities/adapters.py Fri Jun 14 16:26:25 2013 +0200
@@ -26,12 +26,11 @@
from logilab.mtconverter import TransformError
from logilab.common.decorators import cached
-from logilab.common.deprecation import class_deprecated
from cubicweb import ValidationError, view
from cubicweb.predicates import (implements, is_instance, relation_possible,
match_exception)
-from cubicweb.interfaces import IDownloadable, ITree, IProgress, IMileStone
+from cubicweb.interfaces import IDownloadable, ITree
class IEmailableAdapter(view.EntityAdapter):
@@ -330,7 +329,7 @@
_done = set()
for child in self.children():
if child.eid in _done:
- self.error('loop in %s tree: %s', child.__regid__.lower(), child)
+ self.error('loop in %s tree: %s', child.cw_etype.lower(), child)
continue
yield child
_done.add(child.eid)
@@ -357,7 +356,7 @@
entity = adapter.entity
while entity is not None:
if entity.eid in path:
- self.error('loop in %s tree: %s', entity.__regid__.lower(), entity)
+ self.error('loop in %s tree: %s', entity.cw_etype.lower(), entity)
break
path.append(entity.eid)
try:
@@ -404,117 +403,3 @@
"%(cls)s is deprecated") % {'cls': cls.__name__}
warn(msg, DeprecationWarning, stacklevel=2)
return type.__call__(cls, *args, **kwargs)
-
-
-class IProgressAdapter(view.EntityAdapter):
- """something that has a cost, a state and a progression.
-
- You should at least override progress_info an in_progress methods on
- concrete implementations.
- """
- __metaclass__ = adapter_deprecated
- __deprecation_warning__ = '[3.14] IProgressAdapter has been moved to iprogress cube'
- __needs_bw_compat__ = True
- __regid__ = 'IProgress'
- __select__ = implements(IProgress, warn=False) # XXX for bw compat, should be abstract
-
- @property
- @view.implements_adapter_compat('IProgress')
- def cost(self):
- """the total cost"""
- return self.progress_info()['estimated']
-
- @property
- @view.implements_adapter_compat('IProgress')
- def revised_cost(self):
- return self.progress_info().get('estimatedcorrected', self.cost)
-
- @property
- @view.implements_adapter_compat('IProgress')
- def done(self):
- """what is already done"""
- return self.progress_info()['done']
-
- @property
- @view.implements_adapter_compat('IProgress')
- def todo(self):
- """what remains to be done"""
- return self.progress_info()['todo']
-
- @view.implements_adapter_compat('IProgress')
- def progress_info(self):
- """returns a dictionary describing progress/estimated cost of the
- version.
-
- - mandatory keys are (''estimated', 'done', 'todo')
-
- - optional keys are ('notestimated', 'notestimatedcorrected',
- 'estimatedcorrected')
-
- 'noestimated' and 'notestimatedcorrected' should default to 0
- 'estimatedcorrected' should default to 'estimated'
- """
- raise NotImplementedError
-
- @view.implements_adapter_compat('IProgress')
- def finished(self):
- """returns True if status is finished"""
- return not self.in_progress()
-
- @view.implements_adapter_compat('IProgress')
- def in_progress(self):
- """returns True if status is not finished"""
- raise NotImplementedError
-
- @view.implements_adapter_compat('IProgress')
- def progress(self):
- """returns the % progress of the task item"""
- try:
- return 100. * self.done / self.revised_cost
- except ZeroDivisionError:
- # total cost is 0 : if everything was estimated, task is completed
- if self.progress_info().get('notestimated'):
- return 0.
- return 100
-
- @view.implements_adapter_compat('IProgress')
- def progress_class(self):
- return ''
-
-
-class IMileStoneAdapter(IProgressAdapter):
- __metaclass__ = adapter_deprecated
- __deprecation_warning__ = '[3.14] IMileStoneAdapter has been moved to iprogress cube'
- __needs_bw_compat__ = True
- __regid__ = 'IMileStone'
- __select__ = implements(IMileStone, warn=False) # XXX for bw compat, should be abstract
-
- parent_type = None # specify main task's type
-
- @view.implements_adapter_compat('IMileStone')
- def get_main_task(self):
- """returns the main ITask entity"""
- raise NotImplementedError
-
- @view.implements_adapter_compat('IMileStone')
- def initial_prevision_date(self):
- """returns the initial expected end of the milestone"""
- raise NotImplementedError
-
- @view.implements_adapter_compat('IMileStone')
- def eta_date(self):
- """returns expected date of completion based on what remains
- to be done
- """
- raise NotImplementedError
-
- @view.implements_adapter_compat('IMileStone')
- def completion_date(self):
- """returns date on which the subtask has been completed"""
- raise NotImplementedError
-
- @view.implements_adapter_compat('IMileStone')
- def contractors(self):
- """returns the list of persons supposed to work on this task"""
- raise NotImplementedError
-
diff -r 2cf127d4f5fd -r b4bcabf55e77 entities/lib.py
--- a/entities/lib.py Fri Jun 14 16:13:24 2013 +0200
+++ b/entities/lib.py Fri Jun 14 16:26:25 2013 +0200
@@ -23,8 +23,6 @@
from urlparse import urlsplit, urlunsplit
from datetime import datetime
-from logilab.common.deprecation import deprecated
-
from cubicweb import UnknownProperty
from cubicweb.entity import _marker
from cubicweb.entities import AnyEntity, fetch_config
diff -r 2cf127d4f5fd -r b4bcabf55e77 entities/sources.py
--- a/entities/sources.py Fri Jun 14 16:13:24 2013 +0200
+++ b/entities/sources.py Fri Jun 14 16:26:25 2013 +0200
@@ -27,7 +27,6 @@
from logilab.common.configuration import OptionError
from logilab.mtconverter import xml_escape
-from cubicweb import ValidationError
from cubicweb.entities import AnyEntity, fetch_config
class _CWSourceCfgMixIn(object):
@@ -124,7 +123,7 @@
fetch_attrs, cw_fetch_order = fetch_config(['cw_for_source', 'cw_schema', 'options'])
def dc_title(self):
- return self._cw._(self.__regid__) + ' #%s' % self.eid
+ return self._cw._(self.cw_etype) + ' #%s' % self.eid
@property
def schema(self):
diff -r 2cf127d4f5fd -r b4bcabf55e77 entities/test/unittest_base.py
--- a/entities/test/unittest_base.py Fri Jun 14 16:13:24 2013 +0200
+++ b/entities/test/unittest_base.py Fri Jun 14 16:26:25 2013 +0200
@@ -25,7 +25,6 @@
from cubicweb.devtools.testlib import CubicWebTC
-from cubicweb import ValidationError
from cubicweb.interfaces import IMileStone, ICalendarable
from cubicweb.entities import AnyEntity
@@ -48,8 +47,13 @@
self.assertEqual(entity.dc_creator(), u'member')
def test_type(self):
+ #dc_type may be translated
self.assertEqual(self.member.dc_type(), 'CWUser')
+ def test_cw_etype(self):
+ #cw_etype is never translated
+ self.assertEqual(self.member.cw_etype, 'CWUser')
+
def test_entity_meta_attributes(self):
# XXX move to yams
self.assertEqual(self.schema['CWUser'].meta_attributes(), {})
@@ -172,7 +176,7 @@
self.assertEqual(eclass.__bases__[0].__bases__, (Foo,))
# check Division eclass is still selected for plain Division entities
eclass = self.select_eclass('Division')
- self.assertEqual(eclass.__regid__, 'Division')
+ self.assertEqual(eclass.cw_etype, 'Division')
if __name__ == '__main__':
unittest_main()
diff -r 2cf127d4f5fd -r b4bcabf55e77 entities/wfobjs.py
--- a/entities/wfobjs.py Fri Jun 14 16:13:24 2013 +0200
+++ b/entities/wfobjs.py Fri Jun 14 16:26:25 2013 +0200
@@ -24,7 +24,6 @@
__docformat__ = "restructuredtext en"
-from warnings import warn
from logilab.common.decorators import cached, clear_cache
from logilab.common.deprecation import deprecated
@@ -186,7 +185,7 @@
fetch_attrs, cw_fetch_order = fetch_config(['name', 'type'])
def __init__(self, *args, **kwargs):
- if self.__regid__ == 'BaseTransition':
+ if self.cw_etype == 'BaseTransition':
raise WorkflowException('should not be instantiated')
super(BaseTransition, self).__init__(*args, **kwargs)
@@ -449,10 +448,10 @@
"""return the default workflow for entities of this type"""
# XXX CWEType method
wfrset = self._cw.execute('Any WF WHERE ET default_workflow WF, '
- 'ET name %(et)s', {'et': self.entity.__regid__})
+ 'ET name %(et)s', {'et': self.entity.cw_etype})
if wfrset:
return wfrset.get_entity(0, 0)
- self.warning("can't find any workflow for %s", self.entity.__regid__)
+ self.warning("can't find any workflow for %s", self.entity.cw_etype)
return None
@property
diff -r 2cf127d4f5fd -r b4bcabf55e77 entity.py
--- a/entity.py Fri Jun 14 16:13:24 2013 +0200
+++ b/entity.py Fri Jun 14 16:26:25 2013 +0200
@@ -26,18 +26,17 @@
from logilab.common.decorators import cached
from logilab.common.deprecation import deprecated
from logilab.common.registry import yes
-from logilab.mtconverter import TransformData, TransformError, xml_escape
+from logilab.mtconverter import TransformData, xml_escape
from rql.utils import rqlvar_maker
from rql.stmts import Select
from rql.nodes import (Not, VariableRef, Constant, make_relation,
Relation as RqlRelation)
-from cubicweb import Unauthorized, typed_eid, neg_role
+from cubicweb import Unauthorized, neg_role
from cubicweb.utils import support_args
from cubicweb.rset import ResultSet
from cubicweb.appobject import AppObject
-from cubicweb.req import _check_cw_unsafe
from cubicweb.schema import (RQLVocabularyConstraint, RQLConstraint,
GeneratedConstraint)
from cubicweb.rqlrewrite import RQLRewriter
@@ -555,7 +554,10 @@
return '<Entity %s %s %s at %s>' % (
self.e_schema, self.eid, list(self.cw_attr_cache), id(self))
- def __cmp__(self, other):
+ def __lt__(self, other):
+ raise NotImplementedError('comparison not implemented for %s' % self.__class__)
+
+ def __eq__(self, other):
raise NotImplementedError('comparison not implemented for %s' % self.__class__)
def _cw_update_attr_cache(self, attrcache):
@@ -627,7 +629,7 @@
meaning that the entity has to be created
"""
try:
- typed_eid(self.eid)
+ int(self.eid)
return True
except (ValueError, TypeError):
return False
@@ -793,7 +795,7 @@
for rtype in self.skip_copy_for:
skip_copy_for['subject'].add(rtype)
warn('[3.14] skip_copy_for on entity classes (%s) is deprecated, '
- 'use cw_skip_for instead with list of couples (rtype, role)' % self.__regid__,
+ 'use cw_skip_for instead with list of couples (rtype, role)' % self.cw_etype,
DeprecationWarning)
for rtype, role in self.cw_skip_copy_for:
assert role in ('subject', 'object'), role
@@ -845,7 +847,7 @@
def as_rset(self): # XXX .cw_as_rset
"""returns a resultset containing `self` information"""
rset = ResultSet([(self.eid,)], 'Any X WHERE X eid %(x)s',
- {'x': self.eid}, [(self.__regid__,)])
+ {'x': self.eid}, [(self.cw_etype,)])
rset.req = self._cw
return rset
@@ -1287,7 +1289,6 @@
an entity or eid, a list of entities or eids, or None (meaning that all
relations of the given type from or to this object should be deleted).
"""
- _check_cw_unsafe(kwargs)
assert kwargs
assert self.cw_is_saved(), "should not call set_attributes while entity "\
"hasn't been saved yet"
@@ -1397,10 +1398,6 @@
@deprecated('[3.10] use entity.cw_attr_cache[attr]')
def __getitem__(self, key):
- if key == 'eid':
- warn('[3.7] entity["eid"] is deprecated, use entity.eid instead',
- DeprecationWarning, stacklevel=2)
- return self.eid
return self.cw_attr_cache[key]
@deprecated('[3.10] use entity.cw_attr_cache.get(attr[, default])')
@@ -1424,15 +1421,10 @@
the attribute to skip_security since we don't want to check security
for such attributes set by hooks.
"""
- if attr == 'eid':
- warn('[3.7] entity["eid"] = value is deprecated, use entity.eid = value instead',
- DeprecationWarning, stacklevel=2)
- self.eid = value
- else:
- try:
- self.cw_edited[attr] = value
- except AttributeError:
- self.cw_attr_cache[attr] = value
+ try:
+ self.cw_edited[attr] = value
+ except AttributeError:
+ self.cw_attr_cache[attr] = value
@deprecated('[3.10] use del entity.cw_edited[attr]')
def __delitem__(self, attr):
diff -r 2cf127d4f5fd -r b4bcabf55e77 etwist/http.py
--- a/etwist/http.py Fri Jun 14 16:13:24 2013 +0200
+++ b/etwist/http.py Fri Jun 14 16:26:25 2013 +0200
@@ -8,8 +8,6 @@
__docformat__ = "restructuredtext en"
-from cubicweb.web.http_headers import Headers
-
class HTTPResponse(object):
"""An object representing an HTTP Response to be sent to the client.
"""
diff -r 2cf127d4f5fd -r b4bcabf55e77 etwist/request.py
--- a/etwist/request.py Fri Jun 14 16:13:24 2013 +0200
+++ b/etwist/request.py Fri Jun 14 16:26:25 2013 +0200
@@ -19,14 +19,8 @@
__docformat__ = "restructuredtext en"
-from datetime import datetime
-from twisted.web import http
-
-from cubicweb.web import DirectResponse
from cubicweb.web.request import CubicWebRequestBase
-from cubicweb.web.httpcache import GMTOFFSET
-from cubicweb.web.http_headers import Headers
class CubicWebTwistedRequestAdapter(CubicWebRequestBase):
@@ -39,6 +33,7 @@
self.form[key] = (name, stream)
else:
self.form[key] = (unicode(name, self.encoding), stream)
+ self.content = self._twreq.content # stream
def http_method(self):
"""returns 'POST', 'GET', 'HEAD', etc."""
diff -r 2cf127d4f5fd -r b4bcabf55e77 etwist/server.py
--- a/etwist/server.py Fri Jun 14 16:13:24 2013 +0200
+++ b/etwist/server.py Fri Jun 14 16:26:25 2013 +0200
@@ -19,35 +19,25 @@
__docformat__ = "restructuredtext en"
import sys
-import os
-import os.path as osp
import select
import traceback
import threading
-import re
-from hashlib import md5 # pylint: disable=E0611
-from os.path import join
-from time import mktime
-from datetime import date, timedelta
from urlparse import urlsplit, urlunsplit
from cgi import FieldStorage, parse_header
from twisted.internet import reactor, task, threads
-from twisted.internet.defer import maybeDeferred
from twisted.web import http, server
-from twisted.web import static, resource
+from twisted.web import resource
from twisted.web.server import NOT_DONE_YET
from logilab.mtconverter import xml_escape
from logilab.common.decorators import monkeypatch
-from cubicweb import (AuthenticationError, ConfigurationError,
- CW_EVENT_MANAGER, CubicWebException)
+from cubicweb import ConfigurationError, CW_EVENT_MANAGER
from cubicweb.utils import json_dumps
from cubicweb.web import DirectResponse
from cubicweb.web.application import CubicWebPublisher
-from cubicweb.web.http_headers import generateDateTime
from cubicweb.etwist.request import CubicWebTwistedRequestAdapter
from cubicweb.etwist.http import HTTPResponse
diff -r 2cf127d4f5fd -r b4bcabf55e77 etwist/test/data/views.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/etwist/test/data/views.py Fri Jun 14 16:26:25 2013 +0200
@@ -0,0 +1,29 @@
+# copyright 2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
+#
+# This file is part of CubicWeb.
+#
+# CubicWeb is free software: you can redistribute it and/or modify it under the
+# terms of the GNU Lesser General Public License as published by the Free
+# Software Foundation, either version 2.1 of the License, or (at your option)
+# any later version.
+#
+# CubicWeb is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
+# details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with CubicWeb. If not, see <http://www.gnu.org/licenses/>.
+"""only for unit tests !"""
+
+from cubicweb.view import View
+from cubicweb.predicates import match_http_method
+
+class PutView(View):
+ __regid__ = 'put'
+ __select__ = match_http_method('PUT')
+ binary = True
+
+ def call(self):
+ self.w(self._cw.content.read())
diff -r 2cf127d4f5fd -r b4bcabf55e77 etwist/test/unittest_server.py
--- a/etwist/test/unittest_server.py Fri Jun 14 16:13:24 2013 +0200
+++ b/etwist/test/unittest_server.py Fri Jun 14 16:26:25 2013 +0200
@@ -19,6 +19,7 @@
import os, os.path as osp, glob
from cubicweb.devtools.testlib import CubicWebTC
+from cubicweb.devtools.httptest import CubicWebServerTC
from cubicweb.etwist.server import host_prefixed_baseurl
@@ -53,6 +54,13 @@
self._check('http://localhost:8080/hg/', 'code.cubicweb.org',
'http://localhost:8080/hg/')
+
+class ETwistHTTPTC(CubicWebServerTC):
+ def test_put_content(self):
+ body = 'hop'
+ response = self.web_request('?vid=put', method='PUT', body=body)
+ self.assertEqual(body, response.body)
+
if __name__ == '__main__':
from logilab.common.testlib import unittest_main
unittest_main()
diff -r 2cf127d4f5fd -r b4bcabf55e77 etwist/twconfig.py
--- a/etwist/twconfig.py Fri Jun 14 16:13:24 2013 +0200
+++ b/etwist/twconfig.py Fri Jun 14 16:26:25 2013 +0200
@@ -28,10 +28,10 @@
from os.path import join
-from logilab.common.configuration import Method
+from logilab.common.configuration import Method, merge_options
from cubicweb.cwconfig import CONFIGURATIONS
-from cubicweb.web.webconfig import WebConfiguration, merge_options
+from cubicweb.web.webconfig import WebConfiguration
class TwistedConfiguration(WebConfiguration):
@@ -103,8 +103,8 @@
return join(self.apphome, '%s-%s.py' % (self.appid, self.name))
def default_base_url(self):
- from socket import gethostname
- return 'http://%s:%s/' % (self['host'] or gethostname(), self['port'] or 8080)
+ from socket import getfqdn
+ return 'http://%s:%s/' % (self['host'] or getfqdn(), self['port'] or 8080)
CONFIGURATIONS.append(TwistedConfiguration)
diff -r 2cf127d4f5fd -r b4bcabf55e77 ext/rest.py
--- a/ext/rest.py Fri Jun 14 16:13:24 2013 +0200
+++ b/ext/rest.py Fri Jun 14 16:26:25 2013 +0200
@@ -36,6 +36,7 @@
from itertools import chain
from logging import getLogger
from os.path import join
+from urlparse import urlsplit
from docutils import statemachine, nodes, utils, io
from docutils.core import Publisher
@@ -128,6 +129,63 @@
set_classes(options)
return [nodes.raw('', content, format='html')], []
+def bookmark_role(role, rawtext, text, lineno, inliner, options={}, content=[]):
+ """:bookmark:`` or :bookmark:`:`
+
+ Example: :bookmark:`1234:table`
+
+ Replace the directive with the output of applying the view to the resultset
+ returned by the query stored in the bookmark. By default, the view is the one
+ stored in the bookmark, but it can be overridden by the directive as in the
+ example above.
+
+ "X eid %(userid)s" can be used in the RQL query stored in the Bookmark, for
+ this query will be executed with the argument {'userid': _cw.user.eid}.
+ """
+ _cw = inliner.document.settings.context._cw
+ text = text.strip()
+ try:
+ if ':' in text:
+ eid, vid = text.rsplit(u':', 1)
+ eid = int(eid)
+ else:
+ eid, vid = int(text), None
+ except ValueError:
+ msg = inliner.reporter.error(
+ 'EID number must be a positive number; "%s" is invalid.'
+ % text, line=lineno)
+ prb = inliner.problematic(rawtext, rawtext, msg)
+ return [prb], [msg]
+ try:
+ bookmark = _cw.entity_from_eid(eid)
+ except UnknownEid:
+ msg = inliner.reporter.error('Unknown EID %s.' % text, line=lineno)
+ prb = inliner.problematic(rawtext, rawtext, msg)
+ return [prb], [msg]
+ try:
+ params = dict(_cw.url_parse_qsl(urlsplit(bookmark.path).query))
+ rql = params['rql']
+ if vid is None:
+ vid = params.get('vid')
+ except (ValueError, KeyError), exc:
+ msg = inliner.reporter.error('Could not parse bookmark path %s [%s].'
+ % (bookmark.path, exc), line=lineno)
+ prb = inliner.problematic(rawtext, rawtext, msg)
+ return [prb], [msg]
+ try:
+ rset = _cw.execute(rql, {'userid': _cw.user.eid})
+ if rset:
+ if vid is None:
+ vid = vid_from_rset(_cw, rset, _cw.vreg.schema)
+ else:
+ vid = 'noresult'
+ view = _cw.vreg['views'].select(vid, _cw, rset=rset)
+ content = view.render()
+ except Exception, exc:
+ content = 'An error occurred while interpreting directive bookmark: %r' % exc
+ set_classes(options)
+ return [nodes.raw('', content, format='html')], []
+
def winclude_directive(name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
"""Include a reST file as part of the content of this reST file.
@@ -323,6 +381,7 @@
_INITIALIZED = True
register_canonical_role('eid', eid_reference_role)
register_canonical_role('rql', rql_role)
+ register_canonical_role('bookmark', bookmark_role)
directives.register_directive('winclude', winclude_directive)
if pygments_directive is not None:
directives.register_directive('sourcecode', pygments_directive)
diff -r 2cf127d4f5fd -r b4bcabf55e77 ext/test/unittest_rest.py
--- a/ext/test/unittest_rest.py Fri Jun 14 16:13:24 2013 +0200
+++ b/ext/test/unittest_rest.py Fri Jun 14 16:26:25 2013 +0200
@@ -75,5 +75,12 @@
out = rest_publish(context, ':rql:`Any X WHERE X is CWUser`')
self.assertEqual(out, u'
\n')
+ def test_bookmark_role(self):
+ context = self.context()
+ rset = self.execute('INSERT Bookmark X: X title "hello", X path "/view?rql=Any X WHERE X is CWUser"')
+ eid = rset[0][0]
+ out = rest_publish(context, ':bookmark:`%s`' % eid)
+ self.assertEqual(out, u'