# HG changeset patch # User Rémi Cardona # Date 1449681849 -3600 # Node ID 2fdf67ef33415d8484cd95829aac18256d94b18e # Parent 90c55e27aa87add791fce460c652926feff76f0f# Parent 2fe19ba68daa8b5d60ca2cb16e02940aedd4cae3 merge with 3.20.11 diff -r 2fe19ba68daa -r 2fdf67ef3341 .hgignore --- a/.hgignore Wed Dec 09 16:36:17 2015 +0100 +++ b/.hgignore Wed Dec 09 18:24:09 2015 +0100 @@ -1,6 +1,8 @@ \.svn ^build$ ^dist$ +\.egg-info$ +^.tox$ \.pyc$ \.pyo$ \.bak$ @@ -11,11 +13,16 @@ ^doc/book/en/apidoc$ \.old$ syntax: regexp -.*/data/database/.*\.sqlite -.*/data/database/.*\.config +.*/data.*/database/.*\.sqlite +.*/data.*/database/.*\.config .*/data/database/tmpdb.* .*/data/ldapdb/.* +.*/data/uicache/ +.*/data/cubes/.*/i18n/.*\.po ^doc/html/ ^doc/doctrees/ ^doc/book/en/devweb/js_api/ +^doc/_build +^doc/js_api/ data/pgdb/ +data.*/pgdb.* diff -r 2fe19ba68daa -r 2fdf67ef3341 .hgtags --- a/.hgtags Wed Dec 09 16:36:17 2015 +0100 +++ b/.hgtags Wed Dec 09 18:24:09 2015 +0100 @@ -508,3 +508,12 @@ c44930ac9579fe4d526b26892954e56021af18be 3.20.11 c44930ac9579fe4d526b26892954e56021af18be debian/3.20.11-1 c44930ac9579fe4d526b26892954e56021af18be centos/3.20.11-1 +887c6eef807781560adcd4ecd2dea9011f5a6681 3.21.0 +887c6eef807781560adcd4ecd2dea9011f5a6681 debian/3.21.0-1 +887c6eef807781560adcd4ecd2dea9011f5a6681 centos/3.21.0-1 +a8a0de0298a58306d63dbc998ad60c48bf18c80a 3.21.1 +a8a0de0298a58306d63dbc998ad60c48bf18c80a debian/3.21.1-1 +a8a0de0298a58306d63dbc998ad60c48bf18c80a centos/3.21.1-1 +a5428e1ab36491a8e6d66ce09d23b708b97e1337 3.21.2 +a5428e1ab36491a8e6d66ce09d23b708b97e1337 debian/3.21.2-1 +a5428e1ab36491a8e6d66ce09d23b708b97e1337 centos/3.21.2-1 diff -r 2fe19ba68daa -r 2fdf67ef3341 MANIFEST.in --- a/MANIFEST.in Wed Dec 09 16:36:17 2015 +0100 +++ b/MANIFEST.in Wed Dec 09 18:24:09 2015 +0100 @@ -6,9 +6,18 @@ include man/cubicweb-ctl.1 include doc/*.rst +include doc/Makefile recursive-include doc/book * recursive-include doc/tools *.py recursive-include doc/tutorials *.rst *.py +include 
doc/api/*.rst +recursive-include doc/_themes * +recursive-include doc/_static * +include doc/_templates/*.html +include doc/changes/*.rst +recursive-include doc/dev .txt *.rst +recursive-include doc/images *.png *.svg +include doc/conf.py recursive-include misc *.py *.png *.display @@ -25,18 +34,18 @@ recursive-include sobjects/test/data bootstrap_cubes *.py recursive-include hooks/test/data bootstrap_cubes *.py recursive-include server/test/data bootstrap_cubes *.py source* *.conf.in *.ldif -recursive-include devtools/test/data bootstrap_cubes *.py *.txt *.js +recursive-include devtools/test/data bootstrap_cubes *.py *.txt *.js *.po.ref recursive-include web/test/data bootstrap_cubes pouet.css *.py +recursive-include etwist/test/data *.py recursive-include web/test/jstests *.js *.html *.css *.json recursive-include web/test/windmill *.py -recursive-include skeleton *.py *.css *.js *.po compat *.in *.tmpl +recursive-include skeleton *.py *.css *.js *.po compat *.in *.tmpl rules prune doc/book/en/.static prune doc/book/fr/.static prune doc/html/_sources/ prune misc/cwfs -prune goa -prune doc/book/en/devweb/js_api +prune doc/js_api global-exclude *.pyc diff -r 2fe19ba68daa -r 2fdf67ef3341 README --- a/README Wed Dec 09 16:36:17 2015 +0100 +++ b/README Wed Dec 09 18:24:09 2015 +0100 @@ -14,7 +14,7 @@ Install ------- -More details at http://docs.cubicweb.org/admin/setup +More details at http://docs.cubicweb.org/book/admin/setup Getting started --------------- diff -r 2fe19ba68daa -r 2fdf67ef3341 __pkginfo__.py --- a/__pkginfo__.py Wed Dec 09 16:36:17 2015 +0100 +++ b/__pkginfo__.py Wed Dec 09 18:24:09 2015 +0100 @@ -22,7 +22,7 @@ modname = distname = "cubicweb" -numversion = (3, 20, 11) +numversion = (3, 21, 2) version = '.'.join(str(num) for num in numversion) description = "a repository of entities / relations for knowledge management" @@ -55,7 +55,6 @@ __recommends__ = { 'docutils': '>= 0.6', - 'Pyro': '>= 3.9.1, < 4.0.0', 'Pillow': '', # for captcha 'pycrypto': '', 
# for crypto extensions 'fyzz': '>= 0.1.0', # for sparql @@ -115,8 +114,6 @@ [join('share', 'cubicweb', 'cubes', 'shared', 'data'), [join(_data_dir, fname) for fname in listdir(_data_dir) if not isdir(join(_data_dir, fname))]], - [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'timeline'), - [join(_data_dir, 'timeline', fname) for fname in listdir(join(_data_dir, 'timeline'))]], [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'images'), [join(_data_dir, 'images', fname) for fname in listdir(join(_data_dir, 'images'))]], [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'jquery-treeview'), diff -r 2fe19ba68daa -r 2fdf67ef3341 _exceptions.py --- a/_exceptions.py Wed Dec 09 16:36:17 2015 +0100 +++ b/_exceptions.py Wed Dec 09 18:24:09 2015 +0100 @@ -82,6 +82,8 @@ self.session = session assert 'rtypes' in kwargs or 'cstrname' in kwargs self.kwargs = kwargs + # fill cache while the session is open + self.rtypes @cachedproperty def rtypes(self): @@ -100,6 +102,12 @@ return None, self.rtypes +class ViolatedConstraint(RepositoryError): + def __init__(self, cnx, cstrname): + self.cnx = cnx + self.cstrname = cstrname + + # security exceptions ######################################################### class Unauthorized(SecurityError): diff -r 2fe19ba68daa -r 2fdf67ef3341 _gcdebug.py --- a/_gcdebug.py Wed Dec 09 16:36:17 2015 +0100 +++ b/_gcdebug.py Wed Dec 09 18:24:09 2015 +0100 @@ -19,6 +19,10 @@ import gc, types, weakref from cubicweb.schema import CubicWebRelationSchema, CubicWebEntitySchema +try: + from cubicweb.web.request import _NeedAuthAccessMock +except ImportError: + _NeedAuthAccessMock = None listiterator = type(iter([])) @@ -30,6 +34,8 @@ types.ModuleType, types.FunctionType, types.MethodType, types.MemberDescriptorType, types.GetSetDescriptorType, ) +if _NeedAuthAccessMock is not None: + IGNORE_CLASSES = IGNORE_CLASSES + (_NeedAuthAccessMock,) def _get_counted_class(obj, classes): for cls in classes: @@ -63,7 +69,8 @@ ocounters[key] = 1 if 
isinstance(obj, viewreferrersclasses): print ' ', obj, referrers(obj, showobjs, maxlevel) - return counters, ocounters, gc.garbage + garbage = [repr(obj) for obj in gc.garbage] + return counters, ocounters, garbage def referrers(obj, showobj=False, maxlevel=1): diff -r 2fe19ba68daa -r 2fdf67ef3341 appobject.py --- a/appobject.py Wed Dec 09 16:36:17 2015 +0100 +++ b/appobject.py Wed Dec 09 18:24:09 2015 +0100 @@ -16,7 +16,6 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . """ -.. _appobject: The `AppObject` class --------------------- @@ -27,7 +26,6 @@ We can find a certain number of attributes and methods defined in this class and common to all the application objects. -.. autoclass:: AppObject """ __docformat__ = "restructuredtext en" diff -r 2fe19ba68daa -r 2fdf67ef3341 bin/clone_deps.py --- a/bin/clone_deps.py Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,117 +0,0 @@ -#!/usr/bin/python -import sys - -from subprocess import call as sbp_call, Popen, PIPE -from urllib import urlopen -import os -from os import path as osp, pardir, chdir - - -def find_mercurial(): - print "trying to find mercurial from the command line ..." 
- print '-' * 20 - tryhg = sbp_call(['hg', '--version']) - if tryhg: - print 'mercurial seems to be unavailable, please install it' - raise - print '-' * 20 - def hg_call(args): - return sbp_call(['hg'] + args) - - return hg_call - - -BASE_URL = 'http://www.logilab.org/hg/' - -to_clone = ['fyzz', 'yams', 'rql', - 'logilab/common', 'logilab/constraint', 'logilab/database', - 'logilab/devtools', 'logilab/mtconverter', - 'cubes/blog', 'cubes/calendar', 'cubes/card', 'cubes/comment', - 'cubes/datafeed', 'cubes/email', 'cubes/file', 'cubes/folder', - 'cubes/forgotpwd', 'cubes/keyword', 'cubes/link', 'cubes/localperms', - 'cubes/mailinglist', 'cubes/nosylist', 'cubes/person', - 'cubes/preview', 'cubes/registration', 'cubes/rememberme', - 'cubes/tag', 'cubes/vcsfile', 'cubes/zone'] - -# a couple of functions to be used to explore available -# repositories and cubes -def list_repos(repos_root): - assert repos_root.startswith('http://') - hgwebdir_repos = (repo.strip() - for repo in urlopen(repos_root + '?style=raw').readlines() - if repo.strip()) - prefix = osp.commonprefix(hgwebdir_repos) - return (repo[len(prefix):].strip('/') - for repo in hgwebdir_repos) - -def list_all_cubes(base_url=BASE_URL): - all_repos = list_repos(base_url) - #search for cubes - for repo in all_repos: - if repo.startswith('cubes'): - to_clone.append(repo) - -def get_latest_debian_tag(path): - proc = Popen(['hg', '-R', path, 'tags'], stdout=PIPE) - out, _err = proc.communicate() - for line in out.splitlines(): - if 'debian-version' in line: - return line.split()[0] - -def main(): - if len(sys.argv) == 1: - base_url = BASE_URL - elif len(sys.argv) == 2: - base_url = sys.argv[1] - else: - sys.stderr.write('usage %s [base_url]\n' % sys.argv[0]) - sys.exit(1) - hg_call = find_mercurial() - print len(to_clone), 'repositories will be cloned' - base_dir = osp.normpath(osp.join(osp.dirname(__file__), pardir, pardir)) - chdir(base_dir) - not_updated = [] - for repo in to_clone: - url = base_url + repo - if 
'/' not in repo: - target_path = repo - else: - assert repo.count('/') == 1, repo - directory, repo = repo.split('/') - if not osp.isdir(directory): - os.mkdir(directory) - open(osp.join(directory, '__init__.py'), 'w').close() - target_path = osp.join(directory, repo) - if osp.exists(target_path): - print target_path, 'seems already cloned. Skipping it.' - else: - hg_call(['clone', '-U', url, target_path]) - tag = get_latest_debian_tag(target_path) - if tag: - print 'updating to', tag - hg_call(['update', '-R', target_path, tag]) - else: - not_updated.append(target_path) - print """ -CubicWeb dependencies and standard set of cubes have been fetched and -update to the latest stable version. - -You should ensure your PYTHONPATH contains `%(basedir)s`. -You might want to read the environment configuration section of the documentation -at http://docs.cubicweb.org/admin/setup.html#environment-configuration - -You can find more cubes at http://www.cubicweb.org. -Clone them from `%(baseurl)scubes/` into the `%(basedir)s%(sep)scubes%(sep)s` directory. - -To get started you may read http://docs.cubicweb.org/tutorials/base/index.html. 
-""" % {'basedir': os.getcwd(), 'baseurl': base_url, 'sep': os.sep} - if not_updated: - sys.stderr.write('WARNING: The following repositories were not updated (no debian tag found):\n') - for path in not_updated: - sys.stderr.write('\t-%s\n' % path) - -if __name__ == '__main__': - main() - - - diff -r 2fe19ba68daa -r 2fdf67ef3341 cubicweb.spec --- a/cubicweb.spec Wed Dec 09 16:36:17 2015 +0100 +++ b/cubicweb.spec Wed Dec 09 18:24:09 2015 +0100 @@ -7,7 +7,7 @@ %endif Name: cubicweb -Version: 3.20.11 +Version: 3.21.2 Release: logilab.1%{?dist} Summary: CubicWeb is a semantic web application framework Source0: http://download.logilab.org/pub/cubicweb/cubicweb-%{version}.tar.gz diff -r 2fe19ba68daa -r 2fdf67ef3341 cwconfig.py --- a/cwconfig.py Wed Dec 09 16:36:17 2015 +0100 +++ b/cwconfig.py Wed Dec 09 18:24:09 2015 +0100 @@ -279,7 +279,7 @@ ('default-text-format', {'type' : 'choice', 'choices': ('text/plain', 'text/rest', 'text/html', 'text/markdown'), - 'default': 'text/html', # use fckeditor in the web ui + 'default': 'text/plain', 'help': _('default text format for rich text fields.'), 'group': 'ui', }), @@ -835,7 +835,7 @@ # set by upgrade command verbosity = 0 - + cmdline_options = None options = CubicWebNoAppConfiguration.options + ( ('log-file', {'type' : 'string', @@ -843,6 +843,13 @@ 'help': 'file where output logs should be written', 'group': 'main', 'level': 2, }), + ('statsd-endpoint', + {'type' : 'string', + 'default': '', + 'help': 'UDP address of the statsd endpoint; it must be formatted' + 'like :; disabled is unset.', + 'group': 'main', 'level': 2, + }), # email configuration ('smtp-host', {'type' : 'string', @@ -870,6 +877,18 @@ the repository', 'group': 'email', 'level': 1, }), + ('logstat-interval', + {'type' : 'int', + 'default': 0, + 'help': 'interval (in seconds) at which stats are dumped in the logstat file; set 0 to disable', + 'group': 'main', 'level': 2, + }), + ('logstat-file', + {'type' : 'string', + 'default': 
Method('default_stats_file'), + 'help': 'file where stats for the instance should be written', + 'group': 'main', 'level': 2, + }), ) @classmethod @@ -953,6 +972,13 @@ log_path = os.path.join(_INSTALL_PREFIX, 'var', 'log', 'cubicweb', '%s-%s.log') return log_path % (self.appid, self.name) + def default_stats_file(self): + """return default path to the stats file of the instance'server""" + logfile = self.default_log_file() + if logfile.endswith('.log'): + logfile = logfile[:-4] + return logfile + '.stats' + def default_pid_file(self): """return default path to the pid file of the instance'server""" if self.mode == 'system': @@ -1010,7 +1036,7 @@ # or site_cubicweb files self.load_file_configuration(self.main_config_file()) # configuration initialization hook - self.load_configuration() + self.load_configuration(**(self.cmdline_options or {})) def add_cubes(self, cubes): """add given cubes to the list of used cubes""" @@ -1077,9 +1103,9 @@ infos.append('cubicweb-%s' % str(self.cubicweb_version())) return md5(';'.join(infos)).hexdigest() - def load_configuration(self): + def load_configuration(self, **kw): """load instance's configuration files""" - super(CubicWebConfiguration, self).load_configuration() + super(CubicWebConfiguration, self).load_configuration(**kw) if self.apphome and not self.creating: # init gettext self._gettext_init() @@ -1102,6 +1128,17 @@ logconfig = join(self.apphome, 'logging.conf') if exists(logconfig): logging.config.fileConfig(logconfig) + # set the statsd address, if any + if self.get('statsd-endpoint'): + try: + address, port = self.get('statsd-endpoint').split(':') + port = int(port) + except: + self.error('statsd-endpoint: invalid address format ({}); ' + 'it should be "ip:port"'.format(self.get('statsd-endpoint'))) + else: + import statsd_logger + statsd_logger.setup('cubicweb.%s' % self.appid, (address, port)) def available_languages(self, *args): """return available translation for an instance, by looking for diff -r 2fe19ba68daa -r 
2fdf67ef3341 cwctl.py --- a/cwctl.py Wed Dec 09 16:36:17 2015 +0100 +++ b/cwctl.py Wed Dec 09 18:24:09 2015 +0100 @@ -1,4 +1,4 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -25,7 +25,7 @@ # possible (for cubicweb-ctl reactivity, necessary for instance for usable bash # completion). So import locally in command helpers. import sys -from warnings import warn +from warnings import warn, filterwarnings from os import remove, listdir, system, pathsep from os.path import exists, join, isfile, isdir, dirname, abspath from urlparse import urlparse @@ -401,7 +401,7 @@ if 'type' in odict and odict.get('level') <= self.config.config_level) for section in sections: - if section not in ('main', 'email', 'pyro', 'web'): + if section not in ('main', 'email', 'web'): print '\n' + underline_title('%s options' % section) config.input_config(section, self.config.config_level) # write down configuration @@ -520,7 +520,12 @@ 'default': None, 'choices': ('debug', 'info', 'warning', 'error'), 'help': 'debug if -D is set, error otherwise', }), - ) + ('param', + {'short': 'p', 'type' : 'named', 'metavar' : 'key1:value1,key2:value2', + 'default': {}, + 'help': 'override configuration file option with .', + }), + ) def start_instance(self, appid): """start the instance's server""" @@ -534,6 +539,8 @@ "- '{ctl} pyramid {appid}' (requires the pyramid cube)\n") raise ExecutionError(msg.format(ctl='cubicweb-ctl', appid=appid)) config = cwcfg.config_for(appid, debugmode=self['debug']) + # override config file values with cmdline options + config.cmdline_options = self.config.param init_cmdline_log_threshold(config, self['loglevel']) if self['profile']: config.global_set_option('profile', self.config.profile) @@ -900,9 +907,7 @@ ('repo-uri', {'short': 'H', 'type' : 'string', 'metavar': 
'://<[host][:port]>', 'help': 'URI of the CubicWeb repository to connect to. URI can be \ -pyro://[host:port] the Pyro name server host; if the pyro nameserver is not set, \ -it will be detected by using a broadcast query, a ZMQ URL or \ -inmemory:// (default) use an in-memory repository. THIS OPTION IS DEPRECATED, \ +a ZMQ URL or inmemory:// (default) use an in-memory repository. THIS OPTION IS DEPRECATED, \ directly give URI as instance id instead', 'group': 'remote' }), @@ -953,7 +958,7 @@ if self.config.repo_uri: warn('[3.16] --repo-uri option is deprecated, directly give the URI as instance id', DeprecationWarning) - if urlparse(self.config.repo_uri).scheme in ('pyro', 'inmemory'): + if urlparse(self.config.repo_uri).scheme == 'inmemory': appuri = '%s/%s' % (self.config.repo_uri.rstrip('/'), appuri) from cubicweb.utils import parse_repo_uri @@ -1135,6 +1140,7 @@ import os sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0) sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 0) + filterwarnings('default', category=DeprecationWarning) cwcfg.load_cwctl_plugins() try: CWCTL.run(args) diff -r 2fe19ba68daa -r 2fdf67ef3341 cwvreg.py --- a/cwvreg.py Wed Dec 09 16:36:17 2015 +0100 +++ b/cwvreg.py Wed Dec 09 18:24:09 2015 +0100 @@ -15,179 +15,8 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -""".. RegistryStore: - -The `RegistryStore` -------------------- - -The `RegistryStore` can be seen as a two-level dictionary. It contains -all dynamically loaded objects (subclasses of :ref:`appobject`) to -build a |cubicweb| application. Basically: - -* the first level key returns a *registry*. This key corresponds to the - `__registry__` attribute of application object classes - -* the second level key returns a list of application objects which - share the same identifier. This key corresponds to the `__regid__` - attribute of application object classes. 
- -A *registry* holds a specific kind of application objects. There is -for instance a registry for entity classes, another for views, etc... - -The `RegistryStore` has two main responsibilities: - -- being the access point to all registries - -- handling the registration process at startup time, and during automatic - reloading in debug mode. - -.. _AppObjectRecording: - -Details of the recording process -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. index:: - vregistry: registration_callback - -On startup, |cubicweb| loads application objects defined in its library -and in cubes used by the instance. Application objects from the -library are loaded first, then those provided by cubes are loaded in -dependency order (e.g. if your cube depends on an other, objects from -the dependency will be loaded first). The layout of the modules or packages -in a cube is explained in :ref:`cubelayout`. - -For each module: - -* by default all objects are registered automatically - -* if some objects have to replace other objects, or have to be - included only if some condition is met, you'll have to define a - `registration_callback(vreg)` function in your module and explicitly - register **all objects** in this module, using the api defined - below. - -.. Note:: - Once the function `registration_callback(vreg)` is implemented in a module, - all the objects from this module have to be explicitly registered as it - disables the automatic objects registration. - - -API for objects registration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Here are the registration methods that you can use in the `registration_callback` -to register your objects to the `RegistryStore` instance given as argument (usually -named `vreg`): - -.. automethod:: cubicweb.cwvreg.CWRegistryStore.register_all -.. automethod:: cubicweb.cwvreg.CWRegistryStore.register_and_replace -.. automethod:: cubicweb.cwvreg.CWRegistryStore.register -.. automethod:: cubicweb.cwvreg.CWRegistryStore.unregister - -Examples: - -.. 
sourcecode:: python - - # web/views/basecomponents.py - def registration_callback(vreg): - # register everything in the module except SeeAlsoComponent - vreg.register_all(globals().itervalues(), __name__, (SeeAlsoVComponent,)) - # conditionally register SeeAlsoVComponent - if 'see_also' in vreg.schema: - vreg.register(SeeAlsoVComponent) - -In this example, we register all application object classes defined in the module -except `SeeAlsoVComponent`. This class is then registered only if the 'see_also' -relation type is defined in the instance'schema. - -.. sourcecode:: python - - # goa/appobjects/sessions.py - def registration_callback(vreg): - vreg.register(SessionsCleaner) - # replace AuthenticationManager by GAEAuthenticationManager - vreg.register_and_replace(GAEAuthenticationManager, AuthenticationManager) - # replace PersistentSessionManager by GAEPersistentSessionManager - vreg.register_and_replace(GAEPersistentSessionManager, PersistentSessionManager) - -In this example, we explicitly register classes one by one: - -* the `SessionCleaner` class -* the `GAEAuthenticationManager` to replace the `AuthenticationManager` -* the `GAEPersistentSessionManager` to replace the `PersistentSessionManager` - -If at some point we register a new appobject class in this module, it won't be -registered at all without modification to the `registration_callback` -implementation. The previous example will register it though, thanks to the call -to the `register_all` method. - - -.. _Selection: - -Runtime objects selection -~~~~~~~~~~~~~~~~~~~~~~~~~ - -Now that we have all application objects loaded, the question is : when -I want some specific object, for instance the primary view for a given -entity, how do I get the proper object ? This is what we call the -**selection mechanism**. 
- -As explained in the :ref:`Concepts` section: - -* each application object has a **selector**, defined by its - `__select__` class attribute - -* this selector is responsible to return a **score** for a given context - - - 0 score means the object doesn't apply to this context - - - else, the higher the score, the better the object suits the context - -* the object with the highest score is selected. - -.. Note:: - - When no single object has the highest score, an exception is raised in development - mode to let you know that the engine was not able to identify the view to - apply. This error is silenced in production mode and one of the objects with - the highest score is picked. - - In such cases you would need to review your design and make sure - your selectors or appobjects are properly defined. Such an error is - typically caused by either forgetting to change the __regid__ in a - derived class, or by having copy-pasted some code. - -For instance, if you are selecting the primary (`__regid__ = -'primary'`) view (`__registry__ = 'views'`) for a result set -containing a `Card` entity, two objects will probably be selectable: - -* the default primary view (`__select__ = is_instance('Any')`), meaning - that the object is selectable for any kind of entity type - -* the specific `Card` primary view (`__select__ = is_instance('Card')`, - meaning that the object is selectable for Card entities - -Other primary views specific to other entity types won't be selectable in this -case. Among selectable objects, the `is_instance('Card')` selector will return a higher -score since it's more specific, so the correct view will be selected as expected. - -.. _SelectionAPI: - -API for objects selections -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Here is the selection API you'll get on every registry. Some of them, as the -'etypes' registry, containing entity classes, extend it. In those methods, -`*args, **kwargs` is what we call the **context**. 
Those arguments are given to -selectors that will inspect their content and return a score accordingly. - -.. automethod:: cubicweb.vregistry.Registry.select - -.. automethod:: cubicweb.vregistry.Registry.select_or_none - -.. automethod:: cubicweb.vregistry.Registry.possible_objects - -.. automethod:: cubicweb.vregistry.Registry.object_by_id +""" +Cubicweb registries """ __docformat__ = "restructuredtext en" @@ -229,6 +58,7 @@ sys.modules.pop('cubicweb.web.uicfg', None) sys.modules.pop('cubicweb.web.uihelper', None) + def require_appobject(obj): """return appobjects required by the given object by searching for `appobject_selectable` predicate @@ -241,11 +71,16 @@ class CWRegistry(Registry): def __init__(self, vreg): + """ + :param vreg: the :py:class:`CWRegistryStore` managing this registry. + """ super(CWRegistry, self).__init__(True) self.vreg = vreg @property def schema(self): + """The :py:class:`cubicweb.schema.CubicWebSchema` + """ return self.vreg.schema def poss_visible_objects(self, *args, **kwargs): @@ -269,7 +104,7 @@ def selected(self, winner, args, kwargs): """overriden to avoid the default 'instanciation' behaviour, ie - winner(*args, **kwargs) + `winner(*args, **kwargs)` """ return winner diff -r 2fe19ba68daa -r 2fdf67ef3341 dataimport.py --- a/dataimport.py Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1173 +0,0 @@ -# -*- coding: utf-8 -*- -# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. 
-# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . -"""This module provides tools to import tabular data. - - -Example of use (run this with `cubicweb-ctl shell instance import-script.py`): - -.. sourcecode:: python - - from cubicweb.dataimport import * - # define data generators - GENERATORS = [] - - USERS = [('Prenom', 'firstname', ()), - ('Nom', 'surname', ()), - ('Identifiant', 'login', ()), - ] - - def gen_users(ctl): - for row in ctl.iter_and_commit('utilisateurs'): - entity = mk_entity(row, USERS) - entity['upassword'] = 'motdepasse' - ctl.check('login', entity['login'], None) - entity = ctl.store.create_entity('CWUser', **entity) - email = ctl.store.create_entity('EmailAddress', address=row['email']) - ctl.store.relate(entity.eid, 'use_email', email.eid) - ctl.store.rql('SET U in_group G WHERE G name "users", U eid %(x)s', {'x':entity['eid']}) - - CHK = [('login', check_doubles, 'Utilisateurs Login', - 'Deux utilisateurs ne devraient pas avoir le même login.'), - ] - - GENERATORS.append( (gen_users, CHK) ) - - # create controller - ctl = CWImportController(RQLObjectStore(cnx)) - ctl.askerror = 1 - ctl.generators = GENERATORS - ctl.data['utilisateurs'] = lazytable(ucsvreader(open('users.csv'))) - # run - ctl.run() - -.. BUG file with one column are not parsable -.. 
TODO rollback() invocation is not possible yet -""" -__docformat__ = "restructuredtext en" - -import csv -import sys -import threading -import traceback -import warnings -import cPickle -import os.path as osp -import inspect -from base64 import b64encode -from collections import defaultdict -from copy import copy -from datetime import date, datetime, time -from time import asctime -from StringIO import StringIO - -from logilab.common import shellutils, attrdict -from logilab.common.date import strptime -from logilab.common.decorators import cached -from logilab.common.deprecation import deprecated - -from cubicweb import QueryError -from cubicweb.utils import make_uid -from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES -from cubicweb.server.edition import EditedEntity -from cubicweb.server.sqlutils import SQL_PREFIX -from cubicweb.server.utils import eschema_eid - - -def count_lines(stream_or_filename): - if isinstance(stream_or_filename, basestring): - f = open(stream_or_filename) - else: - f = stream_or_filename - f.seek(0) - for i, line in enumerate(f): - pass - f.seek(0) - return i+1 - -def ucsvreader_pb(stream_or_path, encoding='utf-8', delimiter=',', quotechar='"', - skipfirst=False, withpb=True, skip_empty=True, separator=None, - quote=None): - """same as :func:`ucsvreader` but a progress bar is displayed as we iter on rows""" - if separator is not None: - delimiter = separator - warnings.warn("[3.20] 'separator' kwarg is deprecated, use 'delimiter' instead") - if quote is not None: - quotechar = quote - warnings.warn("[3.20] 'quote' kwarg is deprecated, use 'quotechar' instead") - if isinstance(stream_or_path, basestring): - if not osp.exists(stream_or_path): - raise Exception("file doesn't exists: %s" % stream_or_path) - stream = open(stream_or_path) - else: - stream = stream_or_path - rowcount = count_lines(stream) - if skipfirst: - rowcount -= 1 - if withpb: - pb = shellutils.ProgressBar(rowcount, 50) - for urow in ucsvreader(stream, encoding, 
delimiter, quotechar, - skipfirst=skipfirst, skip_empty=skip_empty): - yield urow - if withpb: - pb.update() - print ' %s rows imported' % rowcount - -def ucsvreader(stream, encoding='utf-8', delimiter=',', quotechar='"', - skipfirst=False, ignore_errors=False, skip_empty=True, - separator=None, quote=None): - """A csv reader that accepts files with any encoding and outputs unicode - strings - - if skip_empty (the default), lines without any values specified (only - separators) will be skipped. This is useful for Excel exports which may be - full of such lines. - """ - if separator is not None: - delimiter = separator - warnings.warn("[3.20] 'separator' kwarg is deprecated, use 'delimiter' instead") - if quote is not None: - quotechar = quote - warnings.warn("[3.20] 'quote' kwarg is deprecated, use 'quotechar' instead") - it = iter(csv.reader(stream, delimiter=delimiter, quotechar=quotechar)) - if not ignore_errors: - if skipfirst: - it.next() - for row in it: - decoded = [item.decode(encoding) for item in row] - if not skip_empty or any(decoded): - yield decoded - else: - if skipfirst: - try: - row = it.next() - except csv.Error: - pass - # Safe version, that can cope with error in CSV file - while True: - try: - row = it.next() - # End of CSV, break - except StopIteration: - break - # Error in CSV, ignore line and continue - except csv.Error: - continue - decoded = [item.decode(encoding) for item in row] - if not skip_empty or any(decoded): - yield decoded - - -def callfunc_every(func, number, iterable): - """yield items of `iterable` one by one and call function `func` - every `number` iterations. Always call function `func` at the end. - """ - for idx, item in enumerate(iterable): - yield item - if not idx % number: - func() - func() - -def lazytable(reader): - """The first row is taken to be the header of the table and - used to output a dict for each row of data. 
- - >>> data = lazytable(ucsvreader(open(filename))) - """ - header = reader.next() - for row in reader: - yield dict(zip(header, row)) - -def lazydbtable(cu, table, headers, orderby=None): - """return an iterator on rows of a sql table. On each row, fetch columns - defined in headers and return values as a dictionary. - - >>> data = lazydbtable(cu, 'experimentation', ('id', 'nickname', 'gps')) - """ - sql = 'SELECT %s FROM %s' % (','.join(headers), table,) - if orderby: - sql += ' ORDER BY %s' % ','.join(orderby) - cu.execute(sql) - while True: - row = cu.fetchone() - if row is None: - break - yield dict(zip(headers, row)) - -def mk_entity(row, map): - """Return a dict made from sanitized mapped values. - - ValueError can be raised on unexpected values found in checkers - - >>> row = {'myname': u'dupont'} - >>> map = [('myname', u'name', (call_transform_method('title'),))] - >>> mk_entity(row, map) - {'name': u'Dupont'} - >>> row = {'myname': u'dupont', 'optname': u''} - >>> map = [('myname', u'name', (call_transform_method('title'),)), - ... 
('optname', u'MARKER', (optional,))] - >>> mk_entity(row, map) - {'name': u'Dupont', 'optname': None} - """ - res = {} - assert isinstance(row, dict) - assert isinstance(map, list) - for src, dest, funcs in map: - try: - res[dest] = row[src] - except KeyError: - continue - try: - for func in funcs: - res[dest] = func(res[dest]) - if res[dest] is None: - break - except ValueError as err: - raise ValueError('error with %r field: %s' % (src, err)), None, sys.exc_info()[-1] - return res - -# user interactions ############################################################ - -def tell(msg): - print msg - -def confirm(question): - """A confirm function that asks for yes/no/abort and exits on abort.""" - answer = shellutils.ASK.ask(question, ('Y', 'n', 'abort'), 'Y') - if answer == 'abort': - sys.exit(1) - return answer == 'Y' - - -class catch_error(object): - """Helper for @contextmanager decorator.""" - - def __init__(self, ctl, key='unexpected error', msg=None): - self.ctl = ctl - self.key = key - self.msg = msg - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - if type is not None: - if issubclass(type, (KeyboardInterrupt, SystemExit)): - return # re-raise - if self.ctl.catcherrors: - self.ctl.record_error(self.key, None, type, value, traceback) - return True # silent - - -# base sanitizing/coercing functions ########################################### - -def optional(value): - """checker to filter optional field - - If value is undefined (ex: empty string), return None that will - break the checkers validation chain - - General use is to add 'optional' check in first condition to avoid - ValueError by further checkers - - >>> MAPPER = [(u'value', 'value', (optional, int))] - >>> row = {'value': u'XXX'} - >>> mk_entity(row, MAPPER) - {'value': None} - >>> row = {'value': u'100'} - >>> mk_entity(row, MAPPER) - {'value': 100} - """ - if value: - return value - return None - -def required(value): - """raise ValueError if value is empty 
- - This check should be often found in last position in the chain. - """ - if value: - return value - raise ValueError("required") - -def todatetime(format='%d/%m/%Y'): - """return a transformation function to turn string input value into a - `datetime.datetime` instance, using given format. - - Follow it by `todate` or `totime` functions from `logilab.common.date` if - you want a `date`/`time` instance instead of `datetime`. - """ - def coerce(value): - return strptime(value, format) - return coerce - -def call_transform_method(methodname, *args, **kwargs): - """return value returned by calling the given method on input""" - def coerce(value): - return getattr(value, methodname)(*args, **kwargs) - return coerce - -def call_check_method(methodname, *args, **kwargs): - """check value returned by calling the given method on input is true, - else raise ValueError - """ - def check(value): - if getattr(value, methodname)(*args, **kwargs): - return value - raise ValueError('%s not verified on %r' % (methodname, value)) - return check - -# base integrity checking functions ############################################ - -def check_doubles(buckets): - """Extract the keys that have more than one item in their bucket.""" - return [(k, len(v)) for k, v in buckets.items() if len(v) > 1] - -def check_doubles_not_none(buckets): - """Extract the keys that have more than one item in their bucket.""" - return [(k, len(v)) for k, v in buckets.items() - if k is not None and len(v) > 1] - -# sql generator utility functions ############################################# - - -def _import_statements(sql_connect, statements, nb_threads=3, - dump_output_dir=None, - support_copy_from=True, encoding='utf-8'): - """ - Import a bunch of sql statements, using different threads. 
- """ - try: - chunksize = (len(statements) / nb_threads) + 1 - threads = [] - for i in xrange(nb_threads): - chunks = statements[i*chunksize:(i+1)*chunksize] - thread = threading.Thread(target=_execmany_thread, - args=(sql_connect, chunks, - dump_output_dir, - support_copy_from, - encoding)) - thread.start() - threads.append(thread) - for t in threads: - t.join() - except Exception: - print 'Error in import statements' - -def _execmany_thread_not_copy_from(cu, statement, data, table=None, - columns=None, encoding='utf-8'): - """ Execute thread without copy from - """ - cu.executemany(statement, data) - -def _execmany_thread_copy_from(cu, statement, data, table, - columns, encoding='utf-8'): - """ Execute thread with copy from - """ - buf = _create_copyfrom_buffer(data, columns, encoding=encoding) - if buf is None: - _execmany_thread_not_copy_from(cu, statement, data) - else: - if columns is None: - cu.copy_from(buf, table, null='NULL') - else: - cu.copy_from(buf, table, null='NULL', columns=columns) - -def _execmany_thread(sql_connect, statements, dump_output_dir=None, - support_copy_from=True, encoding='utf-8'): - """ - Execute sql statement. If 'INSERT INTO', try to use 'COPY FROM' command, - or fallback to execute_many. 
- """ - if support_copy_from: - execmany_func = _execmany_thread_copy_from - else: - execmany_func = _execmany_thread_not_copy_from - cnx = sql_connect() - cu = cnx.cursor() - try: - for statement, data in statements: - table = None - columns = None - try: - if not statement.startswith('INSERT INTO'): - cu.executemany(statement, data) - continue - table = statement.split()[2] - if isinstance(data[0], (tuple, list)): - columns = None - else: - columns = list(data[0]) - execmany_func(cu, statement, data, table, columns, encoding) - except Exception: - print 'unable to copy data into table %s' % table - # Error in import statement, save data in dump_output_dir - if dump_output_dir is not None: - pdata = {'data': data, 'statement': statement, - 'time': asctime(), 'columns': columns} - filename = make_uid() - try: - with open(osp.join(dump_output_dir, - '%s.pickle' % filename), 'w') as fobj: - fobj.write(cPickle.dumps(pdata)) - except IOError: - print 'ERROR while pickling in', dump_output_dir, filename+'.pickle' - pass - cnx.rollback() - raise - finally: - cnx.commit() - cu.close() - - -def _copyfrom_buffer_convert_None(value, **opts): - '''Convert None value to "NULL"''' - return 'NULL' - -def _copyfrom_buffer_convert_number(value, **opts): - '''Convert a number into its string representation''' - return str(value) - -def _copyfrom_buffer_convert_string(value, **opts): - '''Convert string value. - - Recognized keywords: - :encoding: resulting string encoding (default: utf-8) - :replace_sep: character used when input contains characters - that conflict with the column separator. 
- ''' - encoding = opts.get('encoding','utf-8') - replace_sep = opts.get('replace_sep', None) - # Remove separators used in string formatting - for _char in (u'\t', u'\r', u'\n'): - if _char in value: - # If a replace_sep is given, replace - # the separator - # (and thus avoid empty buffer) - if replace_sep is None: - raise ValueError('conflicting separator: ' - 'you must provide the replace_sep option') - value = value.replace(_char, replace_sep) - value = value.replace('\\', r'\\') - if isinstance(value, unicode): - value = value.encode(encoding) - return value - -def _copyfrom_buffer_convert_date(value, **opts): - '''Convert date into "YYYY-MM-DD"''' - # Do not use strftime, as it yields issue with date < 1900 - # (http://bugs.python.org/issue1777412) - return '%04d-%02d-%02d' % (value.year, value.month, value.day) - -def _copyfrom_buffer_convert_datetime(value, **opts): - '''Convert date into "YYYY-MM-DD HH:MM:SS.UUUUUU"''' - # Do not use strftime, as it yields issue with date < 1900 - # (http://bugs.python.org/issue1777412) - return '%s %s' % (_copyfrom_buffer_convert_date(value, **opts), - _copyfrom_buffer_convert_time(value, **opts)) - -def _copyfrom_buffer_convert_time(value, **opts): - '''Convert time into "HH:MM:SS.UUUUUU"''' - return '%02d:%02d:%02d.%06d' % (value.hour, value.minute, - value.second, value.microsecond) - -# (types, converter) list. -_COPYFROM_BUFFER_CONVERTERS = [ - (type(None), _copyfrom_buffer_convert_None), - ((long, int, float), _copyfrom_buffer_convert_number), - (basestring, _copyfrom_buffer_convert_string), - (datetime, _copyfrom_buffer_convert_datetime), - (date, _copyfrom_buffer_convert_date), - (time, _copyfrom_buffer_convert_time), -] - -def _create_copyfrom_buffer(data, columns=None, **convert_opts): - """ - Create a StringIO buffer for 'COPY FROM' command. - Deals with Unicode, Int, Float, Date... 
(see ``converters``) - - :data: a sequence/dict of tuples - :columns: list of columns to consider (default to all columns) - :converter_opts: keyword arguements given to converters - """ - # Create a list rather than directly create a StringIO - # to correctly write lines separated by '\n' in a single step - rows = [] - if columns is None: - if isinstance(data[0], (tuple, list)): - columns = range(len(data[0])) - elif isinstance(data[0], dict): - columns = data[0].keys() - else: - raise ValueError('Could not get columns: you must provide columns.') - for row in data: - # Iterate over the different columns and the different values - # and try to convert them to a correct datatype. - # If an error is raised, do not continue. - formatted_row = [] - for col in columns: - try: - value = row[col] - except KeyError: - warnings.warn(u"Column %s is not accessible in row %s" - % (col, row), RuntimeWarning) - # XXX 'value' set to None so that the import does not end in - # error. - # Instead, the extra keys are set to NULL from the - # database point of view. - value = None - for types, converter in _COPYFROM_BUFFER_CONVERTERS: - if isinstance(value, types): - value = converter(value, **convert_opts) - break - else: - raise ValueError("Unsupported value type %s" % type(value)) - # We push the value to the new formatted row - # if the value is not None and could be converted to a string. 
- formatted_row.append(value) - rows.append('\t'.join(formatted_row)) - return StringIO('\n'.join(rows)) - - -# object stores ################################################################# - -class ObjectStore(object): - """Store objects in memory for *faster* validation (development mode) - - But it will not enforce the constraints of the schema and hence will miss some problems - - >>> store = ObjectStore() - >>> user = store.create_entity('CWUser', login=u'johndoe') - >>> group = store.create_entity('CWUser', name=u'unknown') - >>> store.relate(user.eid, 'in_group', group.eid) - """ - def __init__(self): - self.items = [] - self.eids = {} - self.types = {} - self.relations = set() - self.indexes = {} - - def create_entity(self, etype, **data): - data = attrdict(data) - data['eid'] = eid = len(self.items) - self.items.append(data) - self.eids[eid] = data - self.types.setdefault(etype, []).append(eid) - return data - - def relate(self, eid_from, rtype, eid_to, **kwargs): - """Add new relation""" - relation = eid_from, rtype, eid_to - self.relations.add(relation) - return relation - - def commit(self): - """this commit method does nothing by default""" - return - - def flush(self): - """The method is provided so that all stores share a common API""" - pass - - @property - def nb_inserted_entities(self): - return len(self.eids) - @property - def nb_inserted_types(self): - return len(self.types) - @property - def nb_inserted_relations(self): - return len(self.relations) - -class RQLObjectStore(ObjectStore): - """ObjectStore that works with an actual RQL repository (production mode)""" - - def __init__(self, cnx, commit=None): - if commit is not None: - warnings.warn('[3.19] commit argument should not be specified ' - 'as the cnx object already provides it.', - DeprecationWarning, stacklevel=2) - super(RQLObjectStore, self).__init__() - self._cnx = cnx - self._commit = commit or cnx.commit - - def commit(self): - return self._commit() - - def rql(self, *args): - 
return self._cnx.execute(*args) - - @property - def session(self): - warnings.warn('[3.19] deprecated property.', DeprecationWarning, - stacklevel=2) - return self._cnx.repo._get_session(self._cnx.sessionid) - - def create_entity(self, *args, **kwargs): - entity = self._cnx.create_entity(*args, **kwargs) - self.eids[entity.eid] = entity - self.types.setdefault(args[0], []).append(entity.eid) - return entity - - def relate(self, eid_from, rtype, eid_to, **kwargs): - eid_from, rtype, eid_to = super(RQLObjectStore, self).relate( - eid_from, rtype, eid_to, **kwargs) - self.rql('SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype, - {'x': int(eid_from), 'y': int(eid_to)}) - - @deprecated("[3.19] use cnx.find(*args, **kwargs).entities() instead") - def find_entities(self, *args, **kwargs): - return self._cnx.find(*args, **kwargs).entities() - - @deprecated("[3.19] use cnx.find(*args, **kwargs).one() instead") - def find_one_entity(self, *args, **kwargs): - return self._cnx.find(*args, **kwargs).one() - -# the import controller ######################################################## - -class CWImportController(object): - """Controller of the data import process. 
- - >>> ctl = CWImportController(store) - >>> ctl.generators = list_of_data_generators - >>> ctl.data = dict_of_data_tables - >>> ctl.run() - """ - - def __init__(self, store, askerror=0, catcherrors=None, tell=tell, - commitevery=50): - self.store = store - self.generators = None - self.data = {} - self.errors = None - self.askerror = askerror - if catcherrors is None: - catcherrors = askerror - self.catcherrors = catcherrors - self.commitevery = commitevery # set to None to do a single commit - self._tell = tell - - def check(self, type, key, value): - self._checks.setdefault(type, {}).setdefault(key, []).append(value) - - def check_map(self, entity, key, map, default): - try: - entity[key] = map[entity[key]] - except KeyError: - self.check(key, entity[key], None) - entity[key] = default - - def record_error(self, key, msg=None, type=None, value=None, tb=None): - tmp = StringIO() - if type is None: - traceback.print_exc(file=tmp) - else: - traceback.print_exception(type, value, tb, file=tmp) - # use a list to avoid counting a errors instead of one - errorlog = self.errors.setdefault(key, []) - if msg is None: - errorlog.append(tmp.getvalue().splitlines()) - else: - errorlog.append( (msg, tmp.getvalue().splitlines()) ) - - def run(self): - self.errors = {} - if self.commitevery is None: - self.tell('Will commit all or nothing.') - else: - self.tell('Will commit every %s iterations' % self.commitevery) - for func, checks in self.generators: - self._checks = {} - func_name = func.__name__ - self.tell("Run import function '%s'..." 
% func_name) - try: - func(self) - except Exception: - if self.catcherrors: - self.record_error(func_name, 'While calling %s' % func.__name__) - else: - self._print_stats() - raise - for key, func, title, help in checks: - buckets = self._checks.get(key) - if buckets: - err = func(buckets) - if err: - self.errors[title] = (help, err) - try: - txuuid = self.store.commit() - if txuuid is not None: - self.tell('Transaction commited (txuuid: %s)' % txuuid) - except QueryError as ex: - self.tell('Transaction aborted: %s' % ex) - self._print_stats() - if self.errors: - if self.askerror == 2 or (self.askerror and confirm('Display errors ?')): - from pprint import pformat - for errkey, error in self.errors.items(): - self.tell("\n%s (%s): %d\n" % (error[0], errkey, len(error[1]))) - self.tell(pformat(sorted(error[1]))) - - def _print_stats(self): - nberrors = sum(len(err) for err in self.errors.itervalues()) - self.tell('\nImport statistics: %i entities, %i types, %i relations and %i errors' - % (self.store.nb_inserted_entities, - self.store.nb_inserted_types, - self.store.nb_inserted_relations, - nberrors)) - - def get_data(self, key): - return self.data.get(key) - - def index(self, name, key, value, unique=False): - """create a new index - - If unique is set to True, only first occurence will be kept not the following ones - """ - if unique: - try: - if value in self.store.indexes[name][key]: - return - except KeyError: - # we're sure that one is the first occurence; so continue... 
- pass - self.store.indexes.setdefault(name, {}).setdefault(key, []).append(value) - - def tell(self, msg): - self._tell(msg) - - def iter_and_commit(self, datakey): - """iter rows, triggering commit every self.commitevery iterations""" - if self.commitevery is None: - return self.get_data(datakey) - else: - return callfunc_every(self.store.commit, - self.commitevery, - self.get_data(datakey)) - - -class NoHookRQLObjectStore(RQLObjectStore): - """ObjectStore that works with an actual RQL repository (production mode)""" - - def __init__(self, cnx, metagen=None, baseurl=None): - super(NoHookRQLObjectStore, self).__init__(cnx) - self.source = cnx.repo.system_source - self.rschema = cnx.repo.schema.rschema - self.add_relation = self.source.add_relation - if metagen is None: - metagen = MetaGenerator(cnx, baseurl) - self.metagen = metagen - self._nb_inserted_entities = 0 - self._nb_inserted_types = 0 - self._nb_inserted_relations = 0 - # deactivate security - cnx.read_security = False - cnx.write_security = False - - def create_entity(self, etype, **kwargs): - for k, v in kwargs.iteritems(): - kwargs[k] = getattr(v, 'eid', v) - entity, rels = self.metagen.base_etype_dicts(etype) - # make a copy to keep cached entity pristine - entity = copy(entity) - entity.cw_edited = copy(entity.cw_edited) - entity.cw_clear_relation_cache() - entity.cw_edited.update(kwargs, skipsec=False) - entity_source, extid = self.metagen.init_entity(entity) - cnx = self._cnx - self.source.add_entity(cnx, entity) - self.source.add_info(cnx, entity, entity_source, extid) - kwargs = dict() - if inspect.getargspec(self.add_relation).keywords: - kwargs['subjtype'] = entity.cw_etype - for rtype, targeteids in rels.iteritems(): - # targeteids may be a single eid or a list of eids - inlined = self.rschema(rtype).inlined - try: - for targeteid in targeteids: - self.add_relation(cnx, entity.eid, rtype, targeteid, - inlined, **kwargs) - except TypeError: - self.add_relation(cnx, entity.eid, rtype, 
targeteids, - inlined, **kwargs) - self._nb_inserted_entities += 1 - return entity - - def relate(self, eid_from, rtype, eid_to, **kwargs): - assert not rtype.startswith('reverse_') - self.add_relation(self._cnx, eid_from, rtype, eid_to, - self.rschema(rtype).inlined) - if self.rschema(rtype).symmetric: - self.add_relation(self._cnx, eid_to, rtype, eid_from, - self.rschema(rtype).inlined) - self._nb_inserted_relations += 1 - - @property - def nb_inserted_entities(self): - return self._nb_inserted_entities - @property - def nb_inserted_types(self): - return self._nb_inserted_types - @property - def nb_inserted_relations(self): - return self._nb_inserted_relations - - -class MetaGenerator(object): - META_RELATIONS = (META_RTYPES - - VIRTUAL_RTYPES - - set(('eid', 'cwuri', - 'is', 'is_instance_of', 'cw_source'))) - - def __init__(self, cnx, baseurl=None, source=None): - self._cnx = cnx - if baseurl is None: - config = cnx.vreg.config - baseurl = config['base-url'] or config.default_base_url() - if not baseurl[-1] == '/': - baseurl += '/' - self.baseurl = baseurl - if source is None: - source = cnx.repo.system_source - self.source = source - self.create_eid = cnx.repo.system_source.create_eid - self.time = datetime.now() - # attributes/relations shared by all entities of the same type - self.etype_attrs = [] - self.etype_rels = [] - # attributes/relations specific to each entity - self.entity_attrs = ['cwuri'] - #self.entity_rels = [] XXX not handled (YAGNI?) 
- schema = cnx.vreg.schema - rschema = schema.rschema - for rtype in self.META_RELATIONS: - # skip owned_by / created_by if user is the internal manager - if cnx.user.eid == -1 and rtype in ('owned_by', 'created_by'): - continue - if rschema(rtype).final: - self.etype_attrs.append(rtype) - else: - self.etype_rels.append(rtype) - - @cached - def base_etype_dicts(self, etype): - entity = self._cnx.vreg['etypes'].etype_class(etype)(self._cnx) - # entity are "surface" copied, avoid shared dict between copies - del entity.cw_extra_kwargs - entity.cw_edited = EditedEntity(entity) - for attr in self.etype_attrs: - genfunc = self.generate(attr) - if genfunc: - entity.cw_edited.edited_attribute(attr, genfunc(entity)) - rels = {} - for rel in self.etype_rels: - genfunc = self.generate(rel) - if genfunc: - rels[rel] = genfunc(entity) - return entity, rels - - def init_entity(self, entity): - entity.eid = self.create_eid(self._cnx) - extid = entity.cw_edited.get('cwuri') - for attr in self.entity_attrs: - if attr in entity.cw_edited: - # already set, skip this attribute - continue - genfunc = self.generate(attr) - if genfunc: - entity.cw_edited.edited_attribute(attr, genfunc(entity)) - if isinstance(extid, unicode): - extid = extid.encode('utf-8') - return self.source, extid - - def generate(self, rtype): - return getattr(self, 'gen_%s' % rtype, None) - - def gen_cwuri(self, entity): - assert self.baseurl, 'baseurl is None while generating cwuri' - return u'%s%s' % (self.baseurl, entity.eid) - - def gen_creation_date(self, entity): - return self.time - - def gen_modification_date(self, entity): - return self.time - - def gen_created_by(self, entity): - return self._cnx.user.eid - - def gen_owned_by(self, entity): - return self._cnx.user.eid - - -########################################################################### -## SQL object store ####################################################### -########################################################################### 
-class SQLGenObjectStore(NoHookRQLObjectStore): - """Controller of the data import process. This version is based - on direct insertions throught SQL command (COPY FROM or execute many). - - >>> store = SQLGenObjectStore(cnx) - >>> store.create_entity('Person', ...) - >>> store.flush() - """ - - def __init__(self, cnx, dump_output_dir=None, nb_threads_statement=3): - """ - Initialize a SQLGenObjectStore. - - Parameters: - - - cnx: connection on the cubicweb instance - - dump_output_dir: a directory to dump failed statements - for easier recovery. Default is None (no dump). - - nb_threads_statement: number of threads used - for SQL insertion (default is 3). - """ - super(SQLGenObjectStore, self).__init__(cnx) - ### hijack default source - self.source = SQLGenSourceWrapper( - self.source, cnx.vreg.schema, - dump_output_dir=dump_output_dir, - nb_threads_statement=nb_threads_statement) - ### XXX This is done in super().__init__(), but should be - ### redone here to link to the correct source - self.add_relation = self.source.add_relation - self.indexes_etypes = {} - - def flush(self): - """Flush data to the database""" - self.source.flush() - - def relate(self, subj_eid, rtype, obj_eid, **kwargs): - if subj_eid is None or obj_eid is None: - return - # XXX Could subjtype be inferred ? 
- self.source.add_relation(self._cnx, subj_eid, rtype, obj_eid, - self.rschema(rtype).inlined, **kwargs) - if self.rschema(rtype).symmetric: - self.source.add_relation(self._cnx, obj_eid, rtype, subj_eid, - self.rschema(rtype).inlined, **kwargs) - - def drop_indexes(self, etype): - """Drop indexes for a given entity type""" - if etype not in self.indexes_etypes: - cu = self._cnx.cnxset.cu - def index_to_attr(index): - """turn an index name to (database) attribute name""" - return index.replace(etype.lower(), '').replace('idx', '').strip('_') - indices = [(index, index_to_attr(index)) - for index in self.source.dbhelper.list_indices(cu, etype) - # Do not consider 'cw_etype_pkey' index - if not index.endswith('key')] - self.indexes_etypes[etype] = indices - for index, attr in self.indexes_etypes[etype]: - self._cnx.system_sql('DROP INDEX %s' % index) - - def create_indexes(self, etype): - """Recreate indexes for a given entity type""" - for index, attr in self.indexes_etypes.get(etype, []): - sql = 'CREATE INDEX %s ON cw_%s(%s)' % (index, etype, attr) - self._cnx.system_sql(sql) - - -########################################################################### -## SQL Source ############################################################# -########################################################################### - -class SQLGenSourceWrapper(object): - - def __init__(self, system_source, schema, - dump_output_dir=None, nb_threads_statement=3): - self.system_source = system_source - self._sql = threading.local() - # Explicitely backport attributes from system source - self._storage_handler = self.system_source._storage_handler - self.preprocess_entity = self.system_source.preprocess_entity - self.sqlgen = self.system_source.sqlgen - self.uri = self.system_source.uri - self.eid = self.system_source.eid - # Directory to write temporary files - self.dump_output_dir = dump_output_dir - # Allow to execute code with SQLite backend that does - # not support (yet...) 
copy_from - # XXX Should be dealt with in logilab.database - spcfrom = system_source.dbhelper.dbapi_module.support_copy_from - self.support_copy_from = spcfrom - self.dbencoding = system_source.dbhelper.dbencoding - self.nb_threads_statement = nb_threads_statement - # initialize thread-local data for main thread - self.init_thread_locals() - self._inlined_rtypes_cache = {} - self._fill_inlined_rtypes_cache(schema) - self.schema = schema - self.do_fti = False - - def _fill_inlined_rtypes_cache(self, schema): - cache = self._inlined_rtypes_cache - for eschema in schema.entities(): - for rschema in eschema.ordered_relations(): - if rschema.inlined: - cache[eschema.type] = SQL_PREFIX + rschema.type - - def init_thread_locals(self): - """initializes thread-local data""" - self._sql.entities = defaultdict(list) - self._sql.relations = {} - self._sql.inlined_relations = {} - # keep track, for each eid of the corresponding data dict - self._sql.eid_insertdicts = {} - - def flush(self): - print 'starting flush' - _entities_sql = self._sql.entities - _relations_sql = self._sql.relations - _inlined_relations_sql = self._sql.inlined_relations - _insertdicts = self._sql.eid_insertdicts - try: - # try, for each inlined_relation, to find if we're also creating - # the host entity (i.e. the subject of the relation). 
- # In that case, simply update the insert dict and remove - # the need to make the - # UPDATE statement - for statement, datalist in _inlined_relations_sql.iteritems(): - new_datalist = [] - # for a given inlined relation, - # browse each couple to be inserted - for data in datalist: - keys = list(data) - # For inlined relations, it exists only two case: - # (rtype, cw_eid) or (cw_eid, rtype) - if keys[0] == 'cw_eid': - rtype = keys[1] - else: - rtype = keys[0] - updated_eid = data['cw_eid'] - if updated_eid in _insertdicts: - _insertdicts[updated_eid][rtype] = data[rtype] - else: - # could not find corresponding insert dict, keep the - # UPDATE query - new_datalist.append(data) - _inlined_relations_sql[statement] = new_datalist - _import_statements(self.system_source.get_connection, - _entities_sql.items() - + _relations_sql.items() - + _inlined_relations_sql.items(), - dump_output_dir=self.dump_output_dir, - nb_threads=self.nb_threads_statement, - support_copy_from=self.support_copy_from, - encoding=self.dbencoding) - finally: - _entities_sql.clear() - _relations_sql.clear() - _insertdicts.clear() - _inlined_relations_sql.clear() - - def add_relation(self, cnx, subject, rtype, object, - inlined=False, **kwargs): - if inlined: - _sql = self._sql.inlined_relations - data = {'cw_eid': subject, SQL_PREFIX + rtype: object} - subjtype = kwargs.get('subjtype') - if subjtype is None: - # Try to infer it - targets = [t.type for t in - self.schema.rschema(rtype).subjects()] - if len(targets) == 1: - subjtype = targets[0] - else: - raise ValueError('You should give the subject etype for ' - 'inlined relation %s' - ', as it cannot be inferred: ' - 'this type is given as keyword argument ' - '``subjtype``'% rtype) - statement = self.sqlgen.update(SQL_PREFIX + subjtype, - data, ['cw_eid']) - else: - _sql = self._sql.relations - data = {'eid_from': subject, 'eid_to': object} - statement = self.sqlgen.insert('%s_relation' % rtype, data) - if statement in _sql: - 
_sql[statement].append(data) - else: - _sql[statement] = [data] - - def add_entity(self, cnx, entity): - with self._storage_handler(entity, 'added'): - attrs = self.preprocess_entity(entity) - rtypes = self._inlined_rtypes_cache.get(entity.cw_etype, ()) - if isinstance(rtypes, str): - rtypes = (rtypes,) - for rtype in rtypes: - if rtype not in attrs: - attrs[rtype] = None - sql = self.sqlgen.insert(SQL_PREFIX + entity.cw_etype, attrs) - self._sql.eid_insertdicts[entity.eid] = attrs - self._append_to_entities(sql, attrs) - - def _append_to_entities(self, sql, attrs): - self._sql.entities[sql].append(attrs) - - def _handle_insert_entity_sql(self, cnx, sql, attrs): - # We have to overwrite the source given in parameters - # as here, we directly use the system source - attrs['asource'] = self.system_source.uri - self._append_to_entities(sql, attrs) - - def _handle_is_relation_sql(self, cnx, sql, attrs): - self._append_to_entities(sql, attrs) - - def _handle_is_instance_of_sql(self, cnx, sql, attrs): - self._append_to_entities(sql, attrs) - - def _handle_source_relation_sql(self, cnx, sql, attrs): - self._append_to_entities(sql, attrs) - - # add_info is _copypasted_ from the one in NativeSQLSource. We want it - # there because it will use the _handlers of the SQLGenSourceWrapper, which - # are not like the ones in the native source. 
- def add_info(self, cnx, entity, source, extid): - """add type and source info for an eid into the system table""" - # begin by inserting eid/type/source/extid into the entities table - if extid is not None: - assert isinstance(extid, str) - extid = b64encode(extid) - attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': extid, - 'asource': source.uri} - self._handle_insert_entity_sql(cnx, self.sqlgen.insert('entities', attrs), attrs) - # insert core relations: is, is_instance_of and cw_source - try: - self._handle_is_relation_sql(cnx, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)', - (entity.eid, eschema_eid(cnx, entity.e_schema))) - except IndexError: - # during schema serialization, skip - pass - else: - for eschema in entity.e_schema.ancestors() + [entity.e_schema]: - self._handle_is_relation_sql(cnx, - 'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)', - (entity.eid, eschema_eid(cnx, eschema))) - if 'CWSource' in self.schema and source.eid is not None: # else, cw < 3.10 - self._handle_is_relation_sql(cnx, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)', - (entity.eid, source.eid)) - # now we can update the full text index - if self.do_fti and self.need_fti_indexation(entity.cw_etype): - self.index_entity(cnx, entity=entity) diff -r 2fe19ba68daa -r 2fdf67ef3341 dataimport/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dataimport/__init__.py Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,35 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Package containing various utilities to import data into cubicweb.""" + + +def callfunc_every(func, number, iterable): + """yield items of `iterable` one by one and call function `func` + every `number` iterations. Always call function `func` at the end. + """ + for idx, item in enumerate(iterable): + yield item + if not idx % number: + func() + func() + +# import for backward compat +from cubicweb.dataimport.stores import * +from cubicweb.dataimport.pgstore import * +from cubicweb.dataimport.csv import * +from cubicweb.dataimport.deprecated import * diff -r 2fe19ba68daa -r 2fdf67ef3341 dataimport/csv.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dataimport/csv.py Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,113 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""Functions to help importing CSV data""" + +from __future__ import absolute_import + +import csv as csvmod +import warnings +import os.path as osp + +from logilab.common import shellutils + + +def count_lines(stream_or_filename): + if isinstance(stream_or_filename, basestring): + f = open(stream_or_filename) + else: + f = stream_or_filename + f.seek(0) + for i, line in enumerate(f): + pass + f.seek(0) + return i+1 + + +def ucsvreader_pb(stream_or_path, encoding='utf-8', delimiter=',', quotechar='"', + skipfirst=False, withpb=True, skip_empty=True, separator=None, + quote=None): + """same as :func:`ucsvreader` but a progress bar is displayed as we iter on rows""" + if separator is not None: + delimiter = separator + warnings.warn("[3.20] 'separator' kwarg is deprecated, use 'delimiter' instead") + if quote is not None: + quotechar = quote + warnings.warn("[3.20] 'quote' kwarg is deprecated, use 'quotechar' instead") + if isinstance(stream_or_path, basestring): + if not osp.exists(stream_or_path): + raise Exception("file doesn't exists: %s" % stream_or_path) + stream = open(stream_or_path) + else: + stream = stream_or_path + rowcount = count_lines(stream) + if skipfirst: + rowcount -= 1 + if withpb: + pb = shellutils.ProgressBar(rowcount, 50) + for urow in ucsvreader(stream, encoding, delimiter, quotechar, + skipfirst=skipfirst, skip_empty=skip_empty): + yield urow + if withpb: + pb.update() + print ' %s rows imported' % rowcount + + +def ucsvreader(stream, encoding='utf-8', delimiter=',', quotechar='"', + skipfirst=False, ignore_errors=False, skip_empty=True, + separator=None, quote=None): + """A csv reader that accepts files with any encoding and outputs unicode + strings + + if skip_empty (the default), lines without any values specified (only + separators) will be skipped. This is useful for Excel exports which may be + full of such lines. 
+ """ + if separator is not None: + delimiter = separator + warnings.warn("[3.20] 'separator' kwarg is deprecated, use 'delimiter' instead") + if quote is not None: + quotechar = quote + warnings.warn("[3.20] 'quote' kwarg is deprecated, use 'quotechar' instead") + it = iter(csvmod.reader(stream, delimiter=delimiter, quotechar=quotechar)) + if not ignore_errors: + if skipfirst: + it.next() + for row in it: + decoded = [item.decode(encoding) for item in row] + if not skip_empty or any(decoded): + yield decoded + else: + if skipfirst: + try: + row = it.next() + except csvmod.Error: + pass + # Safe version, that can cope with error in CSV file + while True: + try: + row = it.next() + # End of CSV, break + except StopIteration: + break + # Error in CSV, ignore line and continue + except csvmod.Error: + continue + decoded = [item.decode(encoding) for item in row] + if not skip_empty or any(decoded): + yield decoded + diff -r 2fe19ba68daa -r 2fdf67ef3341 dataimport/deprecated.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dataimport/deprecated.py Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,460 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Old and deprecated dataimport API that provides tools to import tabular data. 
+ + +Example of use (run this with `cubicweb-ctl shell instance import-script.py`): + +.. sourcecode:: python + + from cubicweb.dataimport import * + # define data generators + GENERATORS = [] + + USERS = [('Prenom', 'firstname', ()), + ('Nom', 'surname', ()), + ('Identifiant', 'login', ()), + ] + + def gen_users(ctl): + for row in ctl.iter_and_commit('utilisateurs'): + entity = mk_entity(row, USERS) + entity['upassword'] = 'motdepasse' + ctl.check('login', entity['login'], None) + entity = ctl.store.prepare_insert_entity('CWUser', **entity) + email = ctl.store.prepare_insert_entity('EmailAddress', address=row['email']) + ctl.store.prepare_insert_relation(entity, 'use_email', email) + ctl.store.rql('SET U in_group G WHERE G name "users", U eid %(x)s', {'x': entity}) + + CHK = [('login', check_doubles, 'Utilisateurs Login', + 'Deux utilisateurs ne devraient pas avoir le meme login.'), + ] + + GENERATORS.append( (gen_users, CHK) ) + + # create controller + ctl = CWImportController(RQLObjectStore(cnx)) + ctl.askerror = 1 + ctl.generators = GENERATORS + ctl.data['utilisateurs'] = lazytable(ucsvreader(open('users.csv'))) + # run + ctl.run() + +.. BUG file with one column are not parsable +.. TODO rollback() invocation is not possible yet +""" + +import sys +import traceback +from StringIO import StringIO + +from logilab.common import attrdict, shellutils +from logilab.common.date import strptime +from logilab.common.deprecation import deprecated, class_deprecated + +from cubicweb import QueryError +from cubicweb.dataimport import callfunc_every + + +@deprecated('[3.21] deprecated') +def lazytable(reader): + """The first row is taken to be the header of the table and + used to output a dict for each row of data. 
+ + >>> data = lazytable(ucsvreader(open(filename))) + """ + header = reader.next() + for row in reader: + yield dict(zip(header, row)) + + +@deprecated('[3.21] deprecated') +def lazydbtable(cu, table, headers, orderby=None): + """return an iterator on rows of a sql table. On each row, fetch columns + defined in headers and return values as a dictionary. + + >>> data = lazydbtable(cu, 'experimentation', ('id', 'nickname', 'gps')) + """ + sql = 'SELECT %s FROM %s' % (','.join(headers), table,) + if orderby: + sql += ' ORDER BY %s' % ','.join(orderby) + cu.execute(sql) + while True: + row = cu.fetchone() + if row is None: + break + yield dict(zip(headers, row)) + + +@deprecated('[3.21] deprecated') +def tell(msg): + print msg + + +@deprecated('[3.21] deprecated') +def confirm(question): + """A confirm function that asks for yes/no/abort and exits on abort.""" + answer = shellutils.ASK.ask(question, ('Y', 'n', 'abort'), 'Y') + if answer == 'abort': + sys.exit(1) + return answer == 'Y' + + +class catch_error(object): + """Helper for @contextmanager decorator.""" + __metaclass__ = class_deprecated + __deprecation_warning__ = '[3.21] deprecated' + + def __init__(self, ctl, key='unexpected error', msg=None): + self.ctl = ctl + self.key = key + self.msg = msg + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + if type is not None: + if issubclass(type, (KeyboardInterrupt, SystemExit)): + return # re-raise + if self.ctl.catcherrors: + self.ctl.record_error(self.key, None, type, value, traceback) + return True # silent + +@deprecated('[3.21] deprecated') +def mk_entity(row, map): + """Return a dict made from sanitized mapped values. 
+ + ValueError can be raised on unexpected values found in checkers + + >>> row = {'myname': u'dupont'} + >>> map = [('myname', u'name', (call_transform_method('title'),))] + >>> mk_entity(row, map) + {'name': u'Dupont'} + >>> row = {'myname': u'dupont', 'optname': u''} + >>> map = [('myname', u'name', (call_transform_method('title'),)), + ... ('optname', u'MARKER', (optional,))] + >>> mk_entity(row, map) + {'name': u'Dupont', 'optname': None} + """ + res = {} + assert isinstance(row, dict) + assert isinstance(map, list) + for src, dest, funcs in map: + try: + res[dest] = row[src] + except KeyError: + continue + try: + for func in funcs: + res[dest] = func(res[dest]) + if res[dest] is None: + break + except ValueError as err: + raise ValueError('error with %r field: %s' % (src, err)), None, sys.exc_info()[-1] + return res + + +# base sanitizing/coercing functions ########################################### + +@deprecated('[3.21] deprecated') +def optional(value): + """checker to filter optional field + + If value is undefined (ex: empty string), return None that will + break the checkers validation chain + + General use is to add 'optional' check in first condition to avoid + ValueError by further checkers + + >>> MAPPER = [(u'value', 'value', (optional, int))] + >>> row = {'value': u'XXX'} + >>> mk_entity(row, MAPPER) + {'value': None} + >>> row = {'value': u'100'} + >>> mk_entity(row, MAPPER) + {'value': 100} + """ + if value: + return value + return None + + +@deprecated('[3.21] deprecated') +def required(value): + """raise ValueError if value is empty + + This check should be often found in last position in the chain. + """ + if value: + return value + raise ValueError("required") + + +@deprecated('[3.21] deprecated') +def todatetime(format='%d/%m/%Y'): + """return a transformation function to turn string input value into a + `datetime.datetime` instance, using given format. 
+ + Follow it by `todate` or `totime` functions from `logilab.common.date` if + you want a `date`/`time` instance instead of `datetime`. + """ + def coerce(value): + return strptime(value, format) + return coerce + + +@deprecated('[3.21] deprecated') +def call_transform_method(methodname, *args, **kwargs): + """return value returned by calling the given method on input""" + def coerce(value): + return getattr(value, methodname)(*args, **kwargs) + return coerce + + +@deprecated('[3.21] deprecated') +def call_check_method(methodname, *args, **kwargs): + """check value returned by calling the given method on input is true, + else raise ValueError + """ + def check(value): + if getattr(value, methodname)(*args, **kwargs): + return value + raise ValueError('%s not verified on %r' % (methodname, value)) + return check + + +# base integrity checking functions ############################################ + +@deprecated('[3.21] deprecated') +def check_doubles(buckets): + """Extract the keys that have more than one item in their bucket.""" + return [(k, len(v)) for k, v in buckets.items() if len(v) > 1] + + +@deprecated('[3.21] deprecated') +def check_doubles_not_none(buckets): + """Extract the keys that have more than one item in their bucket.""" + return [(k, len(v)) for k, v in buckets.items() + if k is not None and len(v) > 1] + + +class ObjectStore(object): + """Store objects in memory for *faster* validation (development mode) + + But it will not enforce the constraints of the schema and hence will miss some problems + + >>> store = ObjectStore() + >>> user = store.prepare_insert_entity('CWUser', login=u'johndoe') + >>> group = store.prepare_insert_entity('CWUser', name=u'unknown') + >>> store.prepare_insert_relation(user, 'in_group', group) + """ + __metaclass__ = class_deprecated + __deprecation_warning__ = '[3.21] use the new importer API' + + def __init__(self): + self.items = [] + self.eids = {} + self.types = {} + self.relations = set() + self.indexes = {} + + 
def prepare_insert_entity(self, etype, **data): + """Given an entity type, attributes and inlined relations, return an eid for the entity that + would be inserted with a real store. + """ + data = attrdict(data) + data['eid'] = eid = len(self.items) + self.items.append(data) + self.eids[eid] = data + self.types.setdefault(etype, []).append(eid) + return eid + + def prepare_update_entity(self, etype, eid, **kwargs): + """Given an entity type and eid, updates the corresponding fake entity with specified + attributes and inlined relations. + """ + assert eid in self.types[etype], 'Trying to update with wrong type {}'.format(etype) + data = self.eids[eid] + data.update(kwargs) + + def prepare_insert_relation(self, eid_from, rtype, eid_to, **kwargs): + """Store into the `relations` attribute that a relation ``rtype`` exists between entities + with eids ``eid_from`` and ``eid_to``. + """ + relation = eid_from, rtype, eid_to + self.relations.add(relation) + return relation + + def flush(self): + """Nothing to flush for this store.""" + pass + + def commit(self): + """Nothing to commit for this store.""" + return + + def finish(self): + """Nothing to do once import is terminated for this store.""" + pass + + @property + def nb_inserted_entities(self): + return len(self.eids) + + @property + def nb_inserted_types(self): + return len(self.types) + + @property + def nb_inserted_relations(self): + return len(self.relations) + + @deprecated('[3.21] use prepare_insert_entity instead') + def create_entity(self, etype, **data): + self.prepare_insert_entity(etype, **data) + return attrdict(data) + + @deprecated('[3.21] use prepare_insert_relation instead') + def relate(self, eid_from, rtype, eid_to, **kwargs): + self.prepare_insert_relation(eid_from, rtype, eid_to, **kwargs) + + +class CWImportController(object): + """Controller of the data import process. 
+ + >>> ctl = CWImportController(store) + >>> ctl.generators = list_of_data_generators + >>> ctl.data = dict_of_data_tables + >>> ctl.run() + """ + __metaclass__ = class_deprecated + __deprecation_warning__ = '[3.21] use the new importer API' + + def __init__(self, store, askerror=0, catcherrors=None, tell=tell, + commitevery=50): + self.store = store + self.generators = None + self.data = {} + self.errors = None + self.askerror = askerror + if catcherrors is None: + catcherrors = askerror + self.catcherrors = catcherrors + self.commitevery = commitevery # set to None to do a single commit + self._tell = tell + + def check(self, type, key, value): + self._checks.setdefault(type, {}).setdefault(key, []).append(value) + + def check_map(self, entity, key, map, default): + try: + entity[key] = map[entity[key]] + except KeyError: + self.check(key, entity[key], None) + entity[key] = default + + def record_error(self, key, msg=None, type=None, value=None, tb=None): + tmp = StringIO() + if type is None: + traceback.print_exc(file=tmp) + else: + traceback.print_exception(type, value, tb, file=tmp) + # use a list to avoid counting a errors instead of one + errorlog = self.errors.setdefault(key, []) + if msg is None: + errorlog.append(tmp.getvalue().splitlines()) + else: + errorlog.append( (msg, tmp.getvalue().splitlines()) ) + + def run(self): + self.errors = {} + if self.commitevery is None: + self.tell('Will commit all or nothing.') + else: + self.tell('Will commit every %s iterations' % self.commitevery) + for func, checks in self.generators: + self._checks = {} + func_name = func.__name__ + self.tell("Run import function '%s'..." 
% func_name) + try: + func(self) + except Exception: + if self.catcherrors: + self.record_error(func_name, 'While calling %s' % func.__name__) + else: + self._print_stats() + raise + for key, func, title, help in checks: + buckets = self._checks.get(key) + if buckets: + err = func(buckets) + if err: + self.errors[title] = (help, err) + try: + txuuid = self.store.commit() + if txuuid is not None: + self.tell('Transaction commited (txuuid: %s)' % txuuid) + except QueryError as ex: + self.tell('Transaction aborted: %s' % ex) + self._print_stats() + if self.errors: + if self.askerror == 2 or (self.askerror and confirm('Display errors ?')): + from pprint import pformat + for errkey, error in self.errors.items(): + self.tell("\n%s (%s): %d\n" % (error[0], errkey, len(error[1]))) + self.tell(pformat(sorted(error[1]))) + + def _print_stats(self): + nberrors = sum(len(err) for err in self.errors.itervalues()) + self.tell('\nImport statistics: %i entities, %i types, %i relations and %i errors' + % (self.store.nb_inserted_entities, + self.store.nb_inserted_types, + self.store.nb_inserted_relations, + nberrors)) + + def get_data(self, key): + return self.data.get(key) + + def index(self, name, key, value, unique=False): + """create a new index + + If unique is set to True, only first occurence will be kept not the following ones + """ + if unique: + try: + if value in self.store.indexes[name][key]: + return + except KeyError: + # we're sure that one is the first occurence; so continue... 
+ pass + self.store.indexes.setdefault(name, {}).setdefault(key, []).append(value) + + def tell(self, msg): + self._tell(msg) + + def iter_and_commit(self, datakey): + """iter rows, triggering commit every self.commitevery iterations""" + if self.commitevery is None: + return self.get_data(datakey) + else: + return callfunc_every(self.store.commit, + self.commitevery, + self.get_data(datakey)) + + diff -r 2fe19ba68daa -r 2fdf67ef3341 dataimport/importer.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dataimport/importer.py Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,417 @@ +# copyright 2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr -- mailto:contact@logilab.fr +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . +"""Data import of external entities. + +Main entry points: + +.. autoclass:: ExtEntitiesImporter +.. autoclass:: ExtEntity + +Utilities: + +.. autofunction:: cwuri2eid +.. autoclass:: RelationMapping +.. autofunction:: cubicweb.dataimport.importer.use_extid_as_cwuri +""" + +from collections import defaultdict +import logging + +from logilab.mtconverter import xml_escape + + +def cwuri2eid(cnx, etypes, source_eid=None): + """Return a dictionary mapping cwuri to eid for entities of the given entity types and / or + source. 
+ """ + assert source_eid or etypes, 'no entity types nor source specified' + rql = 'Any U, X WHERE X cwuri U' + args = {} + if len(etypes) == 1: + rql += ', X is %s' % etypes[0] + elif etypes: + rql += ', X is IN (%s)' % ','.join(etypes) + if source_eid is not None: + rql += ', X cw_source S, S eid %(s)s' + args['s'] = source_eid + return dict(cnx.execute(rql, args)) + + +def use_extid_as_cwuri(extid2eid): + """Return a generator of :class:`ExtEntity` objects that will set `cwuri` + using entity's extid if the entity does not exist yet and has no `cwuri` + defined. + + `extid2eid` is an extid to eid dictionary coming from an + :class:`ExtEntitiesImporter` instance. + + Example usage: + + .. code-block:: python + + importer = SKOSExtEntitiesImporter(cnx, store, import_log) + set_cwuri = use_extid_as_cwuri(importer.extid2eid) + importer.import_entities(set_cwuri(extentities)) + """ + def use_extid_as_cwuri_filter(extentities): + for extentity in extentities: + if extentity.extid not in extid2eid: + extentity.values.setdefault('cwuri', set([unicode(extentity.extid)])) + yield extentity + return use_extid_as_cwuri_filter + + +class RelationMapping(object): + """Read-only mapping from relation type to set of related (subject, object) eids. + + If `source` is specified, only returns relations implying entities from + this source. + """ + + def __init__(self, cnx, source=None): + self.cnx = cnx + self._rql_template = 'Any S,O WHERE S {} O' + self._kwargs = {} + if source is not None: + self._rql_template += ', S cw_source SO, O cw_source SO, SO eid %(s)s' + self._kwargs['s'] = source.eid + + def __getitem__(self, rtype): + """Return a set of (subject, object) eids already related by `rtype`""" + rql = self._rql_template.format(rtype) + return set(tuple(x) for x in self.cnx.execute(rql, self._kwargs)) + + +class ExtEntity(object): + """Transitional representation of an entity for use in data importer. 
+ + An external entity has the following properties: + + * ``extid`` (external id), an identifier for the ext entity, + + * ``etype`` (entity type), a string which must be the name of one entity type in the schema + (eg. ``'Person'``, ``'Animal'``, ...), + + * ``values``, a dictionary whose keys are attribute or relation names from the schema (eg. + ``'first_name'``, ``'friend'``), and whose values are *sets* + + For instance: + + .. code-block:: python + + ext_entity.extid = 'http://example.org/person/debby' + ext_entity.etype = 'Person' + ext_entity.values = {'first_name': set([u"Deborah", u"Debby"]), + 'friend': set(['http://example.org/person/john'])} + + """ + + def __init__(self, etype, extid, values=None): + self.etype = etype + self.extid = extid + if values is None: + values = {} + self.values = values + self._schema = None + + def __repr__(self): + return '<%s %s %s>' % (self.etype, self.extid, self.values) + + def iter_rdefs(self): + """Yield (key, rtype, role) defined in `.values` dict, with: + + * `key` is the original key in `.values` (i.e. the relation type or a 2-uple (relation type, + role)) + + * `rtype` is a yams relation type, expected to be found in the schema (attribute or + relation) + + * `role` is the role of the entity in the relation, 'subject' or 'object' + + Iteration is done on a copy of the keys so values may be inserted/deleted during it. 
+ """ + for key in list(self.values): + if isinstance(key, tuple): + rtype, role = key + assert role in ('subject', 'object'), key + yield key, rtype, role + else: + yield key, key, 'subject' + + def prepare(self, schema): + """Prepare an external entity for later insertion: + + * ensure attributes and inlined relations have a single value + * turn set([value]) into value and remove key associated to empty set + * remove non inlined relations and return them as a [(e1key, relation, e2key)] list + + Return a list of non inlined relations that may be inserted later, each relations defined by + a 3-tuple (subject extid, relation type, object extid). + + Take care the importer may call this method several times. + """ + assert self._schema is None, 'prepare() has already been called for %s' % self + self._schema = schema + eschema = schema.eschema(self.etype) + deferred = [] + entity_dict = self.values + for key, rtype, role in self.iter_rdefs(): + rschema = schema.rschema(rtype) + if rschema.final or (rschema.inlined and role == 'subject'): + assert len(entity_dict[key]) <= 1, \ + "more than one value for %s: %s (%s)" % (rtype, entity_dict[key], self.extid) + if entity_dict[key]: + entity_dict[rtype] = entity_dict[key].pop() + if key != rtype: + del entity_dict[key] + if (rschema.final and eschema.has_metadata(rtype, 'format') + and not rtype + '_format' in entity_dict): + entity_dict[rtype + '_format'] = u'text/plain' + else: + del entity_dict[key] + else: + for target_extid in entity_dict.pop(key): + if role == 'subject': + deferred.append((self.extid, rtype, target_extid)) + else: + deferred.append((target_extid, rtype, self.extid)) + return deferred + + def is_ready(self, extid2eid): + """Return True if the ext entity is ready, i.e. has all the URIs used in inlined relations + currently existing. 
+ """ + assert self._schema, 'prepare() method should be called first on %s' % self + # as .prepare has been called, we know that .values only contains subject relation *type* as + # key (no more (rtype, role) tuple) + schema = self._schema + entity_dict = self.values + for rtype in entity_dict: + rschema = schema.rschema(rtype) + if not rschema.final: + # .prepare() should drop other cases from the entity dict + assert rschema.inlined + if not entity_dict[rtype] in extid2eid: + return False + # entity is ready, replace all relation's extid by eids + for rtype in entity_dict: + rschema = schema.rschema(rtype) + if rschema.inlined: + entity_dict[rtype] = extid2eid[entity_dict[rtype]] + return True + + +class ExtEntitiesImporter(object): + """This class is responsible for importing externals entities, that is instances of + :class:`ExtEntity`, into CubicWeb entities. + + :param schema: the CubicWeb's instance schema + :param store: a CubicWeb `Store` + :param extid2eid: optional {extid: eid} dictionary giving information on existing entities. It + will be completed during import. You may want to use :func:`cwuri2eid` to build it. + :param existing_relation: optional {rtype: set((subj eid, obj eid))} mapping giving information on + existing relations of a given type. You may want to use :class:`RelationMapping` to build it. + :param etypes_order_hint: optional ordered iterable on entity types, giving an hint on the order in + which they should be attempted to be imported + :param import_log: optional object implementing the :class:`SimpleImportLog` interface to record + events occuring during the import + :param raise_on_error: optional boolean flag - default to false, indicating whether errors should + be raised or logged. You usually want them to be raised during test but to be logged in + production. + + Instances of this class are meant to import external entities through :meth:`import_entities` + which handles a stream of :class:`ExtEntity`. 
One may then plug arbitrary filters into the + external entities stream. + + .. automethod:: import_entities + + """ + + def __init__(self, schema, store, extid2eid=None, existing_relations=None, + etypes_order_hint=(), import_log=None, raise_on_error=False): + self.schema = schema + self.store = store + self.extid2eid = extid2eid if extid2eid is not None else {} + self.existing_relations = (existing_relations if existing_relations is not None + else defaultdict(set)) + self.etypes_order_hint = etypes_order_hint + if import_log is None: + import_log = SimpleImportLog('') + self.import_log = import_log + self.raise_on_error = raise_on_error + # set of created/updated eids + self.created = set() + self.updated = set() + + def import_entities(self, ext_entities): + """Import given external entities (:class:`ExtEntity`) stream (usually a generator).""" + # {etype: [etype dict]} of entities that are in the import queue + queue = {} + # order entity dictionaries then create/update them + deferred = self._import_entities(ext_entities, queue) + # create deferred relations that don't exist already + missing_relations = self.prepare_insert_deferred_relations(deferred) + self._warn_about_missing_work(queue, missing_relations) + + def _import_entities(self, ext_entities, queue): + extid2eid = self.extid2eid + deferred = {} # non inlined relations that may be deferred + self.import_log.record_debug('importing entities') + for ext_entity in self.iter_ext_entities(ext_entities, deferred, queue): + try: + eid = extid2eid[ext_entity.extid] + except KeyError: + self.prepare_insert_entity(ext_entity) + else: + if ext_entity.values: + self.prepare_update_entity(ext_entity, eid) + return deferred + + def iter_ext_entities(self, ext_entities, deferred, queue): + """Yield external entities in an order which attempts to satisfy + schema constraints (inlined / cardinality) and to optimize the import. 
+ """ + schema = self.schema + extid2eid = self.extid2eid + for ext_entity in ext_entities: + # check data in the transitional representation and prepare it for + # later insertion in the database + for subject_uri, rtype, object_uri in ext_entity.prepare(schema): + deferred.setdefault(rtype, set()).add((subject_uri, object_uri)) + if not ext_entity.is_ready(extid2eid): + queue.setdefault(ext_entity.etype, []).append(ext_entity) + continue + yield ext_entity + # check for some entities in the queue that may now be ready. We'll have to restart + # search for ready entities until no one is generated + new = True + while new: + new = False + for etype in self.etypes_order_hint: + if etype in queue: + new_queue = [] + for ext_entity in queue[etype]: + if ext_entity.is_ready(extid2eid): + yield ext_entity + # may unlock entity previously handled within this loop + new = True + else: + new_queue.append(ext_entity) + if new_queue: + queue[etype][:] = new_queue + else: + del queue[etype] + + def prepare_insert_entity(self, ext_entity): + """Call the store to prepare insertion of the given external entity""" + eid = self.store.prepare_insert_entity(ext_entity.etype, **ext_entity.values) + self.extid2eid[ext_entity.extid] = eid + self.created.add(eid) + return eid + + def prepare_update_entity(self, ext_entity, eid): + """Call the store to prepare update of the given external entity""" + self.store.prepare_update_entity(ext_entity.etype, eid, **ext_entity.values) + self.updated.add(eid) + + def prepare_insert_deferred_relations(self, deferred): + """Call the store to insert deferred relations (not handled during insertion/update for + entities). Return a list of relations `[(subj ext id, obj ext id)]` that may not be inserted + because the target entities don't exists yet. 
+ """ + prepare_insert_relation = self.store.prepare_insert_relation + rschema = self.schema.rschema + extid2eid = self.extid2eid + missing_relations = [] + for rtype, relations in deferred.items(): + self.import_log.record_debug('importing %s %s relations' % (len(relations), rtype)) + symmetric = rschema(rtype).symmetric + existing = self.existing_relations[rtype] + for subject_uri, object_uri in relations: + try: + subject_eid = extid2eid[subject_uri] + object_eid = extid2eid[object_uri] + except KeyError: + missing_relations.append((subject_uri, rtype, object_uri)) + continue + if (subject_eid, object_eid) not in existing: + prepare_insert_relation(subject_eid, rtype, object_eid) + existing.add((subject_eid, object_eid)) + if symmetric: + existing.add((object_eid, subject_eid)) + return missing_relations + + def _warn_about_missing_work(self, queue, missing_relations): + error = self.import_log.record_error + if queue: + msgs = ["can't create some entities, is there some cycle or " + "missing data?"] + for ext_entities in queue.values(): + for ext_entity in ext_entities: + msgs.append(str(ext_entity)) + map(error, msgs) + if self.raise_on_error: + raise Exception('\n'.join(msgs)) + if missing_relations: + msgs = ["can't create some relations, is there missing data?"] + for subject_uri, rtype, object_uri in missing_relations: + msgs.append("%s %s %s" % (subject_uri, rtype, object_uri)) + map(error, msgs) + if self.raise_on_error: + raise Exception('\n'.join(msgs)) + + +class SimpleImportLog(object): + """Fake CWDataImport log using a simple text format. + + Useful to display logs in the UI instead of storing them to the + database. 
+ """ + + def __init__(self, filename): + self.logs = [] + self.filename = filename + + def record_debug(self, msg, path=None, line=None): + self._log(logging.DEBUG, msg, path, line) + + def record_info(self, msg, path=None, line=None): + self._log(logging.INFO, msg, path, line) + + def record_warning(self, msg, path=None, line=None): + self._log(logging.WARNING, msg, path, line) + + def record_error(self, msg, path=None, line=None): + self._log(logging.ERROR, msg, path, line) + + def record_fatal(self, msg, path=None, line=None): + self._log(logging.FATAL, msg, path, line) + + def _log(self, severity, msg, path, line): + encodedmsg = u'%s\t%s\t%s\t%s' % (severity, self.filename, + line or u'', msg) + self.logs.append(encodedmsg) + + +class HTMLImportLog(SimpleImportLog): + """Fake CWDataImport log using a simple HTML format.""" + def __init__(self, filename): + super(HTMLImportLog, self).__init__(xml_escape(filename)) + + def _log(self, severity, msg, path, line): + encodedmsg = u'%s\t%s\t%s\t%s
' % (severity, self.filename, + line or u'', xml_escape(msg)) + self.logs.append(encodedmsg) diff -r 2fe19ba68daa -r 2fdf67ef3341 dataimport/pgstore.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dataimport/pgstore.py Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,472 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""Postgres specific store""" + +import threading +import warnings +import cPickle +import os.path as osp +from StringIO import StringIO +from time import asctime +from datetime import date, datetime, time +from collections import defaultdict +from base64 import b64encode + +from cubicweb.utils import make_uid +from cubicweb.server.sqlutils import SQL_PREFIX +from cubicweb.dataimport.stores import NoHookRQLObjectStore + +def _import_statements(sql_connect, statements, nb_threads=3, + dump_output_dir=None, + support_copy_from=True, encoding='utf-8'): + """ + Import a bunch of sql statements, using different threads. 
+ """ + try: + chunksize = (len(statements) / nb_threads) + 1 + threads = [] + for i in xrange(nb_threads): + chunks = statements[i*chunksize:(i+1)*chunksize] + thread = threading.Thread(target=_execmany_thread, + args=(sql_connect, chunks, + dump_output_dir, + support_copy_from, + encoding)) + thread.start() + threads.append(thread) + for t in threads: + t.join() + except Exception: + print 'Error in import statements' + +def _execmany_thread_not_copy_from(cu, statement, data, table=None, + columns=None, encoding='utf-8'): + """ Execute thread without copy from + """ + cu.executemany(statement, data) + +def _execmany_thread_copy_from(cu, statement, data, table, + columns, encoding='utf-8'): + """ Execute thread with copy from + """ + buf = _create_copyfrom_buffer(data, columns, encoding=encoding) + if buf is None: + _execmany_thread_not_copy_from(cu, statement, data) + else: + if columns is None: + cu.copy_from(buf, table, null='NULL') + else: + cu.copy_from(buf, table, null='NULL', columns=columns) + +def _execmany_thread(sql_connect, statements, dump_output_dir=None, + support_copy_from=True, encoding='utf-8'): + """ + Execute sql statement. If 'INSERT INTO', try to use 'COPY FROM' command, + or fallback to execute_many. 
+ """ + if support_copy_from: + execmany_func = _execmany_thread_copy_from + else: + execmany_func = _execmany_thread_not_copy_from + cnx = sql_connect() + cu = cnx.cursor() + try: + for statement, data in statements: + table = None + columns = None + try: + if not statement.startswith('INSERT INTO'): + cu.executemany(statement, data) + continue + table = statement.split()[2] + if isinstance(data[0], (tuple, list)): + columns = None + else: + columns = list(data[0]) + execmany_func(cu, statement, data, table, columns, encoding) + except Exception: + print 'unable to copy data into table %s' % table + # Error in import statement, save data in dump_output_dir + if dump_output_dir is not None: + pdata = {'data': data, 'statement': statement, + 'time': asctime(), 'columns': columns} + filename = make_uid() + try: + with open(osp.join(dump_output_dir, + '%s.pickle' % filename), 'w') as fobj: + fobj.write(cPickle.dumps(pdata)) + except IOError: + print 'ERROR while pickling in', dump_output_dir, filename+'.pickle' + pass + cnx.rollback() + raise + finally: + cnx.commit() + cu.close() + + +def _copyfrom_buffer_convert_None(value, **opts): + '''Convert None value to "NULL"''' + return 'NULL' + +def _copyfrom_buffer_convert_number(value, **opts): + '''Convert a number into its string representation''' + return str(value) + +def _copyfrom_buffer_convert_string(value, **opts): + '''Convert string value. 
+ + Recognized keywords: + :encoding: resulting string encoding (default: utf-8) + ''' + encoding = opts.get('encoding','utf-8') + escape_chars = ((u'\\', ur'\\'), (u'\t', u'\\t'), (u'\r', u'\\r'), + (u'\n', u'\\n')) + for char, replace in escape_chars: + value = value.replace(char, replace) + if isinstance(value, unicode): + value = value.encode(encoding) + return value + +def _copyfrom_buffer_convert_date(value, **opts): + '''Convert date into "YYYY-MM-DD"''' + # Do not use strftime, as it yields issue with date < 1900 + # (http://bugs.python.org/issue1777412) + return '%04d-%02d-%02d' % (value.year, value.month, value.day) + +def _copyfrom_buffer_convert_datetime(value, **opts): + '''Convert date into "YYYY-MM-DD HH:MM:SS.UUUUUU"''' + # Do not use strftime, as it yields issue with date < 1900 + # (http://bugs.python.org/issue1777412) + return '%s %s' % (_copyfrom_buffer_convert_date(value, **opts), + _copyfrom_buffer_convert_time(value, **opts)) + +def _copyfrom_buffer_convert_time(value, **opts): + '''Convert time into "HH:MM:SS.UUUUUU"''' + return '%02d:%02d:%02d.%06d' % (value.hour, value.minute, + value.second, value.microsecond) + +# (types, converter) list. +_COPYFROM_BUFFER_CONVERTERS = [ + (type(None), _copyfrom_buffer_convert_None), + ((long, int, float), _copyfrom_buffer_convert_number), + (basestring, _copyfrom_buffer_convert_string), + (datetime, _copyfrom_buffer_convert_datetime), + (date, _copyfrom_buffer_convert_date), + (time, _copyfrom_buffer_convert_time), +] + +def _create_copyfrom_buffer(data, columns=None, **convert_opts): + """ + Create a StringIO buffer for 'COPY FROM' command. + Deals with Unicode, Int, Float, Date... 
(see ``converters``) + + :data: a sequence/dict of tuples + :columns: list of columns to consider (default to all columns) + :converter_opts: keyword arguements given to converters + """ + # Create a list rather than directly create a StringIO + # to correctly write lines separated by '\n' in a single step + rows = [] + if columns is None: + if isinstance(data[0], (tuple, list)): + columns = range(len(data[0])) + elif isinstance(data[0], dict): + columns = data[0].keys() + else: + raise ValueError('Could not get columns: you must provide columns.') + for row in data: + # Iterate over the different columns and the different values + # and try to convert them to a correct datatype. + # If an error is raised, do not continue. + formatted_row = [] + for col in columns: + try: + value = row[col] + except KeyError: + warnings.warn(u"Column %s is not accessible in row %s" + % (col, row), RuntimeWarning) + # XXX 'value' set to None so that the import does not end in + # error. + # Instead, the extra keys are set to NULL from the + # database point of view. + value = None + for types, converter in _COPYFROM_BUFFER_CONVERTERS: + if isinstance(value, types): + value = converter(value, **convert_opts) + break + else: + raise ValueError("Unsupported value type %s" % type(value)) + # We push the value to the new formatted row + # if the value is not None and could be converted to a string. + formatted_row.append(value) + rows.append('\t'.join(formatted_row)) + return StringIO('\n'.join(rows)) + + +class SQLGenObjectStore(NoHookRQLObjectStore): + """Controller of the data import process. This version is based + on direct insertions throught SQL command (COPY FROM or execute many). + + >>> store = SQLGenObjectStore(cnx) + >>> store.create_entity('Person', ...) + >>> store.flush() + """ + + def __init__(self, cnx, dump_output_dir=None, nb_threads_statement=3): + """ + Initialize a SQLGenObjectStore. 
+ + Parameters: + + - cnx: connection on the cubicweb instance + - dump_output_dir: a directory to dump failed statements + for easier recovery. Default is None (no dump). + - nb_threads_statement: number of threads used + for SQL insertion (default is 3). + """ + super(SQLGenObjectStore, self).__init__(cnx) + ### hijack default source + self.source = SQLGenSourceWrapper( + self.source, cnx.vreg.schema, + dump_output_dir=dump_output_dir, + nb_threads_statement=nb_threads_statement) + ### XXX This is done in super().__init__(), but should be + ### redone here to link to the correct source + self.add_relation = self.source.add_relation + self.indexes_etypes = {} + + def flush(self): + """Flush data to the database""" + self.source.flush() + + def relate(self, subj_eid, rtype, obj_eid, **kwargs): + if subj_eid is None or obj_eid is None: + return + # XXX Could subjtype be inferred ? + self.source.add_relation(self._cnx, subj_eid, rtype, obj_eid, + self.rschema(rtype).inlined, **kwargs) + if self.rschema(rtype).symmetric: + self.source.add_relation(self._cnx, obj_eid, rtype, subj_eid, + self.rschema(rtype).inlined, **kwargs) + + def drop_indexes(self, etype): + """Drop indexes for a given entity type""" + if etype not in self.indexes_etypes: + cu = self._cnx.cnxset.cu + def index_to_attr(index): + """turn an index name to (database) attribute name""" + return index.replace(etype.lower(), '').replace('idx', '').strip('_') + indices = [(index, index_to_attr(index)) + for index in self.source.dbhelper.list_indices(cu, etype) + # Do not consider 'cw_etype_pkey' index + if not index.endswith('key')] + self.indexes_etypes[etype] = indices + for index, attr in self.indexes_etypes[etype]: + self._cnx.system_sql('DROP INDEX %s' % index) + + def create_indexes(self, etype): + """Recreate indexes for a given entity type""" + for index, attr in self.indexes_etypes.get(etype, []): + sql = 'CREATE INDEX %s ON cw_%s(%s)' % (index, etype, attr) + self._cnx.system_sql(sql) + + 
+########################################################################### +## SQL Source ############################################################# +########################################################################### + +class SQLGenSourceWrapper(object): + + def __init__(self, system_source, schema, + dump_output_dir=None, nb_threads_statement=3): + self.system_source = system_source + self._sql = threading.local() + # Explicitely backport attributes from system source + self._storage_handler = self.system_source._storage_handler + self.preprocess_entity = self.system_source.preprocess_entity + self.sqlgen = self.system_source.sqlgen + self.uri = self.system_source.uri + self.eid = self.system_source.eid + # Directory to write temporary files + self.dump_output_dir = dump_output_dir + # Allow to execute code with SQLite backend that does + # not support (yet...) copy_from + # XXX Should be dealt with in logilab.database + spcfrom = system_source.dbhelper.dbapi_module.support_copy_from + self.support_copy_from = spcfrom + self.dbencoding = system_source.dbhelper.dbencoding + self.nb_threads_statement = nb_threads_statement + # initialize thread-local data for main thread + self.init_thread_locals() + self._inlined_rtypes_cache = {} + self._fill_inlined_rtypes_cache(schema) + self.schema = schema + self.do_fti = False + + def _fill_inlined_rtypes_cache(self, schema): + cache = self._inlined_rtypes_cache + for eschema in schema.entities(): + for rschema in eschema.ordered_relations(): + if rschema.inlined: + cache[eschema.type] = SQL_PREFIX + rschema.type + + def init_thread_locals(self): + """initializes thread-local data""" + self._sql.entities = defaultdict(list) + self._sql.relations = {} + self._sql.inlined_relations = {} + # keep track, for each eid of the corresponding data dict + self._sql.eid_insertdicts = {} + + def flush(self): + print 'starting flush' + _entities_sql = self._sql.entities + _relations_sql = self._sql.relations + 
_inlined_relations_sql = self._sql.inlined_relations + _insertdicts = self._sql.eid_insertdicts + try: + # try, for each inlined_relation, to find if we're also creating + # the host entity (i.e. the subject of the relation). + # In that case, simply update the insert dict and remove + # the need to make the + # UPDATE statement + for statement, datalist in _inlined_relations_sql.iteritems(): + new_datalist = [] + # for a given inlined relation, + # browse each couple to be inserted + for data in datalist: + keys = list(data) + # For inlined relations, it exists only two case: + # (rtype, cw_eid) or (cw_eid, rtype) + if keys[0] == 'cw_eid': + rtype = keys[1] + else: + rtype = keys[0] + updated_eid = data['cw_eid'] + if updated_eid in _insertdicts: + _insertdicts[updated_eid][rtype] = data[rtype] + else: + # could not find corresponding insert dict, keep the + # UPDATE query + new_datalist.append(data) + _inlined_relations_sql[statement] = new_datalist + _import_statements(self.system_source.get_connection, + _entities_sql.items() + + _relations_sql.items() + + _inlined_relations_sql.items(), + dump_output_dir=self.dump_output_dir, + nb_threads=self.nb_threads_statement, + support_copy_from=self.support_copy_from, + encoding=self.dbencoding) + finally: + _entities_sql.clear() + _relations_sql.clear() + _insertdicts.clear() + _inlined_relations_sql.clear() + + def add_relation(self, cnx, subject, rtype, object, + inlined=False, **kwargs): + if inlined: + _sql = self._sql.inlined_relations + data = {'cw_eid': subject, SQL_PREFIX + rtype: object} + subjtype = kwargs.get('subjtype') + if subjtype is None: + # Try to infer it + targets = [t.type for t in + self.schema.rschema(rtype).subjects()] + if len(targets) == 1: + subjtype = targets[0] + else: + raise ValueError('You should give the subject etype for ' + 'inlined relation %s' + ', as it cannot be inferred: ' + 'this type is given as keyword argument ' + '``subjtype``'% rtype) + statement = 
self.sqlgen.update(SQL_PREFIX + subjtype, + data, ['cw_eid']) + else: + _sql = self._sql.relations + data = {'eid_from': subject, 'eid_to': object} + statement = self.sqlgen.insert('%s_relation' % rtype, data) + if statement in _sql: + _sql[statement].append(data) + else: + _sql[statement] = [data] + + def add_entity(self, cnx, entity): + with self._storage_handler(entity, 'added'): + attrs = self.preprocess_entity(entity) + rtypes = self._inlined_rtypes_cache.get(entity.cw_etype, ()) + if isinstance(rtypes, str): + rtypes = (rtypes,) + for rtype in rtypes: + if rtype not in attrs: + attrs[rtype] = None + sql = self.sqlgen.insert(SQL_PREFIX + entity.cw_etype, attrs) + self._sql.eid_insertdicts[entity.eid] = attrs + self._append_to_entities(sql, attrs) + + def _append_to_entities(self, sql, attrs): + self._sql.entities[sql].append(attrs) + + def _handle_insert_entity_sql(self, cnx, sql, attrs): + # We have to overwrite the source given in parameters + # as here, we directly use the system source + attrs['asource'] = self.system_source.uri + self._append_to_entities(sql, attrs) + + def _handle_is_relation_sql(self, cnx, sql, attrs): + self._append_to_entities(sql, attrs) + + def _handle_is_instance_of_sql(self, cnx, sql, attrs): + self._append_to_entities(sql, attrs) + + def _handle_source_relation_sql(self, cnx, sql, attrs): + self._append_to_entities(sql, attrs) + + # add_info is _copypasted_ from the one in NativeSQLSource. We want it + # there because it will use the _handlers of the SQLGenSourceWrapper, which + # are not like the ones in the native source. 
+ def add_info(self, cnx, entity, source, extid): + """add type and source info for an eid into the system table""" + # begin by inserting eid/type/source/extid into the entities table + if extid is not None: + assert isinstance(extid, str) + extid = b64encode(extid) + attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': extid, + 'asource': source.uri} + self._handle_insert_entity_sql(cnx, self.sqlgen.insert('entities', attrs), attrs) + # insert core relations: is, is_instance_of and cw_source + try: + self._handle_is_relation_sql(cnx, 'INSERT INTO is_relation(eid_from,eid_to) VALUES (%s,%s)', + (entity.eid, eschema_eid(cnx, entity.e_schema))) + except IndexError: + # during schema serialization, skip + pass + else: + for eschema in entity.e_schema.ancestors() + [entity.e_schema]: + self._handle_is_relation_sql(cnx, + 'INSERT INTO is_instance_of_relation(eid_from,eid_to) VALUES (%s,%s)', + (entity.eid, eschema_eid(cnx, eschema))) + if 'CWSource' in self.schema and source.eid is not None: # else, cw < 3.10 + self._handle_is_relation_sql(cnx, 'INSERT INTO cw_source_relation(eid_from,eid_to) VALUES (%s,%s)', + (entity.eid, source.eid)) + # now we can update the full text index + if self.do_fti and self.need_fti_indexation(entity.cw_etype): + self.index_entity(cnx, entity=entity) diff -r 2fe19ba68daa -r 2fdf67ef3341 dataimport/stores.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dataimport/stores.py Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,323 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. 
+# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +""" +Stores are responsible to insert properly formatted entities and relations into the database. They +have the following API:: + + >>> user_eid = store.prepare_insert_entity('CWUser', login=u'johndoe') + >>> group_eid = store.prepare_insert_entity('CWUser', name=u'unknown') + >>> store.relate(user_eid, 'in_group', group_eid) + >>> store.flush() + >>> store.commit() + >>> store.finish() + +Some store **requires a flush** to copy data in the database, so if you want to have store +independant code you should explicitly call it. (There may be multiple flushes during the +process, or only one at the end if there is no memory issue). This is different from the +commit which validates the database transaction. At last, the `finish()` method should be called in +case the store requires additional work once everything is done. + +* ``prepare_insert_entity(, **kwargs) -> eid``: given an entity + type, attributes and inlined relations, return the eid of the entity to be + inserted, *with no guarantee that anything has been inserted in database*, + +* ``prepare_update_entity(, eid, **kwargs) -> None``: given an + entity type and eid, promise for update given attributes and inlined + relations *with no guarantee that anything has been inserted in database*, + +* ``prepare_insert_relation(eid_from, rtype, eid_to) -> None``: indicate that a + relation ``rtype`` should be added between entities with eids ``eid_from`` + and ``eid_to``. Similar to ``prepare_insert_entity()``, *there is no + guarantee that the relation will be inserted in database*, + +* ``flush() -> None``: flush any temporary data to database. 
May be called + several times during an import, + +* ``commit() -> None``: commit the database transaction, + +* ``finish() -> None``: additional stuff to do after import is terminated. + +.. autoclass:: cubicweb.dataimport.stores.RQLObjectStore +.. autoclass:: cubicweb.dataimport.stores.NoHookRQLObjectStore +.. autoclass:: cubicweb.dataimport.stores.MetaGenerator +""" +import inspect +import warnings +from datetime import datetime +from copy import copy + +from logilab.common.deprecation import deprecated +from logilab.common.decorators import cached + +from cubicweb.schema import META_RTYPES, VIRTUAL_RTYPES +from cubicweb.server.edition import EditedEntity + + +class RQLObjectStore(object): + """Store that works by making RQL queries, hence with all the cubicweb's machinery activated. + """ + + def __init__(self, cnx, commit=None): + if commit is not None: + warnings.warn('[3.19] commit argument should not be specified ' + 'as the cnx object already provides it.', + DeprecationWarning, stacklevel=2) + self._cnx = cnx + self._commit = commit or cnx.commit + # XXX 3.21 deprecated attributes + self.eids = {} + self.types = {} + + def rql(self, *args): + """Execute a RQL query. This is NOT part of the store API.""" + return self._cnx.execute(*args) + + def prepare_insert_entity(self, *args, **kwargs): + """Given an entity type, attributes and inlined relations, returns the inserted entity's + eid. + """ + entity = self._cnx.create_entity(*args, **kwargs) + self.eids[entity.eid] = entity + self.types.setdefault(args[0], []).append(entity.eid) + return entity.eid + + def prepare_update_entity(self, etype, eid, **kwargs): + """Given an entity type and eid, updates the corresponding entity with specified attributes + and inlined relations. 
+ """ + entity = self._cnx.entity_from_eid(eid) + assert entity.cw_etype == etype, 'Trying to update with wrong type {}'.format(etype) + # XXX some inlined relations may already exists + entity.cw_set(**kwargs) + + def prepare_insert_relation(self, eid_from, rtype, eid_to, **kwargs): + """Insert into the database a relation ``rtype`` between entities with eids ``eid_from`` + and ``eid_to``. + """ + self.rql('SET X %s Y WHERE X eid %%(x)s, Y eid %%(y)s' % rtype, + {'x': int(eid_from), 'y': int(eid_to)}) + + def flush(self): + """Nothing to flush for this store.""" + pass + + def commit(self): + """Commit the database transaction.""" + return self._commit() + + @property + def session(self): + warnings.warn('[3.19] deprecated property.', DeprecationWarning, stacklevel=2) + return self._cnx.repo._get_session(self._cnx.sessionid) + + @deprecated("[3.19] use cnx.find(*args, **kwargs).entities() instead") + def find_entities(self, *args, **kwargs): + return self._cnx.find(*args, **kwargs).entities() + + @deprecated("[3.19] use cnx.find(*args, **kwargs).one() instead") + def find_one_entity(self, *args, **kwargs): + return self._cnx.find(*args, **kwargs).one() + + @deprecated('[3.21] use prepare_insert_entity instead') + def create_entity(self, *args, **kwargs): + eid = self.prepare_insert_entity(*args, **kwargs) + return self._cnx.entity_from_eid(eid) + + @deprecated('[3.21] use prepare_insert_relation instead') + def relate(self, eid_from, rtype, eid_to, **kwargs): + self.prepare_insert_relation(eid_from, rtype, eid_to, **kwargs) + + +class NoHookRQLObjectStore(RQLObjectStore): + """Store that works by accessing low-level CubicWeb's source API, with all hooks deactivated. It + must be given a metadata generator object to handle metadata which are usually handled by hooks + (see :class:`MetaGenerator`). 
+ """ + + def __init__(self, cnx, metagen=None): + super(NoHookRQLObjectStore, self).__init__(cnx) + self.source = cnx.repo.system_source + self.rschema = cnx.repo.schema.rschema + self.add_relation = self.source.add_relation + if metagen is None: + metagen = MetaGenerator(cnx) + self.metagen = metagen + self._nb_inserted_entities = 0 + self._nb_inserted_types = 0 + self._nb_inserted_relations = 0 + # deactivate security + cnx.read_security = False + cnx.write_security = False + + def prepare_insert_entity(self, etype, **kwargs): + """Given an entity type, attributes and inlined relations, returns the inserted entity's + eid. + """ + for k, v in kwargs.iteritems(): + kwargs[k] = getattr(v, 'eid', v) + entity, rels = self.metagen.base_etype_dicts(etype) + # make a copy to keep cached entity pristine + entity = copy(entity) + entity.cw_edited = copy(entity.cw_edited) + entity.cw_clear_relation_cache() + entity.cw_edited.update(kwargs, skipsec=False) + entity_source, extid = self.metagen.init_entity(entity) + cnx = self._cnx + self.source.add_info(cnx, entity, entity_source, extid) + self.source.add_entity(cnx, entity) + kwargs = dict() + if inspect.getargspec(self.add_relation).keywords: + kwargs['subjtype'] = entity.cw_etype + for rtype, targeteids in rels.iteritems(): + # targeteids may be a single eid or a list of eids + inlined = self.rschema(rtype).inlined + try: + for targeteid in targeteids: + self.add_relation(cnx, entity.eid, rtype, targeteid, + inlined, **kwargs) + except TypeError: + self.add_relation(cnx, entity.eid, rtype, targeteids, + inlined, **kwargs) + self._nb_inserted_entities += 1 + return entity.eid + + # XXX: prepare_update_entity is inherited from RQLObjectStore, it should be reimplemented to + # actually skip hooks as prepare_insert_entity + + def prepare_insert_relation(self, eid_from, rtype, eid_to, **kwargs): + """Insert into the database a relation ``rtype`` between entities with eids ``eid_from`` + and ``eid_to``. 
+ """ + assert not rtype.startswith('reverse_') + self.add_relation(self._cnx, eid_from, rtype, eid_to, + self.rschema(rtype).inlined) + if self.rschema(rtype).symmetric: + self.add_relation(self._cnx, eid_to, rtype, eid_from, + self.rschema(rtype).inlined) + self._nb_inserted_relations += 1 + + @property + @deprecated('[3.21] deprecated') + def nb_inserted_entities(self): + return self._nb_inserted_entities + + @property + @deprecated('[3.21] deprecated') + def nb_inserted_types(self): + return self._nb_inserted_types + + @property + @deprecated('[3.21] deprecated') + def nb_inserted_relations(self): + return self._nb_inserted_relations + + +class MetaGenerator(object): + """Class responsible for generating standard metadata for imported entities. You may want to + derive it to add application specific's metadata. + + Parameters: + * `cnx`: connection to the repository + * `baseurl`: optional base URL to be used for `cwuri` generation - default to config['base-url'] + * `source`: optional source to be used as `cw_source` for imported entities + """ + META_RELATIONS = (META_RTYPES + - VIRTUAL_RTYPES + - set(('eid', 'cwuri', + 'is', 'is_instance_of', 'cw_source'))) + + def __init__(self, cnx, baseurl=None, source=None): + self._cnx = cnx + if baseurl is None: + config = cnx.vreg.config + baseurl = config['base-url'] or config.default_base_url() + if not baseurl[-1] == '/': + baseurl += '/' + self.baseurl = baseurl + if source is None: + source = cnx.repo.system_source + self.source = source + self.create_eid = cnx.repo.system_source.create_eid + self.time = datetime.now() + # attributes/relations shared by all entities of the same type + self.etype_attrs = [] + self.etype_rels = [] + # attributes/relations specific to each entity + self.entity_attrs = ['cwuri'] + #self.entity_rels = [] XXX not handled (YAGNI?) 
+ schema = cnx.vreg.schema + rschema = schema.rschema + for rtype in self.META_RELATIONS: + # skip owned_by / created_by if user is the internal manager + if cnx.user.eid == -1 and rtype in ('owned_by', 'created_by'): + continue + if rschema(rtype).final: + self.etype_attrs.append(rtype) + else: + self.etype_rels.append(rtype) + + @cached + def base_etype_dicts(self, etype): + entity = self._cnx.vreg['etypes'].etype_class(etype)(self._cnx) + # entity are "surface" copied, avoid shared dict between copies + del entity.cw_extra_kwargs + entity.cw_edited = EditedEntity(entity) + for attr in self.etype_attrs: + genfunc = self.generate(attr) + if genfunc: + entity.cw_edited.edited_attribute(attr, genfunc(entity)) + rels = {} + for rel in self.etype_rels: + genfunc = self.generate(rel) + if genfunc: + rels[rel] = genfunc(entity) + return entity, rels + + def init_entity(self, entity): + entity.eid = self.create_eid(self._cnx) + extid = entity.cw_edited.get('cwuri') + for attr in self.entity_attrs: + if attr in entity.cw_edited: + # already set, skip this attribute + continue + genfunc = self.generate(attr) + if genfunc: + entity.cw_edited.edited_attribute(attr, genfunc(entity)) + if isinstance(extid, unicode): + extid = extid.encode('utf-8') + return self.source, extid + + def generate(self, rtype): + return getattr(self, 'gen_%s' % rtype, None) + + def gen_cwuri(self, entity): + assert self.baseurl, 'baseurl is None while generating cwuri' + return u'%s%s' % (self.baseurl, entity.eid) + + def gen_creation_date(self, entity): + return self.time + + def gen_modification_date(self, entity): + return self.time + + def gen_created_by(self, entity): + return self._cnx.user.eid + + def gen_owned_by(self, entity): + return self._cnx.user.eid + diff -r 2fe19ba68daa -r 2fdf67ef3341 dataimport/test/data/people.csv --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dataimport/test/data/people.csv Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,3 @@ +# uri,name,knows 
+http://www.example.org/alice,Alice, +http://www.example.org/bob,Bob,http://www.example.org/alice diff -r 2fe19ba68daa -r 2fdf67ef3341 dataimport/test/data/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dataimport/test/data/schema.py Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,29 @@ +# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . + +from yams.buildobjs import EntityType, String, SubjectRelation + +from cubicweb.schema import RQLConstraint + + +class Personne(EntityType): + nom = String(required=True) + prenom = String() + enfant = SubjectRelation('Personne', inlined=True, cardinality='?*') + connait = SubjectRelation('Personne', symmetric=True, + constraints=[RQLConstraint('NOT S identity O')]) diff -r 2fe19ba68daa -r 2fdf67ef3341 dataimport/test/test_csv.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dataimport/test/test_csv.py Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,72 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. 
+# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . +"""unittest for cubicweb.dataimport.csv""" + +from StringIO import StringIO + +from logilab.common.testlib import TestCase, unittest_main + +from cubicweb.dataimport import csv + + +class UcsvreaderTC(TestCase): + + def test_empty_lines_skipped(self): + stream = StringIO('''a,b,c,d, +1,2,3,4, +,,,, +,,,, +''') + self.assertEqual([[u'a', u'b', u'c', u'd', u''], + [u'1', u'2', u'3', u'4', u''], + ], + list(csv.ucsvreader(stream))) + stream.seek(0) + self.assertEqual([[u'a', u'b', u'c', u'd', u''], + [u'1', u'2', u'3', u'4', u''], + [u'', u'', u'', u'', u''], + [u'', u'', u'', u'', u''] + ], + list(csv.ucsvreader(stream, skip_empty=False))) + + def test_skip_first(self): + stream = StringIO('a,b,c,d,\n1,2,3,4,\n') + reader = csv.ucsvreader(stream, skipfirst=True, ignore_errors=True) + self.assertEqual(list(reader), + [[u'1', u'2', u'3', u'4', u'']]) + + stream.seek(0) + reader = csv.ucsvreader(stream, skipfirst=True, ignore_errors=False) + self.assertEqual(list(reader), + [[u'1', u'2', u'3', u'4', u'']]) + + stream.seek(0) + reader = csv.ucsvreader(stream, skipfirst=False, ignore_errors=True) + self.assertEqual(list(reader), + [[u'a', u'b', u'c', u'd', u''], + [u'1', u'2', u'3', u'4', u'']]) + + stream.seek(0) + reader = csv.ucsvreader(stream, skipfirst=False, ignore_errors=False) + self.assertEqual(list(reader), + [[u'a', u'b', u'c', u'd', u''], + 
[u'1', u'2', u'3', u'4', u'']]) + + +if __name__ == '__main__': + unittest_main() diff -r 2fe19ba68daa -r 2fdf67ef3341 dataimport/test/test_pgstore.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dataimport/test/test_pgstore.py Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,92 @@ +# coding: utf-8 +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""unittest for cubicweb.dataimport.pgstore""" + +import datetime as DT + +from logilab.common.testlib import TestCase, unittest_main + +from cubicweb.dataimport import pgstore + + +class CreateCopyFromBufferTC(TestCase): + + # test converters + + def test_convert_none(self): + cnvt = pgstore._copyfrom_buffer_convert_None + self.assertEqual('NULL', cnvt(None)) + + def test_convert_number(self): + cnvt = pgstore._copyfrom_buffer_convert_number + self.assertEqual('42', cnvt(42)) + self.assertEqual('42', cnvt(42L)) + self.assertEqual('42.42', cnvt(42.42)) + + def test_convert_string(self): + cnvt = pgstore._copyfrom_buffer_convert_string + # simple + self.assertEqual('babar', cnvt('babar')) + # unicode + self.assertEqual('\xc3\xa9l\xc3\xa9phant', cnvt(u'éléphant')) + self.assertEqual('\xe9l\xe9phant', cnvt(u'éléphant', encoding='latin1')) + # escaping + self.assertEqual('babar\\tceleste\\n', cnvt('babar\tceleste\n')) + self.assertEqual(r'C:\\new\tC:\\test', cnvt('C:\\new\tC:\\test')) + + def test_convert_date(self): + cnvt = pgstore._copyfrom_buffer_convert_date + self.assertEqual('0666-01-13', cnvt(DT.date(666, 1, 13))) + + def test_convert_time(self): + cnvt = pgstore._copyfrom_buffer_convert_time + self.assertEqual('06:06:06.000100', cnvt(DT.time(6, 6, 6, 100))) + + def test_convert_datetime(self): + cnvt = pgstore._copyfrom_buffer_convert_datetime + self.assertEqual('0666-06-13 06:06:06.000000', cnvt(DT.datetime(666, 6, 13, 6, 6, 6))) + + # test buffer + def test_create_copyfrom_buffer_tuple(self): + data = ((42, 42L, 42.42, u'éléphant', DT.date(666, 1, 13), DT.time(6, 6, 6), + DT.datetime(666, 6, 13, 6, 6, 6)), + (6, 6L, 6.6, u'babar', DT.date(2014, 1, 14), DT.time(4, 2, 1), + DT.datetime(2014, 1, 1, 0, 0, 0))) + results = pgstore._create_copyfrom_buffer(data) + # all columns + expected = '''42\t42\t42.42\téléphant\t0666-01-13\t06:06:06.000000\t0666-06-13 06:06:06.000000 +6\t6\t6.6\tbabar\t2014-01-14\t04:02:01.000000\t2014-01-01 00:00:00.000000''' + 
self.assertMultiLineEqual(expected, results.getvalue()) + # selected columns + results = pgstore._create_copyfrom_buffer(data, columns=(1, 3, 6)) + expected = '''42\téléphant\t0666-06-13 06:06:06.000000 +6\tbabar\t2014-01-01 00:00:00.000000''' + self.assertMultiLineEqual(expected, results.getvalue()) + + def test_create_copyfrom_buffer_dict(self): + data = (dict(integer=42, double=42.42, text=u'éléphant', + date=DT.datetime(666, 6, 13, 6, 6, 6)), + dict(integer=6, double=6.6, text=u'babar', + date=DT.datetime(2014, 1, 1, 0, 0, 0))) + results = pgstore._create_copyfrom_buffer(data, ('integer', 'text')) + expected = '''42\téléphant\n6\tbabar''' + self.assertMultiLineEqual(expected, results.getvalue()) + +if __name__ == '__main__': + unittest_main() diff -r 2fe19ba68daa -r 2fdf67ef3341 dataimport/test/test_stores.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dataimport/test/test_stores.py Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,88 @@ +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""unittest for cubicweb.dataimport.stores""" + +import datetime as DT + +from cubicweb.dataimport import stores +from cubicweb.devtools.testlib import CubicWebTC + + +class RQLObjectStoreTC(CubicWebTC): + + def test_all(self): + with self.admin_access.repo_cnx() as cnx: + store = stores.RQLObjectStore(cnx) + # Check data insertion + group_eid = store.prepare_insert_entity('CWGroup', name=u'grp') + user_eid = store.prepare_insert_entity('CWUser', login=u'lgn', + upassword=u'pwd') + store.prepare_insert_relation(user_eid, 'in_group', group_eid) + cnx.commit() + users = cnx.execute('CWUser X WHERE X login "lgn"') + self.assertEqual(1, len(users)) + self.assertEqual(user_eid, users.one().eid) + groups = cnx.execute('CWGroup X WHERE U in_group X, U login "lgn"') + self.assertEqual(1, len(users)) + self.assertEqual(group_eid, groups.one().eid) + # Check data update + self.set_description('Check data update') + store.prepare_update_entity('CWGroup', group_eid, name=u'new_grp') + cnx.commit() + group = cnx.execute('CWGroup X WHERE X name "grp"') + self.assertEqual(len(group), 0) + group = cnx.execute('CWGroup X WHERE X name "new_grp"') + self.assertEqual, len(group), 1 + # Check data update with wrong type + with self.assertRaises(AssertionError): + store.prepare_update_entity('CWUser', group_eid, name=u'new_user') + cnx.commit() + group = cnx.execute('CWGroup X WHERE X name "new_user"') + self.assertEqual(len(group), 0) + group = cnx.execute('CWGroup X WHERE X name "new_grp"') + self.assertEqual(len(group), 1) + + +class MetaGeneratorTC(CubicWebTC): + + def test_dont_generate_relation_to_internal_manager(self): + with self.admin_access.repo_cnx() as cnx: + metagen = stores.MetaGenerator(cnx) + self.assertIn('created_by', metagen.etype_rels) + self.assertIn('owned_by', metagen.etype_rels) + with self.repo.internal_cnx() as cnx: + metagen = stores.MetaGenerator(cnx) + self.assertNotIn('created_by', metagen.etype_rels) + self.assertNotIn('owned_by', metagen.etype_rels) + + 
def test_dont_generate_specified_values(self): + with self.admin_access.repo_cnx() as cnx: + metagen = stores.MetaGenerator(cnx) + # hijack gen_modification_date to ensure we don't go through it + metagen.gen_modification_date = None + md = DT.datetime.now() - DT.timedelta(days=1) + entity, rels = metagen.base_etype_dicts('CWUser') + entity.cw_edited.update(dict(modification_date=md)) + with cnx.ensure_cnx_set: + metagen.init_entity(entity) + self.assertEqual(entity.cw_edited['modification_date'], md) + + +if __name__ == '__main__': + from logilab.common.testlib import unittest_main + unittest_main() diff -r 2fe19ba68daa -r 2fdf67ef3341 dataimport/test/unittest_importer.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dataimport/test/unittest_importer.py Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,190 @@ +# -*- coding: utf-8 -*- +# copyright 2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr -- mailto:contact@logilab.fr +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . 
+"""Tests for cubicweb.dataimport.importer""" + +from collections import defaultdict + +from logilab.common.testlib import TestCase, unittest_main + +from cubicweb import ValidationError +from cubicweb.devtools.testlib import CubicWebTC +from cubicweb.dataimport import RQLObjectStore, ucsvreader +from cubicweb.dataimport.importer import (ExtEntity, ExtEntitiesImporter, SimpleImportLog, + RelationMapping, use_extid_as_cwuri) + + +class RelationMappingTC(CubicWebTC): + + def test_nosource(self): + with self.admin_access.repo_cnx() as cnx: + alice_eid = cnx.create_entity('Personne', nom=u'alice').eid + bob_eid = cnx.create_entity('Personne', nom=u'bob', connait=alice_eid).eid + cnx.commit() + mapping = RelationMapping(cnx) + self.assertEqual(mapping['connait'], + set([(bob_eid, alice_eid), (alice_eid, bob_eid)])) + + def test_with_source(self): + with self.admin_access.repo_cnx() as cnx: + alice_eid = cnx.create_entity('Personne', nom=u'alice').eid + bob_eid = cnx.create_entity('Personne', nom=u'bob', connait=alice_eid).eid + cnx.commit() + mapping = RelationMapping(cnx, cnx.find('CWSource', name=u'system').one()) + self.assertEqual(mapping['connait'], + set([(bob_eid, alice_eid), (alice_eid, bob_eid)])) + + +class ExtEntitiesImporterTC(CubicWebTC): + + def importer(self, cnx): + store = RQLObjectStore(cnx) + return ExtEntitiesImporter(self.schema, store, raise_on_error=True) + + def test_simple_import(self): + with self.admin_access.repo_cnx() as cnx: + importer = self.importer(cnx) + personne = ExtEntity('Personne', 1, {'nom': set([u'de la lune']), + 'prenom': set([u'Jean'])}) + importer.import_entities([personne]) + cnx.commit() + rset = cnx.execute('Any X WHERE X is Personne') + entity = rset.get_entity(0, 0) + self.assertEqual(entity.nom, u'de la lune') + self.assertEqual(entity.prenom, u'Jean') + + def test_import_missing_required_attribute(self): + """Check import of ext entity with missing required attribute""" + with self.admin_access.repo_cnx() as cnx: + 
importer = self.importer(cnx) + tag = ExtEntity('Personne', 2, {'prenom': set([u'Jean'])}) + self.assertRaises(ValidationError, importer.import_entities, [tag]) + + def test_import_inlined_relation(self): + """Check import of ext entities with inlined relation""" + with self.admin_access.repo_cnx() as cnx: + importer = self.importer(cnx) + richelieu = ExtEntity('Personne', 3, {'nom': set([u'Richelieu']), + 'enfant': set([4])}) + athos = ExtEntity('Personne', 4, {'nom': set([u'Athos'])}) + importer.import_entities([athos, richelieu]) + cnx.commit() + rset = cnx.execute('Any X WHERE X is Personne, X nom "Richelieu"') + entity = rset.get_entity(0, 0) + self.assertEqual(entity.enfant[0].nom, 'Athos') + + def test_import_non_inlined_relation(self): + """Check import of ext entities with non inlined relation""" + with self.admin_access.repo_cnx() as cnx: + importer = self.importer(cnx) + richelieu = ExtEntity('Personne', 5, {'nom': set([u'Richelieu']), + 'connait': set([6])}) + athos = ExtEntity('Personne', 6, {'nom': set([u'Athos'])}) + importer.import_entities([athos, richelieu]) + cnx.commit() + rset = cnx.execute('Any X WHERE X is Personne, X nom "Richelieu"') + entity = rset.get_entity(0, 0) + self.assertEqual(entity.connait[0].nom, 'Athos') + rset = cnx.execute('Any X WHERE X is Personne, X nom "Athos"') + entity = rset.get_entity(0, 0) + self.assertEqual(entity.connait[0].nom, 'Richelieu') + + def test_import_missing_inlined_relation(self): + """Check import of ext entity with missing inlined relation""" + with self.admin_access.repo_cnx() as cnx: + importer = self.importer(cnx) + richelieu = ExtEntity('Personne', 7, + {'nom': set([u'Richelieu']), 'enfant': set([8])}) + self.assertRaises(Exception, importer.import_entities, [richelieu]) + cnx.commit() + rset = cnx.execute('Any X WHERE X is Personne, X nom "Richelieu"') + self.assertEqual(len(rset), 0) + + def test_import_missing_non_inlined_relation(self): + """Check import of ext entity with missing non-inlined 
relation""" + with self.admin_access.repo_cnx() as cnx: + importer = self.importer(cnx) + richelieu = ExtEntity('Personne', 9, + {'nom': set([u'Richelieu']), 'connait': set([10])}) + self.assertRaises(Exception, importer.import_entities, [richelieu]) + cnx.commit() + rset = cnx.execute('Any X WHERE X is Personne, X nom "Richelieu"') + entity = rset.get_entity(0, 0) + self.assertEqual(entity.nom, u'Richelieu') + self.assertEqual(len(entity.connait), 0) + + def test_update(self): + """Check update of ext entity""" + with self.admin_access.repo_cnx() as cnx: + importer = self.importer(cnx) + # First import + richelieu = ExtEntity('Personne', 11, + {'nom': {u'Richelieu Diacre'}}) + importer.import_entities([richelieu]) + cnx.commit() + rset = cnx.execute('Any X WHERE X is Personne') + entity = rset.get_entity(0, 0) + self.assertEqual(entity.nom, u'Richelieu Diacre') + # Second import + richelieu = ExtEntity('Personne', 11, + {'nom': {u'Richelieu Cardinal'}}) + importer.import_entities([richelieu]) + cnx.commit() + rset = cnx.execute('Any X WHERE X is Personne') + self.assertEqual(len(rset), 1) + entity = rset.get_entity(0, 0) + self.assertEqual(entity.nom, u'Richelieu Cardinal') + + +class UseExtidAsCwuriTC(TestCase): + + def test(self): + personne = ExtEntity('Personne', 1, {'nom': set([u'de la lune']), + 'prenom': set([u'Jean'])}) + mapping = {} + set_cwuri = use_extid_as_cwuri(mapping) + list(set_cwuri((personne,))) + self.assertIn('cwuri', personne.values) + self.assertEqual(personne.values['cwuri'], set(['1'])) + mapping[1] = 'whatever' + personne.values.pop('cwuri') + list(set_cwuri((personne,))) + self.assertNotIn('cwuri', personne.values) + + +def extentities_from_csv(fpath): + """Yield ExtEntity read from `fpath` CSV file.""" + with open(fpath) as f: + for uri, name, knows in ucsvreader(f, skipfirst=True, skip_empty=False): + yield ExtEntity('Personne', uri, + {'nom': set([name]), 'connait': set([knows])}) + + +class DataimportFunctionalTC(CubicWebTC): + + def 
test_csv(self): + extenties = extentities_from_csv(self.datapath('people.csv')) + with self.admin_access.repo_cnx() as cnx: + store = RQLObjectStore(cnx) + importer = ExtEntitiesImporter(self.schema, store) + importer.import_entities(extenties) + cnx.commit() + rset = cnx.execute('String N WHERE X nom N, X connait Y, Y nom "Alice"') + self.assertEqual(rset[0][0], u'Bob') + + +if __name__ == '__main__': + unittest_main() diff -r 2fe19ba68daa -r 2fdf67ef3341 dbapi.py --- a/dbapi.py Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,836 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""DB-API 2.0 compliant module - -Take a look at http://www.python.org/peps/pep-0249.html - -(most parts of this document are reported here in docstrings) -""" - -__docformat__ = "restructuredtext en" - -from threading import currentThread -from logging import getLogger -from time import time, clock -from itertools import count -from warnings import warn -from os.path import join -from uuid import uuid4 -from urlparse import urlparse - -from logilab.common.logging_ext import set_log_methods -from logilab.common.decorators import monkeypatch, cachedproperty -from logilab.common.deprecation import deprecated - -from cubicweb import (ETYPE_NAME_MAP, AuthenticationError, ProgrammingError, - cwvreg, cwconfig) -from cubicweb.repoapi import get_repository -from cubicweb.req import RequestSessionBase - - -_MARKER = object() - -def _fake_property_value(self, name): - try: - return super(DBAPIRequest, self).property_value(name) - except KeyError: - return '' - -def fake(*args, **kwargs): - return None - -def multiple_connections_fix(): - """some monkey patching necessary when an application has to deal with - several connections to different repositories. It tries to hide buggy class - attributes since classes are not designed to be shared among multiple - registries. - """ - defaultcls = cwvreg.CWRegistryStore.REGISTRY_FACTORY[None] - - etypescls = cwvreg.CWRegistryStore.REGISTRY_FACTORY['etypes'] - orig_etype_class = etypescls.orig_etype_class = etypescls.etype_class - @monkeypatch(defaultcls) - def etype_class(self, etype): - """return an entity class for the given entity type. 
- Try to find out a specific class for this kind of entity or - default to a dump of the class registered for 'Any' - """ - usercls = orig_etype_class(self, etype) - if etype == 'Any': - return usercls - usercls.e_schema = self.schema.eschema(etype) - return usercls - -def multiple_connections_unfix(): - etypescls = cwvreg.CWRegistryStore.REGISTRY_FACTORY['etypes'] - etypescls.etype_class = etypescls.orig_etype_class - - -class ConnectionProperties(object): - def __init__(self, cnxtype=None, close=True, log=False): - if cnxtype is not None: - warn('[3.16] cnxtype argument is deprecated', DeprecationWarning, - stacklevel=2) - self.cnxtype = cnxtype - self.log_queries = log - self.close_on_del = close - - -@deprecated('[3.19] the dbapi is deprecated. Have a look at the new repoapi.') -def _repo_connect(repo, login, **kwargs): - """Constructor to create a new connection to the given CubicWeb repository. - - Returns a Connection instance. - - Raises AuthenticationError if authentication failed - """ - cnxid = repo.connect(unicode(login), **kwargs) - cnx = Connection(repo, cnxid, kwargs.get('cnxprops')) - if cnx.is_repo_in_memory: - cnx.vreg = repo.vreg - return cnx - -def connect(database, login=None, - cnxprops=None, setvreg=True, mulcnx=True, initlog=True, **kwargs): - """Constructor for creating a connection to the CubicWeb repository. - Returns a :class:`Connection` object. - - Typical usage:: - - cnx = connect('myinstance', login='me', password='toto') - - `database` may be: - - * a simple instance id for in-memory connection - - * a uri like scheme://host:port/instanceid where scheme may be one of - 'pyro', 'inmemory' or 'zmqpickle' - - * if scheme is 'pyro', determine the name server address. If - not specified (e.g. 'pyro:///instanceid'), it will be detected through a - broadcast query. The instance id is the name of the instance in the name - server and may be prefixed by a group (e.g. 
- 'pyro:///:cubicweb.instanceid') - - * if scheme is handled by ZMQ (eg 'tcp'), you should not specify an - instance id - - Other arguments: - - :login: - the user login to use to authenticate. - - :cnxprops: - a :class:`ConnectionProperties` instance, allowing to specify - the connection method (eg in memory or pyro). A Pyro connection will be - established if you don't specify that argument. - - :setvreg: - flag telling if a registry should be initialized for the connection. - Don't change this unless you know what you're doing. - - :mulcnx: - Will disappear at some point. Try to deal with connections to differents - instances in the same process unless specified otherwise by setting this - flag to False. Don't change this unless you know what you're doing. - - :initlog: - flag telling if logging should be initialized. You usually don't want - logging initialization when establishing the connection from a process - where it's already initialized. - - :kwargs: - there goes authentication tokens. You usually have to specify a password - for the given user, using a named 'password' argument. 
- """ - if not urlparse(database).scheme: - warn('[3.16] give an qualified URI as database instead of using ' - 'host/cnxprops to specify the connection method', - DeprecationWarning, stacklevel=2) - if cnxprops and cnxprops.cnxtype == 'zmq': - database = kwargs.pop('host') - elif cnxprops and cnxprops.cnxtype == 'inmemory': - database = 'inmemory://' + database - else: - host = kwargs.pop('host', None) - if host is None: - host = '' - group = kwargs.pop('group', None) - if group is None: - group = 'cubicweb' - database = 'pyro://%s/%s.%s' % (host, group, database) - puri = urlparse(database) - method = puri.scheme.lower() - if method == 'inmemory': - config = cwconfig.instance_configuration(puri.netloc) - else: - config = cwconfig.CubicWebNoAppConfiguration() - repo = get_repository(database, config=config) - if method == 'inmemory': - vreg = repo.vreg - elif setvreg: - if mulcnx: - multiple_connections_fix() - vreg = cwvreg.CWRegistryStore(config, initlog=initlog) - schema = repo.get_schema() - for oldetype, newetype in ETYPE_NAME_MAP.items(): - if oldetype in schema: - print 'aliasing', newetype, 'to', oldetype - schema._entities[newetype] = schema._entities[oldetype] - vreg.set_schema(schema) - else: - vreg = None - cnx = _repo_connect(repo, login, cnxprops=cnxprops, **kwargs) - cnx.vreg = vreg - return cnx - -def in_memory_repo(config): - """Return and in_memory Repository object from a config (or vreg)""" - if isinstance(config, cwvreg.CWRegistryStore): - vreg = config - config = None - else: - vreg = None - # get local access to the repository - return get_repository('inmemory://', config=config, vreg=vreg) - -def in_memory_repo_cnx(config, login, **kwargs): - """useful method for testing and scripting to get a dbapi.Connection - object connected to an in-memory repository instance - """ - # connection to the CubicWeb repository - repo = in_memory_repo(config) - return repo, _repo_connect(repo, login, **kwargs) - -# XXX web only method, move to webconfig? 
-def anonymous_session(vreg): - """return a new anonymous session - - raises an AuthenticationError if anonymous usage is not allowed - """ - anoninfo = vreg.config.anonymous_user() - if anoninfo[0] is None: # no anonymous user - raise AuthenticationError('anonymous access is not authorized') - anon_login, anon_password = anoninfo - # use vreg's repository cache - repo = vreg.config.repository(vreg) - anon_cnx = _repo_connect(repo, anon_login, password=anon_password) - anon_cnx.vreg = vreg - return DBAPISession(anon_cnx, anon_login) - - -class _NeedAuthAccessMock(object): - def __getattribute__(self, attr): - raise AuthenticationError() - def __nonzero__(self): - return False - -class DBAPISession(object): - def __init__(self, cnx, login=None): - self.cnx = cnx - self.data = {} - self.login = login - # dbapi session identifier is the same as the first connection - # identifier, but may later differ in case of auto-reconnection as done - # by the web authentication manager (in cw.web.views.authentication) - if cnx is not None: - self.sessionid = cnx.sessionid - else: - self.sessionid = uuid4().hex - - @property - def anonymous_session(self): - return not self.cnx or self.cnx.anonymous_connection - - def __repr__(self): - return '' % self.sessionid - - -class DBAPIRequest(RequestSessionBase): - #: Request language identifier eg: 'en' - lang = None - - def __init__(self, vreg, session=None): - super(DBAPIRequest, self).__init__(vreg) - #: 'language' => translation_function() mapping - try: - # no vreg or config which doesn't handle translations - self.translations = vreg.config.translations - except AttributeError: - self.translations = {} - #: cache entities built during the request - self._eid_cache = {} - if session is not None: - self.set_session(session) - else: - # these args are initialized after a connection is - # established - self.session = DBAPISession(None) - self.cnx = self.user = _NeedAuthAccessMock() - self.set_default_language(vreg) - - def 
get_option_value(self, option, foreid=None): - if foreid is not None: - warn('[3.19] foreid argument is deprecated', DeprecationWarning, - stacklevel=2) - return self.cnx.get_option_value(option) - - def set_session(self, session): - """method called by the session handler when the user is authenticated - or an anonymous connection is open - """ - self.session = session - if session.cnx: - self.cnx = session.cnx - self.execute = session.cnx.cursor(self).execute - self.user = self.cnx.user(self) - self.set_entity_cache(self.user) - - def execute(self, *args, **kwargs): # pylint: disable=E0202 - """overriden when session is set. By default raise authentication error - so authentication is requested. - """ - raise AuthenticationError() - - def set_default_language(self, vreg): - try: - lang = vreg.property_value('ui.language') - except Exception: # property may not be registered - lang = 'en' - try: - self.set_language(lang) - except KeyError: - # this occurs usually during test execution - self._ = self.__ = unicode - self.pgettext = lambda x, y: unicode(y) - - # server-side service call ################################################# - - def call_service(self, regid, **kwargs): - return self.cnx.call_service(regid, **kwargs) - - # entities cache management ############################################### - - def entity_cache(self, eid): - return self._eid_cache[eid] - - def set_entity_cache(self, entity): - self._eid_cache[entity.eid] = entity - - def cached_entities(self): - return self._eid_cache.values() - - def drop_entity_cache(self, eid=None): - if eid is None: - self._eid_cache = {} - else: - del self._eid_cache[eid] - - # low level session data management ####################################### - - @deprecated('[3.19] use session or transaction data') - def get_shared_data(self, key, default=None, pop=False, txdata=False): - """see :meth:`Connection.get_shared_data`""" - return self.cnx.get_shared_data(key, default, pop, txdata) - - @deprecated('[3.19] use 
session or transaction data') - def set_shared_data(self, key, value, txdata=False, querydata=None): - """see :meth:`Connection.set_shared_data`""" - if querydata is not None: - txdata = querydata - warn('[3.10] querydata argument has been renamed to txdata', - DeprecationWarning, stacklevel=2) - return self.cnx.set_shared_data(key, value, txdata) - - # server session compat layer ############################################# - - def entity_metas(self, eid): - """return a tuple (type, sourceuri, extid) for the entity with id """ - return self.cnx.entity_metas(eid) - - def source_defs(self): - """return the definition of sources used by the repository.""" - return self.cnx.source_defs() - - @deprecated('[3.19] use .entity_metas(eid) instead') - def describe(self, eid, asdict=False): - """return a tuple (type, sourceuri, extid) for the entity with id """ - return self.cnx.describe(eid, asdict) - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - info = warning = error = critical = exception = debug = lambda msg,*a,**kw: None - -set_log_methods(DBAPIRequest, getLogger('cubicweb.dbapi')) - - - -# cursor / connection objects ################################################## - -class Cursor(object): - """These objects represent a database cursor, which is used to manage the - context of a fetch operation. Cursors created from the same connection are - not isolated, i.e., any changes done to the database by a cursor are - immediately visible by the other cursors. Cursors created from different - connections are isolated. - """ - - def __init__(self, connection, repo, req=None): - """This read-only attribute return a reference to the Connection - object on which the cursor was created. 
- """ - self.connection = connection - """optionnal issuing request instance""" - self.req = req - self._repo = repo - self._sessid = connection.sessionid - - def close(self): - """no effect""" - pass - - def _txid(self): - return self.connection._txid(self) - - def execute(self, rql, args=None, build_descr=True): - """execute a rql query, return resulting rows and their description in - a :class:`~cubicweb.rset.ResultSet` object - - * `rql` should be a Unicode string or a plain ASCII string, containing - the rql query - - * `args` the optional args dictionary associated to the query, with key - matching named substitution in `rql` - - * `build_descr` is a boolean flag indicating if the description should - be built on select queries (if false, the description will be en empty - list) - - on INSERT queries, there will be one row for each inserted entity, - containing its eid - - on SET queries, XXX describe - - DELETE queries returns no result. - - .. Note:: - to maximize the rql parsing/analyzing cache performance, you should - always use substitute arguments in queries, i.e. 
avoid query such as:: - - execute('Any X WHERE X eid 123') - - use:: - - execute('Any X WHERE X eid %(x)s', {'x': 123}) - """ - rset = self._repo.execute(self._sessid, rql, args, - build_descr=build_descr, **self._txid()) - rset.req = self.req - return rset - - -class LogCursor(Cursor): - """override the standard cursor to log executed queries""" - - def execute(self, operation, parameters=None, build_descr=True): - """override the standard cursor to log executed queries""" - tstart, cstart = time(), clock() - rset = Cursor.execute(self, operation, parameters, build_descr=build_descr) - self.connection.executed_queries.append((operation, parameters, - time() - tstart, clock() - cstart)) - return rset - -def check_not_closed(func): - def decorator(self, *args, **kwargs): - if self._closed is not None: - raise ProgrammingError('Closed connection %s' % self.sessionid) - return func(self, *args, **kwargs) - return decorator - -class Connection(object): - """DB-API 2.0 compatible Connection object for CubicWeb - """ - # make exceptions available through the connection object - ProgrammingError = ProgrammingError - # attributes that may be overriden per connection instance - cursor_class = Cursor - vreg = None - _closed = None - - def __init__(self, repo, cnxid, cnxprops=None): - self._repo = repo - self.sessionid = cnxid - self._close_on_del = getattr(cnxprops, 'close_on_del', True) - self._web_request = False - if cnxprops and cnxprops.log_queries: - self.executed_queries = [] - self.cursor_class = LogCursor - - @property - def is_repo_in_memory(self): - """return True if this is a local, aka in-memory, connection to the - repository - """ - try: - from cubicweb.server.repository import Repository - except ImportError: - # code not available, no way - return False - return isinstance(self._repo, Repository) - - @property # could be a cached property but we want to prevent assigment to - # catch potential programming error. 
- def anonymous_connection(self): - login = self._repo.user_info(self.sessionid)[1] - anon_login = self.vreg.config.get('anonymous-user') - return login == anon_login - - def __repr__(self): - if self.anonymous_connection: - return '' % self.sessionid - return '' % self.sessionid - - def __enter__(self): - return self.cursor() - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is None: - self.commit() - else: - self.rollback() - return False #propagate the exception - - def __del__(self): - """close the remote connection if necessary""" - if self._closed is None and self._close_on_del: - try: - self.close() - except Exception: - pass - - # server-side service call ################################################# - - @check_not_closed - def call_service(self, regid, **kwargs): - return self._repo.call_service(self.sessionid, regid, **kwargs) - - # connection initialization methods ######################################## - - def load_appobjects(self, cubes=_MARKER, subpath=None, expand=True): - config = self.vreg.config - if cubes is _MARKER: - cubes = self._repo.get_cubes() - elif cubes is None: - cubes = () - else: - if not isinstance(cubes, (list, tuple)): - cubes = (cubes,) - if expand: - cubes = config.expand_cubes(cubes) - if subpath is None: - subpath = esubpath = ('entities', 'views') - else: - esubpath = subpath - if 'views' in subpath: - esubpath = list(subpath) - esubpath.remove('views') - esubpath.append(join('web', 'views')) - # first load available configs, necessary for proper persistent - # properties initialization - config.load_available_configs() - # then init cubes - config.init_cubes(cubes) - # then load appobjects into the registry - vpath = config.build_appobjects_path(reversed(config.cubes_path()), - evobjpath=esubpath, - tvobjpath=subpath) - self.vreg.register_objects(vpath) - - def use_web_compatible_requests(self, baseurl, sitetitle=None): - """monkey patch DBAPIRequest to fake a cw.web.request, so you should - able to call 
html views using rset from a simple dbapi connection. - - You should call `load_appobjects` at some point to register those views. - """ - DBAPIRequest.property_value = _fake_property_value - DBAPIRequest.next_tabindex = count().next - DBAPIRequest.relative_path = fake - DBAPIRequest.url = fake - DBAPIRequest.get_page_data = fake - DBAPIRequest.set_page_data = fake - # XXX could ask the repo for it's base-url configuration - self.vreg.config.set_option('base-url', baseurl) - self.vreg.config.uiprops = {} - self.vreg.config.datadir_url = baseurl + '/data' - # XXX why is this needed? if really needed, could be fetched by a query - if sitetitle is not None: - self.vreg['propertydefs']['ui.site-title'] = {'default': sitetitle} - self._web_request = True - - def request(self): - if self._web_request: - from cubicweb.web.request import DBAPICubicWebRequestBase - req = DBAPICubicWebRequestBase(self.vreg, False) - req.get_header = lambda x, default=None: default - req.set_session = lambda session: DBAPIRequest.set_session( - req, session) - req.relative_path = lambda includeparams=True: '' - else: - req = DBAPIRequest(self.vreg) - req.set_session(DBAPISession(self)) - return req - - @check_not_closed - def user(self, req=None, props=None): - """return the User object associated to this connection""" - # cnx validity is checked by the call to .user_info - eid, login, groups, properties = self._repo.user_info(self.sessionid, - props) - if req is None: - req = self.request() - rset = req.eid_rset(eid, 'CWUser') - if self.vreg is not None and 'etypes' in self.vreg: - user = self.vreg['etypes'].etype_class('CWUser')( - req, rset, row=0, groups=groups, properties=properties) - else: - from cubicweb.entity import Entity - user = Entity(req, rset, row=0) - user.cw_attr_cache['login'] = login # cache login - return user - - @check_not_closed - def check(self): - """raise `BadConnectionId` if the connection is no more valid, else - return its latest activity timestamp. 
- """ - return self._repo.check_session(self.sessionid) - - def _txid(self, cursor=None): # pylint: disable=E0202 - # XXX could now handle various isolation level! - # return a dict as bw compat trick - return {'txid': currentThread().getName()} - - # session data methods ##################################################### - - @check_not_closed - def get_shared_data(self, key, default=None, pop=False, txdata=False): - """return value associated to key in the session's data dictionary or - session's transaction's data if `txdata` is true. - - If pop is True, value will be removed from the dictionary. - - If key isn't defined in the dictionary, value specified by the - `default` argument will be returned. - """ - return self._repo.get_shared_data(self.sessionid, key, default, pop, txdata) - - @check_not_closed - def set_shared_data(self, key, value, txdata=False): - """set value associated to `key` in shared data - - if `txdata` is true, the value will be added to the repository - session's query data which are cleared on commit/rollback of the current - transaction. - """ - return self._repo.set_shared_data(self.sessionid, key, value, txdata) - - # meta-data accessors ###################################################### - - @check_not_closed - def source_defs(self): - """Return the definition of sources used by the repository.""" - return self._repo.source_defs() - - @check_not_closed - def get_schema(self): - """Return the schema currently used by the repository.""" - return self._repo.get_schema() - - @check_not_closed - def get_option_value(self, option, foreid=None): - """Return the value for `option` in the configuration. - - `foreid` argument is deprecated and now useless (as of 3.19). 
- """ - if foreid is not None: - warn('[3.19] foreid argument is deprecated', DeprecationWarning, - stacklevel=2) - return self._repo.get_option_value(option) - - - @check_not_closed - def entity_metas(self, eid): - """return a tuple (type, sourceuri, extid) for the entity with id """ - try: - return self._repo.entity_metas(self.sessionid, eid, **self._txid()) - except AttributeError: - # talking to pre 3.19 repository - metas = self._repo.describe(self.sessionid, eid, **self._txid()) - if len(metas) == 3: # even older backward compat - metas = list(metas) - metas.append(metas[1]) - return dict(zip(('type', 'source', 'extid', 'asource'), metas)) - - - @deprecated('[3.19] use .entity_metas(eid) instead') - @check_not_closed - def describe(self, eid, asdict=False): - try: - metas = self._repo.entity_metas(self.sessionid, eid, **self._txid()) - except AttributeError: - metas = self._repo.describe(self.sessionid, eid, **self._txid()) - # talking to pre 3.19 repository - if len(metas) == 3: # even older backward compat - metas = list(metas) - metas.append(metas[1]) - if asdict: - return dict(zip(('type', 'source', 'extid', 'asource'), metas)) - return metas[:-1] - if asdict: - metas['asource'] = meta['source'] # XXX pre 3.19 client compat - return metas - return metas['type'], metas['source'], metas['extid'] - - - # db-api like interface #################################################### - - @check_not_closed - def commit(self): - """Commit pending transaction for this connection to the repository. - - may raises `Unauthorized` or `ValidationError` if we attempted to do - something we're not allowed to for security or integrity reason. - - If the transaction is undoable, a transaction id will be returned. - """ - return self._repo.commit(self.sessionid, **self._txid()) - - @check_not_closed - def rollback(self): - """This method is optional since not all databases provide transaction - support. 
- - In case a database does provide transactions this method causes the the - database to roll back to the start of any pending transaction. Closing - a connection without committing the changes first will cause an implicit - rollback to be performed. - """ - self._repo.rollback(self.sessionid, **self._txid()) - - @check_not_closed - def cursor(self, req=None): - """Return a new Cursor Object using the connection. - - On pyro connection, you should get cursor after calling if - load_appobjects method if desired (which you should call if you intend - to use ORM abilities). - """ - if req is None: - req = self.request() - return self.cursor_class(self, self._repo, req=req) - - @check_not_closed - def close(self): - """Close the connection now (rather than whenever __del__ is called). - - The connection will be unusable from this point forward; an Error (or - subclass) exception will be raised if any operation is attempted with - the connection. The same applies to all cursor objects trying to use the - connection. Note that closing a connection without committing the - changes first will cause an implicit rollback to be performed. - """ - self._repo.close(self.sessionid, **self._txid()) - del self._repo # necessary for proper garbage collection - self._closed = 1 - - # undo support ############################################################ - - @check_not_closed - def undoable_transactions(self, ueid=None, req=None, **actionfilters): - """Return a list of undoable transaction objects by the connection's - user, ordered by descendant transaction time. - - Managers may filter according to user (eid) who has done the transaction - using the `ueid` argument. Others will only see their own transactions. 
- - Additional filtering capabilities is provided by using the following - named arguments: - - * `etype` to get only transactions creating/updating/deleting entities - of the given type - - * `eid` to get only transactions applied to entity of the given eid - - * `action` to get only transactions doing the given action (action in - 'C', 'U', 'D', 'A', 'R'). If `etype`, action can only be 'C', 'U' or - 'D'. - - * `public`: when additional filtering is provided, their are by default - only searched in 'public' actions, unless a `public` argument is given - and set to false. - """ - actionfilters.update(self._txid()) - txinfos = self._repo.undoable_transactions(self.sessionid, ueid, - **actionfilters) - if req is None: - req = self.request() - for txinfo in txinfos: - txinfo.req = req - return txinfos - - @check_not_closed - def transaction_info(self, txuuid, req=None): - """Return transaction object for the given uid. - - raise `NoSuchTransaction` if not found or if session's user is not - allowed (eg not in managers group and the transaction doesn't belong to - him). - """ - txinfo = self._repo.transaction_info(self.sessionid, txuuid, - **self._txid()) - if req is None: - req = self.request() - txinfo.req = req - return txinfo - - @check_not_closed - def transaction_actions(self, txuuid, public=True): - """Return an ordered list of action effectued during that transaction. - - If public is true, return only 'public' actions, eg not ones triggered - under the cover by hooks, else return all actions. - - raise `NoSuchTransaction` if the transaction is not found or if - session's user is not allowed (eg not in managers group and the - transaction doesn't belong to him). - """ - return self._repo.transaction_actions(self.sessionid, txuuid, public, - **self._txid()) - - @check_not_closed - def undo_transaction(self, txuuid): - """Undo the given transaction. Return potential restoration errors. 
- - raise `NoSuchTransaction` if not found or if session's user is not - allowed (eg not in managers group and the transaction doesn't belong to - him). - """ - return self._repo.undo_transaction(self.sessionid, txuuid, - **self._txid()) - -in_memory_cnx = deprecated('[3.16] use _repo_connect instead)')(_repo_connect) diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/changelog --- a/debian/changelog Wed Dec 09 16:36:17 2015 +0100 +++ b/debian/changelog Wed Dec 09 18:24:09 2015 +0100 @@ -1,3 +1,21 @@ +cubicweb (3.21.2-1) unstable; urgency=medium + + * New upstream release. + + -- Rémi Cardona Fri, 09 Oct 2015 18:00:39 +0200 + +cubicweb (3.21.1-1) unstable; urgency=medium + + * new upstream release + + -- Julien Cristau Tue, 28 Jul 2015 18:05:55 +0200 + +cubicweb (3.21.0-1) unstable; urgency=low + + * New upstream release. + + -- Julien Cristau Fri, 10 Jul 2015 17:04:11 +0200 + cubicweb (3.20.11-1) unstable; urgency=medium * New upstream release. diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/control --- a/debian/control Wed Dec 09 16:36:17 2015 +0100 +++ b/debian/control Wed Dec 09 18:24:09 2015 +0100 @@ -20,7 +20,7 @@ python-lxml, Standards-Version: 3.9.1 Homepage: http://www.cubicweb.org -XS-Python-Version: >= 2.6 +X-Python-Version: >= 2.6 Package: cubicweb Architecture: all @@ -58,10 +58,10 @@ | python-pysqlite2, python-passlib Recommends: - pyro (<< 4.0.0), - cubicweb-documentation (= ${source:Version}) + cubicweb-documentation (= ${source:Version}), Suggests: - python-zmq + python-zmq, + python-cwclientlib (>= 0.4.0), Description: server part of the CubicWeb framework CubicWeb is a semantic web application framework. . @@ -109,7 +109,6 @@ cubicweb-ctl (= ${source:Version}), python-twisted-web Recommends: - pyro (<< 4.0.0), cubicweb-documentation (= ${source:Version}) Description: twisted-based web interface for the CubicWeb framework CubicWeb is a semantic web application framework. 
@@ -137,6 +136,7 @@ Breaks: cubicweb-inlinedit (<< 1.1.1), cubicweb-bootstrap (<< 0.6.6), + cubicweb-folder (<< 1.10.0), Description: web interface library for the CubicWeb framework CubicWeb is a semantic web application framework. . diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-common.install --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/debian/cubicweb-common.install Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,4 @@ +usr/lib/python2*/*-packages/cubicweb/entities/ +usr/lib/python2*/*-packages/cubicweb/ext/ +usr/share/cubicweb/cubes/ +usr/lib/python2*/*-packages/cubicweb/*.py diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-common.install.in --- a/debian/cubicweb-common.install.in Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,4 +0,0 @@ -usr/lib/PY_VERSION/*-packages/cubicweb/entities/ -usr/lib/PY_VERSION/*-packages/cubicweb/ext/ -usr/share/cubicweb/cubes/ -usr/lib/PY_VERSION/*-packages/cubicweb/*.py diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-ctl.cubicweb.init --- a/debian/cubicweb-ctl.cubicweb.init Wed Dec 09 16:36:17 2015 +0100 +++ b/debian/cubicweb-ctl.cubicweb.init Wed Dec 09 18:24:09 2015 +0100 @@ -4,16 +4,14 @@ # Provides: cubicweb # Required-Start: $remote_fs $syslog $local_fs $network # Required-Stop: $remote_fs $syslog $local_fs $network -# Should-Start: postgresql pyro-nsd -# Should-Stop: postgresql pyro-nsd +# Should-Start: postgresql +# Should-Stop: postgresql # Default-Start: 2 3 4 5 # Default-Stop: 0 1 6 # Short-Description: Start cubicweb application at boot time ### END INIT INFO # FIXME Seems to be inadequate here -# FIXME If related to pyro, try instead: -# export PYRO_STORAGE="/tmp" cd /tmp # FIXME Work-around about the following lintian error diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-ctl.install --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/debian/cubicweb-ctl.install Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,3 @@ +usr/bin/cubicweb-ctl usr/bin/ 
+usr/lib/python2*/*-packages/cubicweb/cwctl.py +../cubicweb-ctl.bash_completion etc/bash_completion.d/cubicweb-ctl diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-ctl.install.in --- a/debian/cubicweb-ctl.install.in Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +0,0 @@ -usr/bin/cubicweb-ctl usr/bin/ -usr/lib/PY_VERSION/*-packages/cubicweb/cwctl.py -../cubicweb-ctl.bash_completion etc/bash_completion.d/cubicweb-ctl diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-ctl.postinst --- a/debian/cubicweb-ctl.postinst Wed Dec 09 16:36:17 2015 +0100 +++ b/debian/cubicweb-ctl.postinst Wed Dec 09 18:24:09 2015 +0100 @@ -10,32 +10,6 @@ ;; esac -if [ "$1" = configure ]; then - # XXX bw compat: erudi -> cubicweb migration - if [ -e "/etc/erudi.d/" ]; then - mv /etc/erudi.d/* /etc/cubicweb.d/ && ( - echo 'moved /etc/erudi.d/* to /etc/cubicweb.d/' - sed -i s/ginco/cubicweb/g /etc/*/*.py - sed -i s/erudi/cubicweb/ */*.conf - ) || true # empty dir - fi - if [ -e "/var/log/erudi/" ]; then - mv /var/log/erudi/* /var/log/cubicweb/ && ( - echo 'moved /var/log/erudi/* to /var/log/cubicweb/' - ) || true # empty dir - fi - if [ -e "/var/lib/erudi/backup" ]; then - mv /var/lib/erudi/backup/* /var/lib/cubicweb/backup/ && ( - echo 'moved /var/lib/erudi/backup/* to /var/lib/cubicweb/backup/' - ) || true # empty dir - fi - if [ -e "/var/lib/erudi/instances" ]; then - mv /var/lib/erudi/instances/* /var/lib/cubicweb/instances/ && ( - echo 'moved /var/lib/erudi/instances/* to /var/lib/cubicweb/instances/' - ) || true # empty dir - fi -fi - #DEBHELPER# exit 0 diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-ctl.postrm --- a/debian/cubicweb-ctl.postrm Wed Dec 09 16:36:17 2015 +0100 +++ b/debian/cubicweb-ctl.postrm Wed Dec 09 18:24:09 2015 +0100 @@ -1,8 +1,15 @@ #!/bin/sh -e -if [ "$1" = "purge" ] ; then + +if [ "$1" = "remove" ]; then update-rc.d cubicweb remove >/dev/null fi +if [ "$1" = "purge" ] ; then + rm -rf /etc/cubicweb.d/ + rm -rf 
/var/log/cubicweb/ + rm -rf /var/lib/cubicweb/ +fi + #DEBHELPER# exit 0 diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-ctl.prerm --- a/debian/cubicweb-ctl.prerm Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,13 +0,0 @@ -#! /bin/sh -e - -case "$1" in - purge) - rm -rf /etc/cubicweb.d/ - rm -rf /var/log/cubicweb/ - rm -rf /var/lib/cubicweb/ - ;; -esac - -#DEBHELPER# - -exit 0 diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-dev.install --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/debian/cubicweb-dev.install Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,11 @@ +usr/lib/python2*/*-packages/cubicweb/devtools/ +usr/lib/python2*/*-packages/cubicweb/skeleton/ +usr/lib/python2*/*-packages/cubicweb/test +usr/lib/python2*/*-packages/cubicweb/dataimport/test +usr/lib/python2*/*-packages/cubicweb/entities/test +usr/lib/python2*/*-packages/cubicweb/ext/test +usr/lib/python2*/*-packages/cubicweb/server/test +usr/lib/python2*/*-packages/cubicweb/sobjects/test +usr/lib/python2*/*-packages/cubicweb/hooks/test +usr/lib/python2*/*-packages/cubicweb/web/test +usr/lib/python2*/*-packages/cubicweb/etwist/test diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-dev.install.in --- a/debian/cubicweb-dev.install.in Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,10 +0,0 @@ -usr/lib/PY_VERSION/*-packages/cubicweb/devtools/ -usr/lib/PY_VERSION/*-packages/cubicweb/skeleton/ -usr/lib/PY_VERSION/*-packages/cubicweb/test -usr/lib/PY_VERSION/*-packages/cubicweb/entities/test -usr/lib/PY_VERSION/*-packages/cubicweb/ext/test -usr/lib/PY_VERSION/*-packages/cubicweb/server/test -usr/lib/PY_VERSION/*-packages/cubicweb/sobjects/test -usr/lib/PY_VERSION/*-packages/cubicweb/hooks/test -usr/lib/PY_VERSION/*-packages/cubicweb/web/test -usr/lib/PY_VERSION/*-packages/cubicweb/etwist/test diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-doc --- a/debian/cubicweb-doc Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 
00:00:00 1970 +0000 @@ -1,9 +0,0 @@ -Document: cubicweb-doc -Title: CubicWeb documentation -Author: Logilab -Abstract: Some base documentation for CubicWeb users and developpers -Section: Apps/Programming - -Format: HTML -Index: /usr/share/doc/cubicweb-documentation/index.html -Files: /usr/share/doc/cubicweb-documentation/*.html diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-documentation.doc-base --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/debian/cubicweb-documentation.doc-base Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,9 @@ +Document: cubicweb-doc +Title: CubicWeb documentation +Author: Logilab +Abstract: Some base documentation for CubicWeb users and developpers +Section: Apps/Programming + +Format: HTML +Index: /usr/share/doc/cubicweb-documentation/index.html +Files: /usr/share/doc/cubicweb-documentation/*.html diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-documentation.install --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/debian/cubicweb-documentation.install Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,2 @@ +../../doc/book usr/share/doc/cubicweb-documentation +../../doc/_build/html usr/share/doc/cubicweb-documentation diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-documentation.install.in --- a/debian/cubicweb-documentation.install.in Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +0,0 @@ -../../doc/book usr/share/doc/cubicweb-documentation -../../doc/html usr/share/doc/cubicweb-documentation -../../debian/cubicweb-doc usr/share/doc-base/cubicweb-doc diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-documentation.postinst --- a/debian/cubicweb-documentation.postinst Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,13 +0,0 @@ -#! 
/bin/sh -e -# - -if [ "$1" = configure ]; then - if which install-docs >/dev/null 2>&1; then - install-docs -i /usr/share/doc-base/cubicweb-doc - fi -fi - - -#DEBHELPER# - -exit 0 diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-documentation.prerm --- a/debian/cubicweb-documentation.prerm Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,12 +0,0 @@ -#! /bin/sh -e -# - -if [ "$1" = remove -o "$1" = upgrade ]; then - if which install-docs >/dev/null 2>&1; then - install-docs -r cubicweb-doc - fi -fi - -#DEBHELPER# - -exit 0 diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-server.install --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/debian/cubicweb-server.install Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,6 @@ +usr/lib/python2*/*-packages/cubicweb/dataimport/ +usr/lib/python2*/*-packages/cubicweb/server/ +usr/lib/python2*/*-packages/cubicweb/hooks/ +usr/lib/python2*/*-packages/cubicweb/sobjects/ +usr/lib/python2*/*-packages/cubicweb/schemas/ +usr/share/cubicweb/migration/ diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-server.install.in --- a/debian/cubicweb-server.install.in Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +0,0 @@ -usr/lib/PY_VERSION/*-packages/cubicweb/server/ -usr/lib/PY_VERSION/*-packages/cubicweb/hooks/ -usr/lib/PY_VERSION/*-packages/cubicweb/sobjects/ -usr/lib/PY_VERSION/*-packages/cubicweb/schemas/ -usr/share/cubicweb/migration/ diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-twisted.install --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/debian/cubicweb-twisted.install Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,1 @@ +usr/lib/python2*/*-packages/cubicweb/etwist/ diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-twisted.install.in --- a/debian/cubicweb-twisted.install.in Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -usr/lib/PY_VERSION/*-packages/cubicweb/etwist/ diff -r 2fe19ba68daa -r 2fdf67ef3341 
debian/cubicweb-web.install --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/debian/cubicweb-web.install Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,4 @@ +usr/lib/python2*/*-packages/cubicweb/web +usr/lib/python2*/*-packages/cubicweb/wsgi +usr/share/cubicweb/cubes/shared/data +usr/share/cubicweb/cubes/shared/wdoc diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/cubicweb-web.install.in --- a/debian/cubicweb-web.install.in Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,4 +0,0 @@ -usr/lib/PY_VERSION/*-packages/cubicweb/web -usr/lib/PY_VERSION/*-packages/cubicweb/wsgi -usr/share/cubicweb/cubes/shared/data -usr/share/cubicweb/cubes/shared/wdoc diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/pycompat --- a/debian/pycompat Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -2 diff -r 2fe19ba68daa -r 2fdf67ef3341 debian/rules --- a/debian/rules Wed Dec 09 16:36:17 2015 +0100 +++ b/debian/rules Wed Dec 09 18:24:09 2015 +0100 @@ -5,8 +5,6 @@ # Uncomment this to turn on verbose mode. #export DH_VERBOSE=1 -PY_VERSION:=$(shell pyversions -d) - build: build-stamp build-stamp: dh_testdir @@ -17,7 +15,7 @@ # documentation build is now made optional since it can break for old # distributions and we don't want to block a new release of Cubicweb # because of documentation issues. - -PYTHONPATH=$${PYTHONPATH:+$${PYTHONPATH}:}$(CURDIR)/debian/pythonpath $(MAKE) -C doc/book/en all + -PYTHONPATH=$${PYTHONPATH:+$${PYTHONPATH}:}$(CURDIR)/debian/pythonpath $(MAKE) -C doc all rm -rf debian/pythonpath touch build-stamp @@ -27,10 +25,10 @@ rm -rf build #rm -rf debian/cubicweb-*/ find . 
-name "*.pyc" -delete - rm -f $(basename $(wildcard debian/*.in)) + -$(MAKE) -C doc clean dh_clean -install: build $(basename $(wildcard debian/*.in)) +install: build dh_testdir dh_testroot dh_clean @@ -49,30 +47,28 @@ dh_lintian # Remove unittests directory (should be available in cubicweb-dev only) - rm -rf debian/cubicweb-server/usr/lib/${PY_VERSION}/*-packages/cubicweb/server/test - rm -rf debian/cubicweb-server/usr/lib/${PY_VERSION}/*-packages/cubicweb/hooks/test - rm -rf debian/cubicweb-server/usr/lib/${PY_VERSION}/*-packages/cubicweb/sobjects/test - rm -rf debian/cubicweb-web/usr/lib/${PY_VERSION}/*-packages/cubicweb/web/test - rm -rf debian/cubicweb-twisted/usr/lib/${PY_VERSION}/*-packages/cubicweb/etwist/test - rm -rf debian/cubicweb-common/usr/lib/${PY_VERSION}/*-packages/cubicweb/ext/test - rm -rf debian/cubicweb-common/usr/lib/${PY_VERSION}/*-packages/cubicweb/entities/test + rm -rf debian/cubicweb-server/usr/lib/python2*/*-packages/cubicweb/server/test + rm -rf debian/cubicweb-server/usr/lib/python2*/*-packages/cubicweb/hooks/test + rm -rf debian/cubicweb-server/usr/lib/python2*/*-packages/cubicweb/sobjects/test + rm -rf debian/cubicweb-web/usr/lib/python2*/*-packages/cubicweb/web/test + rm -rf debian/cubicweb-twisted/usr/lib/python2*/*-packages/cubicweb/etwist/test + rm -rf debian/cubicweb-common/usr/lib/python2*/*-packages/cubicweb/ext/test + rm -rf debian/cubicweb-common/usr/lib/python2*/*-packages/cubicweb/entities/test -%: %.in - sed "s/PY_VERSION/${PY_VERSION}/g" < $< > $@ - # Build architecture-independent files here. 
binary-indep: build install dh_testdir dh_testroot -i dh_python2 -i + dh_python2 -i /usr/share/cubicweb dh_installinit -i -n --name cubicweb -u"defaults 99" dh_installlogrotate -i dh_installdocs -i -A README dh_installman -i - dh_installchangelogs -i + dh_installchangelogs -i -Xdoc/changes dh_link -i - dh_compress -i -X.py -X.ini -X.xml -X.js -X.rst -X.txt + dh_compress -i -X.py -X.ini -X.xml -X.js -X.rst -X.txt -Xchangelog.html dh_fixperms -i dh_installdeb -i dh_gencontrol -i diff -r 2fe19ba68daa -r 2fdf67ef3341 devtools/__init__.py --- a/devtools/__init__.py Wed Dec 09 16:36:17 2015 +0100 +++ b/devtools/__init__.py Wed Dec 09 18:24:09 2015 +0100 @@ -26,7 +26,6 @@ import shutil import pickle import glob -import random import subprocess import warnings import tempfile @@ -93,8 +92,6 @@ DEFAULT_PSQL_SOURCES = DEFAULT_SOURCES.copy() DEFAULT_PSQL_SOURCES['system'] = DEFAULT_SOURCES['system'].copy() DEFAULT_PSQL_SOURCES['system']['db-driver'] = 'postgres' -DEFAULT_PSQL_SOURCES['system']['db-host'] = '/tmp' -DEFAULT_PSQL_SOURCES['system']['db-port'] = str(random.randrange(5432, 2**16)) DEFAULT_PSQL_SOURCES['system']['db-user'] = unicode(getpass.getuser()) DEFAULT_PSQL_SOURCES['system']['db-password'] = None @@ -176,8 +173,8 @@ return self._apphome appdatahome = apphome - def load_configuration(self): - super(TestServerConfiguration, self).load_configuration() + def load_configuration(self, **kw): + super(TestServerConfiguration, self).load_configuration(**kw) # no undo support in tests self.global_set_option('undo-enabled', 'n') @@ -237,10 +234,6 @@ def available_languages(self, *args): return self.cw_languages() - def pyro_enabled(self): - # but export PYRO_MULTITHREAD=0 or you get problems with sqlite and - # threads - return True # XXX merge with BaseApptestConfiguration ? 
class ApptestConfiguration(BaseApptestConfiguration): @@ -251,7 +244,7 @@ skip_db_create_and_restore = False def __init__(self, appid, apphome=None, - log_threshold=logging.CRITICAL, sourcefile=None): + log_threshold=logging.WARNING, sourcefile=None): BaseApptestConfiguration.__init__(self, appid, apphome, log_threshold=log_threshold) self.init_repository = sourcefile is None @@ -303,6 +296,14 @@ # pure consistency check assert self.system_source['db-driver'] == self.DRIVER + # some handlers want to store info here, avoid a warning + from cubicweb.server.sources.native import NativeSQLSource + NativeSQLSource.options += ( + ('global-db-name', + {'type': 'string', 'help': 'for internal use only' + }), + ) + def _ensure_test_backup_db_dir(self): """Return path of directory for database backup. @@ -398,9 +399,9 @@ def _new_repo(self, config): """Factory method to create a new Repository Instance""" - from cubicweb.dbapi import in_memory_repo + from cubicweb.repoapi import _get_inmemory_repo config._cubes = None - repo = in_memory_repo(config) + repo = _get_inmemory_repo(config) config.repository = lambda x=None: repo # extending Repository class repo._has_started = False @@ -499,7 +500,7 @@ repo = self.get_repo(startup=True) cnx = self.get_cnx() with cnx: - pre_setup_func(cnx._cnx, self.config) + pre_setup_func(cnx, self.config) cnx.commit() self.backup_database(test_db_id) @@ -534,6 +535,48 @@ ### postgres test database handling ############################################ +def startpgcluster(pyfile): + """Start a postgresql cluster next to pyfile""" + datadir = join(os.path.dirname(pyfile), 'data', + 'pgdb-%s' % os.path.splitext(os.path.basename(pyfile))[0]) + if not exists(datadir): + try: + subprocess.check_call(['initdb', '-D', datadir, '-E', 'utf-8', '--locale=C']) + + except OSError, err: + if err.errno == errno.ENOENT: + raise OSError('"initdb" could not be found. 
' + 'You should add the postgresql bin folder to your PATH ' + '(/usr/lib/postgresql/9.1/bin for example).') + raise + datadir = os.path.abspath(datadir) + pgport = '5432' + env = os.environ.copy() + sockdir = tempfile.mkdtemp(prefix='cwpg') + DEFAULT_PSQL_SOURCES['system']['db-host'] = sockdir + DEFAULT_PSQL_SOURCES['system']['db-port'] = pgport + options = '-h "" -k %s -p %s' % (sockdir, pgport) + options += ' -c fsync=off -c full_page_writes=off' + options += ' -c synchronous_commit=off' + try: + subprocess.check_call(['pg_ctl', 'start', '-w', '-D', datadir, + '-o', options], + env=env) + except OSError, err: + if err.errno == errno.ENOENT: + raise OSError('"pg_ctl" could not be found. ' + 'You should add the postgresql bin folder to your PATH ' + '(/usr/lib/postgresql/9.1/bin for example).') + raise + + +def stoppgcluster(pyfile): + """Kill the postgresql cluster running next to pyfile""" + datadir = join(os.path.dirname(pyfile), 'data', + 'pgdb-%s' % os.path.splitext(os.path.basename(pyfile))[0]) + subprocess.call(['pg_ctl', 'stop', '-D', datadir, '-m', 'fast']) + + class PostgresTestDataBaseHandler(TestDataBaseHandler): DRIVER = 'postgres' @@ -543,45 +586,11 @@ __CTL = set() - @classmethod - def killall(cls): - for datadir in cls.__CTL: - subprocess.call(['pg_ctl', 'stop', '-D', datadir, '-m', 'fast']) - def __init__(self, *args, **kwargs): super(PostgresTestDataBaseHandler, self).__init__(*args, **kwargs) - datadir = realpath(join(self.config.apphome, 'pgdb')) - if datadir in self.__CTL: - return - if not exists(datadir): - try: - subprocess.check_call(['initdb', '-D', datadir, '-E', 'utf-8', '--locale=C']) - - except OSError, err: - if err.errno == errno.ENOENT: - raise OSError('"initdb" could not be found. 
' - 'You should add the postgresql bin folder to your PATH ' - '(/usr/lib/postgresql/9.1/bin for example).') - raise - port = self.system_source['db-port'] - directory = self.system_source['db-host'] - env = os.environ.copy() - env['PGPORT'] = str(port) - env['PGHOST'] = str(directory) - options = '-h "" -k %s -p %s' % (directory, port) - options += ' -c fsync=off -c full_page_writes=off' - options += ' -c synchronous_commit=off' - try: - subprocess.check_call(['pg_ctl', 'start', '-w', '-D', datadir, - '-o', options], - env=env) - except OSError, err: - if err.errno == errno.ENOENT: - raise OSError('"pg_ctl" could not be found. ' - 'You should add the postgresql bin folder to your PATH ' - '(/usr/lib/postgresql/9.1/bin for example).') - raise - self.__CTL.add(datadir) + if 'global-db-name' not in self.system_source: + self.system_source['global-db-name'] = self.system_source['db-name'] + self.system_source['db-name'] = self.system_source['db-name'] + str(os.getpid()) @property @cached @@ -590,6 +599,10 @@ return get_db_helper('postgres') @property + def dbname(self): + return self.system_source['global-db-name'] + + @property def dbcnx(self): try: return self._cnx @@ -615,13 +628,18 @@ return backup_name return None + def has_cache(self, db_id): + backup_name = self._backup_name(db_id) + return (super(PostgresTestDataBaseHandler, self).has_cache(db_id) + and backup_name in self.helper.list_databases(self.cursor)) + def init_test_database(self): """initialize a fresh postgresql database used for testing purpose""" from cubicweb.server import init_repository from cubicweb.server.serverctl import system_source_cnx, createdb # connect on the dbms system base to create our base try: - self._drop(self.dbname) + self._drop(self.system_source['db-name']) createdb(self.helper, self.system_source, self.dbcnx, self.cursor) self.dbcnx.commit() cnx = system_source_cnx(self.system_source, special_privs='LANGUAGE C', @@ -699,7 +717,7 @@ """Actual restore of the current database. 
Use the value tostored in db_cache as input """ - self._drop(self.dbname) + self._drop(self.system_source['db-name']) createdb(self.helper, self.system_source, self.dbcnx, self.cursor, template=backup_coordinates) self.dbcnx.commit() @@ -771,7 +789,7 @@ dbfile = self.absolute_dbfile() backup_file = self.absolute_backup_file(db_id, 'sqlite') shutil.copy(dbfile, backup_file) - # Usefull to debug WHO write a database + # Useful to debug WHO writes a database # backup_stack = self.absolute_backup_file(db_id, '.stack') #with open(backup_stack, 'w') as backup_stack_file: # import traceback @@ -800,7 +818,6 @@ import atexit atexit.register(SQLiteTestDataBaseHandler._cleanup_all_tmpdb) -atexit.register(PostgresTestDataBaseHandler.killall) def install_sqlite_patch(querier): diff -r 2fe19ba68daa -r 2fdf67ef3341 devtools/data/qunit.css --- a/devtools/data/qunit.css Wed Dec 09 16:36:17 2015 +0100 +++ b/devtools/data/qunit.css Wed Dec 09 18:24:09 2015 +0100 @@ -1,119 +1,291 @@ +/*! + * QUnit 1.18.0 + * http://qunitjs.com/ + * + * Copyright jQuery Foundation and other contributors + * Released under the MIT license + * http://jquery.org/license + * + * Date: 2015-04-03T10:23Z + */ -ol#qunit-tests { - font-family:"Helvetica Neue Light", "HelveticaNeue-Light", "Helvetica Neue", Calibri, Helvetica, Arial; - margin:0; - padding:0; - list-style-position:inside; +/** Font Family and Sizes */ + +#qunit-tests, #qunit-header, #qunit-banner, #qunit-testrunner-toolbar, #qunit-userAgent, #qunit-testresult { + font-family: "Helvetica Neue Light", "HelveticaNeue-Light", "Helvetica Neue", Calibri, Helvetica, Arial, sans-serif; +} + +#qunit-testrunner-toolbar, #qunit-userAgent, #qunit-testresult, #qunit-tests li { font-size: small; } +#qunit-tests { font-size: smaller; } + + +/** Resets */ + +#qunit-tests, #qunit-header, #qunit-banner, #qunit-userAgent, #qunit-testresult, #qunit-modulefilter { + margin: 0; + padding: 0; +} + + +/** Header */ + +#qunit-header { + padding: 0.5em 0 0.5em 1em; - 
font-size: smaller; + color: #8699A4; + background-color: #0D3349; + + font-size: 1.5em; + line-height: 1em; + font-weight: 400; + + border-radius: 5px 5px 0 0; +} + +#qunit-header a { + text-decoration: none; + color: #C2CCD1; } -ol#qunit-tests li{ - padding:0.4em 0.5em 0.4em 2.5em; - border-bottom:1px solid #fff; - font-size:small; - list-style-position:inside; + +#qunit-header a:hover, +#qunit-header a:focus { + color: #FFF; +} + +#qunit-testrunner-toolbar label { + display: inline-block; + padding: 0 0.5em 0 0.1em; +} + +#qunit-banner { + height: 5px; +} + +#qunit-testrunner-toolbar { + padding: 0.5em 1em 0.5em 1em; + color: #5E740B; + background-color: #EEE; + overflow: hidden; } -ol#qunit-tests li ol{ - box-shadow: inset 0px 2px 13px #999; - -moz-box-shadow: inset 0px 2px 13px #999; - -webkit-box-shadow: inset 0px 2px 13px #999; - margin-top:0.5em; - margin-left:0; - padding:0.5em; - background-color:#fff; - border-radius:15px; - -moz-border-radius: 15px; - -webkit-border-radius: 15px; + +#qunit-userAgent { + padding: 0.5em 1em 0.5em 1em; + background-color: #2B81AF; + color: #FFF; + text-shadow: rgba(0, 0, 0, 0.5) 2px 2px 1px; +} + +#qunit-modulefilter-container { + float: right; + padding: 0.2em; +} + +.qunit-url-config { + display: inline-block; + padding: 0.1em; +} + +.qunit-filter { + display: block; + float: right; + margin-left: 1em; +} + +/** Tests: Pass/Fail */ + +#qunit-tests { + list-style-position: inside; +} + +#qunit-tests li { + padding: 0.4em 1em 0.4em 1em; + border-bottom: 1px solid #FFF; + list-style-position: inside; } -ol#qunit-tests li li{ - border-bottom:none; - margin:0.5em; - background-color:#fff; - list-style-position: inside; - padding:0.4em 0.5em 0.4em 0.5em; + +#qunit-tests > li { + display: none; +} + +#qunit-tests li.running, +#qunit-tests li.pass, +#qunit-tests li.fail, +#qunit-tests li.skipped { + display: list-item; +} + +#qunit-tests.hidepass li.running, +#qunit-tests.hidepass li.pass { + visibility: hidden; + position: 
absolute; + width: 0px; + height: 0px; + padding: 0; + border: 0; + margin: 0; +} + +#qunit-tests li strong { + cursor: pointer; +} + +#qunit-tests li.skipped strong { + cursor: default; +} + +#qunit-tests li a { + padding: 0.5em; + color: #C2CCD1; + text-decoration: none; } -ol#qunit-tests li li.pass{ - border-left:26px solid #C6E746; - background-color:#fff; - color:#5E740B; - } -ol#qunit-tests li li.fail{ - border-left:26px solid #EE5757; - background-color:#fff; - color:#710909; +#qunit-tests li p a { + padding: 0.25em; + color: #6B6464; +} +#qunit-tests li a:hover, +#qunit-tests li a:focus { + color: #000; +} + +#qunit-tests li .runtime { + float: right; + font-size: smaller; } -ol#qunit-tests li.pass{ - background-color:#D2E0E6; - color:#528CE0; + +.qunit-assert-list { + margin-top: 0.5em; + padding: 0.5em; + + background-color: #FFF; + + border-radius: 5px; } -ol#qunit-tests li.fail{ - background-color:#EE5757; - color:#000; + +.qunit-collapsed { + display: none; +} + +#qunit-tests table { + border-collapse: collapse; + margin-top: 0.2em; } -ol#qunit-tests li strong { - cursor:pointer; + +#qunit-tests th { + text-align: right; + vertical-align: top; + padding: 0 0.5em 0 0; +} + +#qunit-tests td { + vertical-align: top; +} + +#qunit-tests pre { + margin: 0; + white-space: pre-wrap; + word-wrap: break-word; } -h1#qunit-header{ - background-color:#0d3349; - margin:0; - padding:0.5em 0 0.5em 1em; - color:#fff; - font-family:"Helvetica Neue Light", "HelveticaNeue-Light", "Helvetica Neue", Calibri, Helvetica, Arial; - border-top-right-radius:15px; - border-top-left-radius:15px; - -moz-border-radius-topright:15px; - -moz-border-radius-topleft:15px; - -webkit-border-top-right-radius:15px; - -webkit-border-top-left-radius:15px; - text-shadow: rgba(0, 0, 0, 0.5) 4px 4px 1px; + +#qunit-tests del { + background-color: #E0F2BE; + color: #374E0C; + text-decoration: none; +} + +#qunit-tests ins { + background-color: #FFCACA; + color: #500; + text-decoration: none; } 
-h2#qunit-banner{ - font-family:"Helvetica Neue Light", "HelveticaNeue-Light", "Helvetica Neue", Calibri, Helvetica, Arial; - height:5px; - margin:0; - padding:0; + +/*** Test Counts */ + +#qunit-tests b.counts { color: #000; } +#qunit-tests b.passed { color: #5E740B; } +#qunit-tests b.failed { color: #710909; } + +#qunit-tests li li { + padding: 5px; + background-color: #FFF; + border-bottom: none; + list-style-position: inside; } -h2#qunit-banner.qunit-pass{ - background-color:#C6E746; -} -h2#qunit-banner.qunit-fail, #qunit-testrunner-toolbar { - background-color:#EE5757; + +/*** Passing Styles */ + +#qunit-tests li li.pass { + color: #3C510C; + background-color: #FFF; + border-left: 10px solid #C6E746; } -#qunit-testrunner-toolbar { - font-family:"Helvetica Neue Light", "HelveticaNeue-Light", "Helvetica Neue", Calibri, Helvetica, Arial; - padding:0; - /*width:80%;*/ - padding:0em 0 0.5em 2em; - font-size: small; + +#qunit-tests .pass { color: #528CE0; background-color: #D2E0E6; } +#qunit-tests .pass .test-name { color: #366097; } + +#qunit-tests .pass .test-actual, +#qunit-tests .pass .test-expected { color: #999; } + +#qunit-banner.qunit-pass { background-color: #C6E746; } + +/*** Failing Styles */ + +#qunit-tests li li.fail { + color: #710909; + background-color: #FFF; + border-left: 10px solid #EE5757; + white-space: pre; +} + +#qunit-tests > li:last-child { + border-radius: 0 0 5px 5px; } -h2#qunit-userAgent { - font-family:"Helvetica Neue Light", "HelveticaNeue-Light", "Helvetica Neue", Calibri, Helvetica, Arial; - background-color:#2b81af; - margin:0; - padding:0; - color:#fff; - font-size: small; - padding:0.5em 0 0.5em 2.5em; - text-shadow: rgba(0, 0, 0, 0.5) 2px 2px 1px; + +#qunit-tests .fail { color: #000; background-color: #EE5757; } +#qunit-tests .fail .test-name, +#qunit-tests .fail .module-name { color: #000; } + +#qunit-tests .fail .test-actual { color: #EE5757; } +#qunit-tests .fail .test-expected { color: #008000; } + +#qunit-banner.qunit-fail { 
background-color: #EE5757; } + +/*** Skipped tests */ + +#qunit-tests .skipped { + background-color: #EBECE9; +} + +#qunit-tests .qunit-skipped-label { + background-color: #F4FF77; + display: inline-block; + font-style: normal; + color: #366097; + line-height: 1.8em; + padding: 0 0.5em; + margin: -0.4em 0.4em -0.4em 0; } -p#qunit-testresult{ - font-family:"Helvetica Neue Light", "HelveticaNeue-Light", "Helvetica Neue", Calibri, Helvetica, Arial; - margin:0; - font-size: small; - color:#2b81af; - border-bottom-right-radius:15px; - border-bottom-left-radius:15px; - -moz-border-radius-bottomright:15px; - -moz-border-radius-bottomleft:15px; - -webkit-border-bottom-right-radius:15px; - -webkit-border-bottom-left-radius:15px; - background-color:#D2E0E6; - padding:0.5em 0.5em 0.5em 2.5em; + +/** Result */ + +#qunit-testresult { + padding: 0.5em 1em 0.5em 1em; + + color: #2B81AF; + background-color: #D2E0E6; + + border-bottom: 1px solid #FFF; } -strong b.fail{ - color:#710909; - } -strong b.pass{ - color:#5E740B; - } +#qunit-testresult .module-name { + font-weight: 700; +} + +/** Fixture */ + +#qunit-fixture { + position: absolute; + top: -10000px; + left: -10000px; + width: 1000px; + height: 1000px; +} diff -r 2fe19ba68daa -r 2fdf67ef3341 devtools/data/qunit.js --- a/devtools/data/qunit.js Wed Dec 09 16:36:17 2015 +0100 +++ b/devtools/data/qunit.js Wed Dec 09 18:24:09 2015 +0100 @@ -1,643 +1,713 @@ -/* - * QUnit - A JavaScript Unit Testing Framework - * - * http://docs.jquery.com/QUnit +/*! + * QUnit 1.18.0 + * http://qunitjs.com/ * - * Copyright (c) 2009 John Resig, Jörn Zaefferer - * Dual licensed under the MIT (MIT-LICENSE.txt) - * and GPL (GPL-LICENSE.txt) licenses. 
+ * Copyright jQuery Foundation and other contributors + * Released under the MIT license + * http://jquery.org/license + * + * Date: 2015-04-03T10:23Z */ -(function(window) { - -var QUnit = { - - // Initialize the configuration options - init: function() { - config = { - stats: { all: 0, bad: 0 }, - moduleStats: { all: 0, bad: 0 }, - started: +new Date, - updateRate: 1000, - blocking: false, - autorun: false, - assertions: [], - filters: [], - queue: [] - }; - - var tests = id("qunit-tests"), - banner = id("qunit-banner"), - result = id("qunit-testresult"); - - if ( tests ) { - tests.innerHTML = ""; - } - - if ( banner ) { - banner.className = ""; - } - - if ( result ) { - result.parentNode.removeChild( result ); +(function( window ) { + +var QUnit, + config, + onErrorFnPrev, + loggingCallbacks = {}, + fileName = ( sourceFromStacktrace( 0 ) || "" ).replace( /(:\d+)+\)?/, "" ).replace( /.+\//, "" ), + toString = Object.prototype.toString, + hasOwn = Object.prototype.hasOwnProperty, + // Keep a local reference to Date (GH-283) + Date = window.Date, + now = Date.now || function() { + return new Date().getTime(); + }, + globalStartCalled = false, + runStarted = false, + setTimeout = window.setTimeout, + clearTimeout = window.clearTimeout, + defined = { + document: window.document !== undefined, + setTimeout: window.setTimeout !== undefined, + sessionStorage: (function() { + var x = "qunit-test-string"; + try { + sessionStorage.setItem( x, x ); + sessionStorage.removeItem( x ); + return true; + } catch ( e ) { + return false; + } + }()) + }, + /** + * Provides a normalized error string, correcting an issue + * with IE 7 (and prior) where Error.prototype.toString is + * not properly implemented + * + * Based on http://es5.github.com/#x15.11.4.4 + * + * @param {String|Error} error + * @return {String} error message + */ + errorString = function( error ) { + var name, message, + errorString = error.toString(); + if ( errorString.substring( 0, 7 ) === "[object" ) { + name 
= error.name ? error.name.toString() : "Error"; + message = error.message ? error.message.toString() : ""; + if ( name && message ) { + return name + ": " + message; + } else if ( name ) { + return name; + } else if ( message ) { + return message; + } else { + return "Error"; + } + } else { + return errorString; } }, - - // call on start of module test to prepend name to all tests - module: function(name, testEnvironment) { - config.currentModule = name; - - synchronize(function() { - if ( config.currentModule ) { - QUnit.moduleDone( config.currentModule, config.moduleStats.bad, config.moduleStats.all ); + /** + * Makes a clone of an object using only Array or Object as base, + * and copies over the own enumerable properties. + * + * @param {Object} obj + * @return {Object} New object with only the own properties (recursively). + */ + objectValues = function( obj ) { + var key, val, + vals = QUnit.is( "array", obj ) ? [] : {}; + for ( key in obj ) { + if ( hasOwn.call( obj, key ) ) { + val = obj[ key ]; + vals[ key ] = val === Object( val ) ? 
objectValues( val ) : val; } - - config.currentModule = name; - config.moduleTestEnvironment = testEnvironment; - config.moduleStats = { all: 0, bad: 0 }; - - QUnit.moduleStart( name, testEnvironment ); - }); + } + return vals; + }; + +QUnit = {}; + +/** + * Config object: Maintain internal state + * Later exposed as QUnit.config + * `config` initialized at top of scope + */ +config = { + // The queue of tests to run + queue: [], + + // block until document ready + blocking: true, + + // by default, run previously failed tests first + // very useful in combination with "Hide passed tests" checked + reorder: true, + + // by default, modify document.title when suite is done + altertitle: true, + + // by default, scroll to top of the page when suite is done + scrolltop: true, + + // when enabled, all tests must call expect() + requireExpects: false, + + // depth up-to which object will be dumped + maxDepth: 5, + + // add checkboxes that are persisted in the query-string + // when enabled, the id is set to `true` as a `QUnit.config` property + urlConfig: [ + { + id: "hidepassed", + label: "Hide passed tests", + tooltip: "Only show tests and assertions that fail. Stored as query-strings." + }, + { + id: "noglobals", + label: "Check for Globals", + tooltip: "Enabling this will test if any test introduces new properties on the " + + "`window` object. Stored as query-strings." + }, + { + id: "notrycatch", + label: "No try-catch", + tooltip: "Enabling this will run tests outside of a try-catch block. Makes debugging " + + "exceptions in IE reasonable. Stored as query-strings." + } + ], + + // Set of all modules. 
+ modules: [], + + // The first unnamed module + currentModule: { + name: "", + tests: [] }, - asyncTest: function(testName, expected, callback) { + callbacks: {} +}; + +// Push a loose unnamed module to the modules collection +config.modules.push( config.currentModule ); + +// Initialize more QUnit.config and QUnit.urlParams +(function() { + var i, current, + location = window.location || { search: "", protocol: "file:" }, + params = location.search.slice( 1 ).split( "&" ), + length = params.length, + urlParams = {}; + + if ( params[ 0 ] ) { + for ( i = 0; i < length; i++ ) { + current = params[ i ].split( "=" ); + current[ 0 ] = decodeURIComponent( current[ 0 ] ); + + // allow just a key to turn on a flag, e.g., test.html?noglobals + current[ 1 ] = current[ 1 ] ? decodeURIComponent( current[ 1 ] ) : true; + if ( urlParams[ current[ 0 ] ] ) { + urlParams[ current[ 0 ] ] = [].concat( urlParams[ current[ 0 ] ], current[ 1 ] ); + } else { + urlParams[ current[ 0 ] ] = current[ 1 ]; + } + } + } + + if ( urlParams.filter === true ) { + delete urlParams.filter; + } + + QUnit.urlParams = urlParams; + + // String search anywhere in moduleName+testName + config.filter = urlParams.filter; + + if ( urlParams.maxDepth ) { + config.maxDepth = parseInt( urlParams.maxDepth, 10 ) === -1 ? + Number.POSITIVE_INFINITY : + urlParams.maxDepth; + } + + config.testId = []; + if ( urlParams.testId ) { + + // Ensure that urlParams.testId is an array + urlParams.testId = decodeURIComponent( urlParams.testId ).split( "," ); + for ( i = 0; i < urlParams.testId.length; i++ ) { + config.testId.push( urlParams.testId[ i ] ); + } + } + + // Figure out if we're running the tests from a server or not + QUnit.isLocal = location.protocol === "file:"; + + // Expose the current QUnit version + QUnit.version = "1.18.0"; +}()); + +// Root QUnit object. 
+// `QUnit` initialized at top of scope +extend( QUnit, { + + // call on start of module test to prepend name to all tests + module: function( name, testEnvironment ) { + var currentModule = { + name: name, + testEnvironment: testEnvironment, + tests: [] + }; + + // DEPRECATED: handles setup/teardown functions, + // beforeEach and afterEach should be used instead + if ( testEnvironment && testEnvironment.setup ) { + testEnvironment.beforeEach = testEnvironment.setup; + delete testEnvironment.setup; + } + if ( testEnvironment && testEnvironment.teardown ) { + testEnvironment.afterEach = testEnvironment.teardown; + delete testEnvironment.teardown; + } + + config.modules.push( currentModule ); + config.currentModule = currentModule; + }, + + // DEPRECATED: QUnit.asyncTest() will be removed in QUnit 2.0. + asyncTest: function( testName, expected, callback ) { if ( arguments.length === 2 ) { callback = expected; - expected = 0; + expected = null; } - QUnit.test(testName, expected, callback, true); + QUnit.test( testName, expected, callback, true ); }, - - test: function(testName, expected, callback, async) { - var name = testName, testEnvironment, testEnvironmentArg; + + test: function( testName, expected, callback, async ) { + var test; if ( arguments.length === 2 ) { callback = expected; expected = null; } - // is 2nd argument a testEnvironment? 
- if ( expected && typeof expected === 'object') { - testEnvironmentArg = expected; - expected = null; - } - - if ( config.currentModule ) { - name = config.currentModule + " module: " + name; - } - - if ( !validTest(name) ) { - return; - } - - synchronize(function() { - QUnit.testStart( testName ); - - testEnvironment = extend({ - setup: function() {}, - teardown: function() {} - }, config.moduleTestEnvironment); - if (testEnvironmentArg) { - extend(testEnvironment,testEnvironmentArg); - } - - // allow utility functions to access the current test environment - QUnit.current_testEnvironment = testEnvironment; - - config.assertions = []; - config.expected = expected; - - try { - if ( !config.pollution ) { - saveGlobal(); - } - - testEnvironment.setup.call(testEnvironment); - } catch(e) { - QUnit.ok( false, "Setup failed on " + name + ": " + e.message ); - } - - if ( async ) { - QUnit.stop(); - } - - try { - callback.call(testEnvironment); - } catch(e) { - fail("Test " + name + " died, exception and test follows", e, callback); - QUnit.ok( false, "Died on test #" + (config.assertions.length + 1) + ": " + e.message ); - // else next test will carry the responsibility - saveGlobal(); - - // Restart the tests if they're blocking - if ( config.blocking ) { - start(); - } - } + + test = new Test({ + testName: testName, + expected: expected, + async: async, + callback: callback + }); + + test.queue(); + }, + + skip: function( testName ) { + var test = new Test({ + testName: testName, + skip: true }); - synchronize(function() { - try { - checkPollution(); - testEnvironment.teardown.call(testEnvironment); - } catch(e) { - QUnit.ok( false, "Teardown failed on " + name + ": " + e.message ); - } - - try { - QUnit.reset(); - } catch(e) { - fail("reset() failed, following Test " + name + ", exception and reset fn follows", e, reset); - } - - if ( config.expected && config.expected != config.assertions.length ) { - QUnit.ok( false, "Expected " + config.expected + " assertions, but 
" + config.assertions.length + " were run" ); + test.queue(); + }, + + // DEPRECATED: The functionality of QUnit.start() will be altered in QUnit 2.0. + // In QUnit 2.0, invoking it will ONLY affect the `QUnit.config.autostart` blocking behavior. + start: function( count ) { + var globalStartAlreadyCalled = globalStartCalled; + + if ( !config.current ) { + globalStartCalled = true; + + if ( runStarted ) { + throw new Error( "Called start() outside of a test context while already started" ); + } else if ( globalStartAlreadyCalled || count > 1 ) { + throw new Error( "Called start() outside of a test context too many times" ); + } else if ( config.autostart ) { + throw new Error( "Called start() outside of a test context when " + + "QUnit.config.autostart was true" ); + } else if ( !config.pageLoaded ) { + + // The page isn't completely loaded yet, so bail out and let `QUnit.load` handle it + config.autostart = true; + return; } - - var good = 0, bad = 0, - tests = id("qunit-tests"); - - config.stats.all += config.assertions.length; - config.moduleStats.all += config.assertions.length; - - if ( tests ) { - var ol = document.createElement("ol"); - ol.style.display = "none"; - - for ( var i = 0; i < config.assertions.length; i++ ) { - var assertion = config.assertions[i]; - - var li = document.createElement("li"); - li.className = assertion.result ? "pass" : "fail"; - li.appendChild(document.createTextNode(assertion.message || "(no message)")); - ol.appendChild( li ); - - if ( assertion.result ) { - good++; - } else { - bad++; - config.stats.bad++; - config.moduleStats.bad++; - } - } - - var b = document.createElement("strong"); - b.innerHTML = name + " (" + bad + ", " + good + ", " + config.assertions.length + ")"; - - addEvent(b, "click", function() { - var next = b.nextSibling, display = next.style.display; - next.style.display = display === "none" ? "block" : "none"; - }); - - addEvent(b, "dblclick", function(e) { - var target = e && e.target ? 
e.target : window.event.srcElement; - if ( target.nodeName.toLowerCase() === "strong" ) { - var text = "", node = target.firstChild; - - while ( node.nodeType === 3 ) { - text += node.nodeValue; - node = node.nextSibling; - } - - text = text.replace(/(^\s*|\s*$)/g, ""); - - if ( window.location ) { - window.location.href = window.location.href.match(/^(.+?)(\?.*)?$/)[1] + "?" + encodeURIComponent(text); - } - } - }); - - var li = document.createElement("li"); - li.className = bad ? "fail" : "pass"; - li.appendChild( b ); - li.appendChild( ol ); - tests.appendChild( li ); - - if ( bad ) { - var toolbar = id("qunit-testrunner-toolbar"); - if ( toolbar ) { - toolbar.style.display = "block"; - id("qunit-filter-pass").disabled = null; - id("qunit-filter-missing").disabled = null; - } - } - - } else { - for ( var i = 0; i < config.assertions.length; i++ ) { - if ( !config.assertions[i].result ) { - bad++; - config.stats.bad++; - config.moduleStats.bad++; - } - } + } else { + + // If a test is running, adjust its semaphore + config.current.semaphore -= count || 1; + + // Don't start until equal number of stop-calls + if ( config.current.semaphore > 0 ) { + return; } - QUnit.testDone( testName, bad, config.assertions.length ); - - if ( !window.setTimeout && !config.queue.length ) { - done(); + // throw an Error if start is called more often than stop + if ( config.current.semaphore < 0 ) { + config.current.semaphore = 0; + + QUnit.pushFailure( + "Called start() while already started (test's semaphore was 0 already)", + sourceFromStacktrace( 2 ) + ); + return; } - }); - - if ( window.setTimeout && !config.doneTimer ) { - config.doneTimer = window.setTimeout(function(){ - if ( !config.queue.length ) { - done(); - } else { - synchronize( done ); - } - }, 13); } - }, - - /** - * Specify the number of expected assertions to gurantee that failed test (no assertions are run at all) don't slip through. 
- */ - expect: function(asserts) { - config.expected = asserts; + + resumeProcessing(); }, - /** - * Asserts true. - * @example ok( "asdfasdf".length > 5, "There must be at least 5 chars" ); - */ - ok: function(a, msg) { - QUnit.log(a, msg); - - config.assertions.push({ - result: !!a, - message: msg - }); - }, - - /** - * Checks that the first two arguments are equal, with an optional message. - * Prints out both actual and expected values. - * - * Prefered to ok( actual == expected, message ) - * - * @example equal( format("Received {0} bytes.", 2), "Received 2 bytes." ); - * - * @param Object actual - * @param Object expected - * @param String message (optional) - */ - equal: function(actual, expected, message) { - push(expected == actual, actual, expected, message); - }, - - notEqual: function(actual, expected, message) { - push(expected != actual, actual, expected, message); - }, - - deepEqual: function(a, b, message) { - push(QUnit.equiv(a, b), a, b, message); - }, - - notDeepEqual: function(a, b, message) { - push(!QUnit.equiv(a, b), a, b, message); - }, - - strictEqual: function(actual, expected, message) { - push(expected === actual, actual, expected, message); - }, - - notStrictEqual: function(actual, expected, message) { - push(expected !== actual, actual, expected, message); + // DEPRECATED: QUnit.stop() will be removed in QUnit 2.0. 
+ stop: function( count ) { + + // If there isn't a test running, don't allow QUnit.stop() to be called + if ( !config.current ) { + throw new Error( "Called stop() outside of a test context" ); + } + + // If a test is running, adjust its semaphore + config.current.semaphore += count || 1; + + pauseProcessing(); }, - - start: function() { - // A slight delay, to avoid any current callbacks - if ( window.setTimeout ) { - window.setTimeout(function() { - if ( config.timeout ) { - clearTimeout(config.timeout); - } - - config.blocking = false; - process(); - }, 13); - } else { - config.blocking = false; - process(); - } - }, - - stop: function(timeout) { - config.blocking = true; - - if ( timeout && window.setTimeout ) { - config.timeout = window.setTimeout(function() { - QUnit.ok( false, "Test timed out" ); - QUnit.start(); - }, timeout); - } - }, - - /** - * Resets the test setup. Useful for tests that modify the DOM. - */ - reset: function() { - if ( window.jQuery ) { - jQuery("#main").html( config.fixture ); - jQuery.event.global = {}; - jQuery.ajaxSettings = extend({}, config.ajaxSettings); - } - }, - - /** - * Trigger an event on an element. 
- * - * @example triggerEvent( document.body, "click" ); - * - * @param DOMElement elem - * @param String type - */ - triggerEvent: function( elem, type, event ) { - if ( document.createEvent ) { - event = document.createEvent("MouseEvents"); - event.initMouseEvent(type, true, true, elem.ownerDocument.defaultView, - 0, 0, 0, 0, 0, false, false, false, false, 0, null); - elem.dispatchEvent( event ); - - } else if ( elem.fireEvent ) { - elem.fireEvent("on"+type); - } - }, - + + config: config, + // Safe object type checking is: function( type, obj ) { - return Object.prototype.toString.call( obj ) === "[object "+ type +"]"; + return QUnit.objectType( obj ) === type; }, - - // Logging callbacks - done: function(failures, total) {}, - log: function(result, message) {}, - testStart: function(name) {}, - testDone: function(name, failures, total) {}, - moduleStart: function(name, testEnvironment) {}, - moduleDone: function(name, failures, total) {} -}; - -// Backwards compatibility, deprecated -QUnit.equals = QUnit.equal; -QUnit.same = QUnit.deepEqual; - -// Maintain internal state -var config = { - // The queue of tests to run - queue: [], - - // block until document ready - blocking: true -}; - -// Load paramaters -(function() { - var location = window.location || { search: "", protocol: "file:" }, - GETParams = location.search.slice(1).split('&'); - - for ( var i = 0; i < GETParams.length; i++ ) { - GETParams[i] = decodeURIComponent( GETParams[i] ); - if ( GETParams[i] === "noglobals" ) { - GETParams.splice( i, 1 ); - i--; - config.noglobals = true; - } else if ( GETParams[i].search('=') > -1 ) { - GETParams.splice( i, 1 ); - i--; + + objectType: function( obj ) { + if ( typeof obj === "undefined" ) { + return "undefined"; + } + + // Consider: typeof null === object + if ( obj === null ) { + return "null"; + } + + var match = toString.call( obj ).match( /^\[object\s(.*)\]$/ ), + type = match && match[ 1 ] || ""; + + switch ( type ) { + case "Number": + if ( isNaN( obj 
) ) { + return "nan"; + } + return "number"; + case "String": + case "Boolean": + case "Array": + case "Date": + case "RegExp": + case "Function": + return type.toLowerCase(); + } + if ( typeof obj === "object" ) { + return "object"; + } + return undefined; + }, + + extend: extend, + + load: function() { + config.pageLoaded = true; + + // Initialize the configuration options + extend( config, { + stats: { all: 0, bad: 0 }, + moduleStats: { all: 0, bad: 0 }, + started: 0, + updateRate: 1000, + autostart: true, + filter: "" + }, true ); + + config.blocking = false; + + if ( config.autostart ) { + resumeProcessing(); } } - - // restrict modules/tests by get parameters - config.filters = GETParams; - - // Figure out if we're running the tests from a server or not - QUnit.isLocal = !!(location.protocol === 'file:'); -})(); - -// Expose the API as global variables, unless an 'exports' -// object exists, in that case we assume we're in CommonJS -if ( typeof exports === "undefined" || typeof require === "undefined" ) { - extend(window, QUnit); - window.QUnit = QUnit; -} else { - extend(exports, QUnit); - exports.QUnit = QUnit; -} - -if ( typeof document === "undefined" || document.readyState === "complete" ) { - config.autorun = true; -} - -addEvent(window, "load", function() { - // Initialize the config, saving the execution queue - var oldconfig = extend({}, config); - QUnit.init(); - extend(config, oldconfig); - - config.blocking = false; - - var userAgent = id("qunit-userAgent"); - if ( userAgent ) { - userAgent.innerHTML = navigator.userAgent; +}); + +// Register logging callbacks +(function() { + var i, l, key, + callbacks = [ "begin", "done", "log", "testStart", "testDone", + "moduleStart", "moduleDone" ]; + + function registerLoggingCallback( key ) { + var loggingCallback = function( callback ) { + if ( QUnit.objectType( callback ) !== "function" ) { + throw new Error( + "QUnit logging methods require a callback function as their first parameters." 
+ ); + } + + config.callbacks[ key ].push( callback ); + }; + + // DEPRECATED: This will be removed on QUnit 2.0.0+ + // Stores the registered functions allowing restoring + // at verifyLoggingCallbacks() if modified + loggingCallbacks[ key ] = loggingCallback; + + return loggingCallback; } - - var toolbar = id("qunit-testrunner-toolbar"); - if ( toolbar ) { - toolbar.style.display = "none"; - - var filter = document.createElement("input"); - filter.type = "checkbox"; - filter.id = "qunit-filter-pass"; - filter.disabled = true; - addEvent( filter, "click", function() { - var li = document.getElementsByTagName("li"); - for ( var i = 0; i < li.length; i++ ) { - if ( li[i].className.indexOf("pass") > -1 ) { - li[i].style.display = filter.checked ? "none" : ""; - } + + for ( i = 0, l = callbacks.length; i < l; i++ ) { + key = callbacks[ i ]; + + // Initialize key collection of logging callback + if ( QUnit.objectType( config.callbacks[ key ] ) === "undefined" ) { + config.callbacks[ key ] = []; + } + + QUnit[ key ] = registerLoggingCallback( key ); + } +})(); + +// `onErrorFnPrev` initialized at top of scope +// Preserve other handlers +onErrorFnPrev = window.onerror; + +// Cover uncaught exceptions +// Returning true will suppress the default browser handler, +// returning false will let it run. +window.onerror = function( error, filePath, linerNr ) { + var ret = false; + if ( onErrorFnPrev ) { + ret = onErrorFnPrev( error, filePath, linerNr ); + } + + // Treat return value as window.onerror itself does, + // Only do our handling if not suppressed. 
+ if ( ret !== true ) { + if ( QUnit.config.current ) { + if ( QUnit.config.current.ignoreGlobalErrors ) { + return true; } - }); - toolbar.appendChild( filter ); - - var label = document.createElement("label"); - label.setAttribute("for", "qunit-filter-pass"); - label.innerHTML = "Hide passed tests"; - toolbar.appendChild( label ); - - var missing = document.createElement("input"); - missing.type = "checkbox"; - missing.id = "qunit-filter-missing"; - missing.disabled = true; - addEvent( missing, "click", function() { - var li = document.getElementsByTagName("li"); - for ( var i = 0; i < li.length; i++ ) { - if ( li[i].className.indexOf("fail") > -1 && li[i].innerHTML.indexOf('missing test - untested code is broken code') > - 1 ) { - li[i].parentNode.parentNode.style.display = missing.checked ? "none" : "block"; - } - } - }); - toolbar.appendChild( missing ); - - label = document.createElement("label"); - label.setAttribute("for", "qunit-filter-missing"); - label.innerHTML = "Hide missing tests (untested code is broken code)"; - toolbar.appendChild( label ); + QUnit.pushFailure( error, filePath + ":" + linerNr ); + } else { + QUnit.test( "global failure", extend(function() { + QUnit.pushFailure( error, filePath + ":" + linerNr ); + }, { validTest: true } ) ); + } + return false; } - var main = id('main'); - if ( main ) { - config.fixture = main.innerHTML; - } - - if ( window.jQuery ) { - config.ajaxSettings = window.jQuery.ajaxSettings; - } - - QUnit.start(); -}); + return ret; +}; function done() { - if ( config.doneTimer && window.clearTimeout ) { - window.clearTimeout( config.doneTimer ); - config.doneTimer = null; - } - - if ( config.queue.length ) { - config.doneTimer = window.setTimeout(function(){ - if ( !config.queue.length ) { - done(); - } else { - synchronize( done ); - } - }, 13); - - return; - } + var runtime, passed; config.autorun = true; // Log the last module results - if ( config.currentModule ) { - QUnit.moduleDone( config.currentModule, 
config.moduleStats.bad, config.moduleStats.all ); - } - - var banner = id("qunit-banner"), - tests = id("qunit-tests"), - html = ['Tests completed in ', - +new Date - config.started, ' milliseconds.
', - '', config.stats.all - config.stats.bad, ' tests of ', config.stats.all, ' passed, ', config.stats.bad,' failed.'].join(''); - - if ( banner ) { - banner.className = (config.stats.bad ? "qunit-fail" : "qunit-pass"); - } - - if ( tests ) { - var result = id("qunit-testresult"); - - if ( !result ) { - result = document.createElement("p"); - result.id = "qunit-testresult"; - result.className = "result"; - tests.parentNode.insertBefore( result, tests.nextSibling ); - } - - result.innerHTML = html; + if ( config.previousModule ) { + runLoggingCallbacks( "moduleDone", { + name: config.previousModule.name, + tests: config.previousModule.tests, + failed: config.moduleStats.bad, + passed: config.moduleStats.all - config.moduleStats.bad, + total: config.moduleStats.all, + runtime: now() - config.moduleStats.started + }); } - - QUnit.done( config.stats.bad, config.stats.all ); + delete config.previousModule; + + runtime = now() - config.started; + passed = config.stats.all - config.stats.bad; + + runLoggingCallbacks( "done", { + failed: config.stats.bad, + passed: passed, + total: config.stats.all, + runtime: runtime + }); } -function validTest( name ) { - var i = config.filters.length, - run = false; - - if ( !i ) { - return true; +// Doesn't support IE6 to IE9, it will return undefined on these browsers +// See also https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/Error/Stack +function extractStacktrace( e, offset ) { + offset = offset === undefined ? 
4 : offset; + + var stack, include, i; + + if ( e.stack ) { + stack = e.stack.split( "\n" ); + if ( /^error$/i.test( stack[ 0 ] ) ) { + stack.shift(); + } + if ( fileName ) { + include = []; + for ( i = offset; i < stack.length; i++ ) { + if ( stack[ i ].indexOf( fileName ) !== -1 ) { + break; + } + include.push( stack[ i ] ); + } + if ( include.length ) { + return include.join( "\n" ); + } + } + return stack[ offset ]; + + // Support: Safari <=6 only + } else if ( e.sourceURL ) { + + // exclude useless self-reference for generated Error objects + if ( /qunit.js$/.test( e.sourceURL ) ) { + return; + } + + // for actual exceptions, this is useful + return e.sourceURL + ":" + e.line; } - - while ( i-- ) { - var filter = config.filters[i], - not = filter.charAt(0) == '!'; - - if ( not ) { - filter = filter.slice(1); - } - - if ( name.indexOf(filter) !== -1 ) { - return !not; - } - - if ( not ) { - run = true; +} + +function sourceFromStacktrace( offset ) { + var error = new Error(); + + // Support: Safari <=7 only, IE <=10 - 11 only + // Not all browsers generate the `stack` property for `new Error()`, see also #636 + if ( !error.stack ) { + try { + throw error; + } catch ( err ) { + error = err; } } - return run; + return extractStacktrace( error, offset ); } -function push(result, actual, expected, message) { - message = message || (result ? "okay" : "failed"); - QUnit.ok( result, result ? 
message + ": " + QUnit.jsDump.parse(expected) : message + ", expected: " + QUnit.jsDump.parse(expected) + " result: " + QUnit.jsDump.parse(actual) ); -} - -function synchronize( callback ) { +function synchronize( callback, last ) { + if ( QUnit.objectType( callback ) === "array" ) { + while ( callback.length ) { + synchronize( callback.shift() ); + } + return; + } config.queue.push( callback ); if ( config.autorun && !config.blocking ) { - process(); + process( last ); } } -function process() { - var start = (new Date()).getTime(); +function process( last ) { + function next() { + process( last ); + } + var start = now(); + config.depth = ( config.depth || 0 ) + 1; while ( config.queue.length && !config.blocking ) { - if ( config.updateRate <= 0 || (((new Date()).getTime() - start) < config.updateRate) ) { + if ( !defined.setTimeout || config.updateRate <= 0 || + ( ( now() - start ) < config.updateRate ) ) { + if ( config.current ) { + + // Reset async tracking for each phase of the Test lifecycle + config.current.usedAsync = false; + } config.queue.shift()(); - } else { - setTimeout( process, 13 ); + setTimeout( next, 13 ); break; } } + config.depth--; + if ( last && !config.blocking && !config.queue.length && config.depth === 0 ) { + done(); + } +} + +function begin() { + var i, l, + modulesLog = []; + + // If the test run hasn't officially begun yet + if ( !config.started ) { + + // Record the time of the test run's beginning + config.started = now(); + + verifyLoggingCallbacks(); + + // Delete the loose unnamed module if unused. 
+ if ( config.modules[ 0 ].name === "" && config.modules[ 0 ].tests.length === 0 ) { + config.modules.shift(); + } + + // Avoid unnecessary information by not logging modules' test environments + for ( i = 0, l = config.modules.length; i < l; i++ ) { + modulesLog.push({ + name: config.modules[ i ].name, + tests: config.modules[ i ].tests + }); + } + + // The test run is officially beginning now + runLoggingCallbacks( "begin", { + totalTests: Test.count, + modules: modulesLog + }); + } + + config.blocking = false; + process( true ); +} + +function resumeProcessing() { + runStarted = true; + + // A slight delay to allow this iteration of the event loop to finish (more assertions, etc.) + if ( defined.setTimeout ) { + setTimeout(function() { + if ( config.current && config.current.semaphore > 0 ) { + return; + } + if ( config.timeout ) { + clearTimeout( config.timeout ); + } + + begin(); + }, 13 ); + } else { + begin(); + } +} + +function pauseProcessing() { + config.blocking = true; + + if ( config.testTimeout && defined.setTimeout ) { + clearTimeout( config.timeout ); + config.timeout = setTimeout(function() { + if ( config.current ) { + config.current.semaphore = 0; + QUnit.pushFailure( "Test timed out", sourceFromStacktrace( 2 ) ); + } else { + throw new Error( "Test timed out" ); + } + resumeProcessing(); + }, config.testTimeout ); + } } function saveGlobal() { config.pollution = []; - + if ( config.noglobals ) { for ( var key in window ) { - config.pollution.push( key ); + if ( hasOwn.call( window, key ) ) { + // in Opera sometimes DOM element ids show up here, ignore them + if ( /^qunit-test-output/.test( key ) ) { + continue; + } + config.pollution.push( key ); + } } } } -function checkPollution( name ) { - var old = config.pollution; +function checkPollution() { + var newGlobals, + deletedGlobals, + old = config.pollution; + saveGlobal(); - - var newGlobals = diff( old, config.pollution ); + + newGlobals = diff( config.pollution, old ); if ( newGlobals.length 
> 0 ) { - ok( false, "Introduced global variable(s): " + newGlobals.join(", ") ); - config.expected++; + QUnit.pushFailure( "Introduced global variable(s): " + newGlobals.join( ", " ) ); } - var deletedGlobals = diff( config.pollution, old ); + deletedGlobals = diff( old, config.pollution ); if ( deletedGlobals.length > 0 ) { - ok( false, "Deleted global variable(s): " + deletedGlobals.join(", ") ); - config.expected++; + QUnit.pushFailure( "Deleted global variable(s): " + deletedGlobals.join( ", " ) ); } } // returns a new Array with the elements that are in a but not in b function diff( a, b ) { - var result = a.slice(); - for ( var i = 0; i < result.length; i++ ) { - for ( var j = 0; j < b.length; j++ ) { - if ( result[i] === b[j] ) { - result.splice(i, 1); + var i, j, + result = a.slice(); + + for ( i = 0; i < result.length; i++ ) { + for ( j = 0; j < b.length; j++ ) { + if ( result[ i ] === b[ j ] ) { + result.splice( i, 1 ); i--; break; } @@ -646,424 +716,3113 @@ return result; } -function fail(message, exception, callback) { - if ( typeof console !== "undefined" && console.error && console.warn ) { - console.error(message); - console.error(exception); - console.warn(callback.toString()); - - } else if ( window.opera && opera.postError ) { - opera.postError(message, exception, callback.toString); - } -} - -function extend(a, b) { +function extend( a, b, undefOnly ) { for ( var prop in b ) { - a[prop] = b[prop]; + if ( hasOwn.call( b, prop ) ) { + + // Avoid "Member not found" error in IE8 caused by messing with window.constructor + if ( !( prop === "constructor" && a === window ) ) { + if ( b[ prop ] === undefined ) { + delete a[ prop ]; + } else if ( !( undefOnly && typeof a[ prop ] !== "undefined" ) ) { + a[ prop ] = b[ prop ]; + } + } + } } return a; } -function addEvent(elem, type, fn) { - if ( elem.addEventListener ) { - elem.addEventListener( type, fn, false ); - } else if ( elem.attachEvent ) { - elem.attachEvent( "on" + type, fn ); +function 
runLoggingCallbacks( key, args ) { + var i, l, callbacks; + + callbacks = config.callbacks[ key ]; + for ( i = 0, l = callbacks.length; i < l; i++ ) { + callbacks[ i ]( args ); + } +} + +// DEPRECATED: This will be removed on 2.0.0+ +// This function verifies if the loggingCallbacks were modified by the user +// If so, it will restore it, assign the given callback and print a console warning +function verifyLoggingCallbacks() { + var loggingCallback, userCallback; + + for ( loggingCallback in loggingCallbacks ) { + if ( QUnit[ loggingCallback ] !== loggingCallbacks[ loggingCallback ] ) { + + userCallback = QUnit[ loggingCallback ]; + + // Restore the callback function + QUnit[ loggingCallback ] = loggingCallbacks[ loggingCallback ]; + + // Assign the deprecated given callback + QUnit[ loggingCallback ]( userCallback ); + + if ( window.console && window.console.warn ) { + window.console.warn( + "QUnit." + loggingCallback + " was replaced with a new value.\n" + + "Please, check out the documentation on how to apply logging callbacks.\n" + + "Reference: http://api.qunitjs.com/category/callbacks/" + ); + } + } + } +} + +// from jquery.js +function inArray( elem, array ) { + if ( array.indexOf ) { + return array.indexOf( elem ); + } + + for ( var i = 0, length = array.length; i < length; i++ ) { + if ( array[ i ] === elem ) { + return i; + } + } + + return -1; +} + +function Test( settings ) { + var i, l; + + ++Test.count; + + extend( this, settings ); + this.assertions = []; + this.semaphore = 0; + this.usedAsync = false; + this.module = config.currentModule; + this.stack = sourceFromStacktrace( 3 ); + + // Register unique strings + for ( i = 0, l = this.module.tests; i < l.length; i++ ) { + if ( this.module.tests[ i ].name === this.testName ) { + this.testName += " "; + } + } + + this.testId = generateHash( this.module.name, this.testName ); + + this.module.tests.push({ + name: this.testName, + testId: this.testId + }); + + if ( settings.skip ) { + + // Skipped tests 
will fully ignore any sent callback + this.callback = function() {}; + this.async = false; + this.expected = 0; } else { - fn(); + this.assert = new Assert( this ); } } -function id(name) { - return !!(typeof document !== "undefined" && document && document.getElementById) && - document.getElementById( name ); +Test.count = 0; + +Test.prototype = { + before: function() { + if ( + + // Emit moduleStart when we're switching from one module to another + this.module !== config.previousModule || + + // They could be equal (both undefined) but if the previousModule property doesn't + // yet exist it means this is the first test in a suite that isn't wrapped in a + // module, in which case we'll just emit a moduleStart event for 'undefined'. + // Without this, reporters can get testStart before moduleStart which is a problem. + !hasOwn.call( config, "previousModule" ) + ) { + if ( hasOwn.call( config, "previousModule" ) ) { + runLoggingCallbacks( "moduleDone", { + name: config.previousModule.name, + tests: config.previousModule.tests, + failed: config.moduleStats.bad, + passed: config.moduleStats.all - config.moduleStats.bad, + total: config.moduleStats.all, + runtime: now() - config.moduleStats.started + }); + } + config.previousModule = this.module; + config.moduleStats = { all: 0, bad: 0, started: now() }; + runLoggingCallbacks( "moduleStart", { + name: this.module.name, + tests: this.module.tests + }); + } + + config.current = this; + + this.testEnvironment = extend( {}, this.module.testEnvironment ); + delete this.testEnvironment.beforeEach; + delete this.testEnvironment.afterEach; + + this.started = now(); + runLoggingCallbacks( "testStart", { + name: this.testName, + module: this.module.name, + testId: this.testId + }); + + if ( !config.pollution ) { + saveGlobal(); + } + }, + + run: function() { + var promise; + + config.current = this; + + if ( this.async ) { + QUnit.stop(); + } + + this.callbackStarted = now(); + + if ( config.notrycatch ) { + promise = 
this.callback.call( this.testEnvironment, this.assert ); + this.resolvePromise( promise ); + return; + } + + try { + promise = this.callback.call( this.testEnvironment, this.assert ); + this.resolvePromise( promise ); + } catch ( e ) { + this.pushFailure( "Died on test #" + ( this.assertions.length + 1 ) + " " + + this.stack + ": " + ( e.message || e ), extractStacktrace( e, 0 ) ); + + // else next test will carry the responsibility + saveGlobal(); + + // Restart the tests if they're blocking + if ( config.blocking ) { + QUnit.start(); + } + } + }, + + after: function() { + checkPollution(); + }, + + queueHook: function( hook, hookName ) { + var promise, + test = this; + return function runHook() { + config.current = test; + if ( config.notrycatch ) { + promise = hook.call( test.testEnvironment, test.assert ); + test.resolvePromise( promise, hookName ); + return; + } + try { + promise = hook.call( test.testEnvironment, test.assert ); + test.resolvePromise( promise, hookName ); + } catch ( error ) { + test.pushFailure( hookName + " failed on " + test.testName + ": " + + ( error.message || error ), extractStacktrace( error, 0 ) ); + } + }; + }, + + // Currently only used for module level hooks, can be used to add global level ones + hooks: function( handler ) { + var hooks = []; + + // Hooks are ignored on skipped tests + if ( this.skip ) { + return hooks; + } + + if ( this.module.testEnvironment && + QUnit.objectType( this.module.testEnvironment[ handler ] ) === "function" ) { + hooks.push( this.queueHook( this.module.testEnvironment[ handler ], handler ) ); + } + + return hooks; + }, + + finish: function() { + config.current = this; + if ( config.requireExpects && this.expected === null ) { + this.pushFailure( "Expected number of assertions to be defined, but expect() was " + + "not called.", this.stack ); + } else if ( this.expected !== null && this.expected !== this.assertions.length ) { + this.pushFailure( "Expected " + this.expected + " assertions, but " + + 
this.assertions.length + " were run", this.stack ); + } else if ( this.expected === null && !this.assertions.length ) { + this.pushFailure( "Expected at least one assertion, but none were run - call " + + "expect(0) to accept zero assertions.", this.stack ); + } + + var i, + bad = 0; + + this.runtime = now() - this.started; + config.stats.all += this.assertions.length; + config.moduleStats.all += this.assertions.length; + + for ( i = 0; i < this.assertions.length; i++ ) { + if ( !this.assertions[ i ].result ) { + bad++; + config.stats.bad++; + config.moduleStats.bad++; + } + } + + runLoggingCallbacks( "testDone", { + name: this.testName, + module: this.module.name, + skipped: !!this.skip, + failed: bad, + passed: this.assertions.length - bad, + total: this.assertions.length, + runtime: this.runtime, + + // HTML Reporter use + assertions: this.assertions, + testId: this.testId, + + // DEPRECATED: this property will be removed in 2.0.0, use runtime instead + duration: this.runtime + }); + + // QUnit.reset() is deprecated and will be replaced for a new + // fixture reset function on QUnit 2.0/2.1. 
+ // It's still called here for backwards compatibility handling + QUnit.reset(); + + config.current = undefined; + }, + + queue: function() { + var bad, + test = this; + + if ( !this.valid() ) { + return; + } + + function run() { + + // each of these can by async + synchronize([ + function() { + test.before(); + }, + + test.hooks( "beforeEach" ), + + function() { + test.run(); + }, + + test.hooks( "afterEach" ).reverse(), + + function() { + test.after(); + }, + function() { + test.finish(); + } + ]); + } + + // `bad` initialized at top of scope + // defer when previous test run passed, if storage is available + bad = QUnit.config.reorder && defined.sessionStorage && + +sessionStorage.getItem( "qunit-test-" + this.module.name + "-" + this.testName ); + + if ( bad ) { + run(); + } else { + synchronize( run, true ); + } + }, + + push: function( result, actual, expected, message ) { + var source, + details = { + module: this.module.name, + name: this.testName, + result: result, + message: message, + actual: actual, + expected: expected, + testId: this.testId, + runtime: now() - this.started + }; + + if ( !result ) { + source = sourceFromStacktrace(); + + if ( source ) { + details.source = source; + } + } + + runLoggingCallbacks( "log", details ); + + this.assertions.push({ + result: !!result, + message: message + }); + }, + + pushFailure: function( message, source, actual ) { + if ( !this instanceof Test ) { + throw new Error( "pushFailure() assertion outside test context, was " + + sourceFromStacktrace( 2 ) ); + } + + var details = { + module: this.module.name, + name: this.testName, + result: false, + message: message || "error", + actual: actual || null, + testId: this.testId, + runtime: now() - this.started + }; + + if ( source ) { + details.source = source; + } + + runLoggingCallbacks( "log", details ); + + this.assertions.push({ + result: false, + message: message + }); + }, + + resolvePromise: function( promise, phase ) { + var then, message, + test = this; + 
if ( promise != null ) { + then = promise.then; + if ( QUnit.objectType( then ) === "function" ) { + QUnit.stop(); + then.call( + promise, + QUnit.start, + function( error ) { + message = "Promise rejected " + + ( !phase ? "during" : phase.replace( /Each$/, "" ) ) + + " " + test.testName + ": " + ( error.message || error ); + test.pushFailure( message, extractStacktrace( error, 0 ) ); + + // else next test will carry the responsibility + saveGlobal(); + + // Unblock + QUnit.start(); + } + ); + } + } + }, + + valid: function() { + var include, + filter = config.filter && config.filter.toLowerCase(), + module = QUnit.urlParams.module && QUnit.urlParams.module.toLowerCase(), + fullName = ( this.module.name + ": " + this.testName ).toLowerCase(); + + // Internally-generated tests are always valid + if ( this.callback && this.callback.validTest ) { + return true; + } + + if ( config.testId.length > 0 && inArray( this.testId, config.testId ) < 0 ) { + return false; + } + + if ( module && ( !this.module.name || this.module.name.toLowerCase() !== module ) ) { + return false; + } + + if ( !filter ) { + return true; + } + + include = filter.charAt( 0 ) !== "!"; + if ( !include ) { + filter = filter.slice( 1 ); + } + + // If the filter matches, we need to honour include + if ( fullName.indexOf( filter ) !== -1 ) { + return include; + } + + // Otherwise, do the opposite + return !include; + } + +}; + +// Resets the test setup. Useful for tests that modify the DOM. +/* +DEPRECATED: Use multiple tests instead of resetting inside a test. +Use testStart or testDone for custom cleanup. 
+This method will throw an error in 2.0, and will be removed in 2.1 +*/ +QUnit.reset = function() { + + // Return on non-browser environments + // This is necessary to not break on node tests + if ( typeof window === "undefined" ) { + return; + } + + var fixture = defined.document && document.getElementById && + document.getElementById( "qunit-fixture" ); + + if ( fixture ) { + fixture.innerHTML = config.fixture; + } +}; + +QUnit.pushFailure = function() { + if ( !QUnit.config.current ) { + throw new Error( "pushFailure() assertion outside test context, in " + + sourceFromStacktrace( 2 ) ); + } + + // Gets current test obj + var currentTest = QUnit.config.current; + + return currentTest.pushFailure.apply( currentTest, arguments ); +}; + +// Based on Java's String.hashCode, a simple but not +// rigorously collision resistant hashing function +function generateHash( module, testName ) { + var hex, + i = 0, + hash = 0, + str = module + "\x1C" + testName, + len = str.length; + + for ( ; i < len; i++ ) { + hash = ( ( hash << 5 ) - hash ) + str.charCodeAt( i ); + hash |= 0; + } + + // Convert the possibly negative integer hash code into an 8 character hex string, which isn't + // strictly necessary but increases user understanding that the id is a SHA-like hash + hex = ( 0x100000000 + hash ).toString( 16 ); + if ( hex.length < 8 ) { + hex = "0000000" + hex; + } + + return hex.slice( -8 ); } +function Assert( testContext ) { + this.test = testContext; +} + +// Assert helpers +QUnit.assert = Assert.prototype = { + + // Specify the number of expected assertions to guarantee that failed test + // (no assertions are run at all) don't slip through. + expect: function( asserts ) { + if ( arguments.length === 1 ) { + this.test.expected = asserts; + } else { + return this.test.expected; + } + }, + + // Increment this Test's semaphore counter, then return a single-use function that + // decrements that counter a maximum of once. 
+ async: function() { + var test = this.test, + popped = false; + + test.semaphore += 1; + test.usedAsync = true; + pauseProcessing(); + + return function done() { + if ( !popped ) { + test.semaphore -= 1; + popped = true; + resumeProcessing(); + } else { + test.pushFailure( "Called the callback returned from `assert.async` more than once", + sourceFromStacktrace( 2 ) ); + } + }; + }, + + // Exports test.push() to the user API + push: function( /* result, actual, expected, message */ ) { + var assert = this, + currentTest = ( assert instanceof Assert && assert.test ) || QUnit.config.current; + + // Backwards compatibility fix. + // Allows the direct use of global exported assertions and QUnit.assert.* + // Although, it's use is not recommended as it can leak assertions + // to other tests from async tests, because we only get a reference to the current test, + // not exactly the test where assertion were intended to be called. + if ( !currentTest ) { + throw new Error( "assertion outside test context, in " + sourceFromStacktrace( 2 ) ); + } + + if ( currentTest.usedAsync === true && currentTest.semaphore === 0 ) { + currentTest.pushFailure( "Assertion after the final `assert.async` was resolved", + sourceFromStacktrace( 2 ) ); + + // Allow this assertion to continue running anyway... + } + + if ( !( assert instanceof Assert ) ) { + assert = currentTest.assert; + } + return assert.test.push.apply( assert.test, arguments ); + }, + + ok: function( result, message ) { + message = message || ( result ? "okay" : "failed, expected argument to be truthy, was: " + + QUnit.dump.parse( result ) ); + this.push( !!result, result, true, message ); + }, + + notOk: function( result, message ) { + message = message || ( !result ? 
"okay" : "failed, expected argument to be falsy, was: " + + QUnit.dump.parse( result ) ); + this.push( !result, result, false, message ); + }, + + equal: function( actual, expected, message ) { + /*jshint eqeqeq:false */ + this.push( expected == actual, actual, expected, message ); + }, + + notEqual: function( actual, expected, message ) { + /*jshint eqeqeq:false */ + this.push( expected != actual, actual, expected, message ); + }, + + propEqual: function( actual, expected, message ) { + actual = objectValues( actual ); + expected = objectValues( expected ); + this.push( QUnit.equiv( actual, expected ), actual, expected, message ); + }, + + notPropEqual: function( actual, expected, message ) { + actual = objectValues( actual ); + expected = objectValues( expected ); + this.push( !QUnit.equiv( actual, expected ), actual, expected, message ); + }, + + deepEqual: function( actual, expected, message ) { + this.push( QUnit.equiv( actual, expected ), actual, expected, message ); + }, + + notDeepEqual: function( actual, expected, message ) { + this.push( !QUnit.equiv( actual, expected ), actual, expected, message ); + }, + + strictEqual: function( actual, expected, message ) { + this.push( expected === actual, actual, expected, message ); + }, + + notStrictEqual: function( actual, expected, message ) { + this.push( expected !== actual, actual, expected, message ); + }, + + "throws": function( block, expected, message ) { + var actual, expectedType, + expectedOutput = expected, + ok = false, + currentTest = ( this instanceof Assert && this.test ) || QUnit.config.current; + + // 'expected' is optional unless doing string comparison + if ( message == null && typeof expected === "string" ) { + message = expected; + expected = null; + } + + currentTest.ignoreGlobalErrors = true; + try { + block.call( currentTest.testEnvironment ); + } catch (e) { + actual = e; + } + currentTest.ignoreGlobalErrors = false; + + if ( actual ) { + expectedType = QUnit.objectType( expected ); + + 
// we don't want to validate thrown error + if ( !expected ) { + ok = true; + expectedOutput = null; + + // expected is a regexp + } else if ( expectedType === "regexp" ) { + ok = expected.test( errorString( actual ) ); + + // expected is a string + } else if ( expectedType === "string" ) { + ok = expected === errorString( actual ); + + // expected is a constructor, maybe an Error constructor + } else if ( expectedType === "function" && actual instanceof expected ) { + ok = true; + + // expected is an Error object + } else if ( expectedType === "object" ) { + ok = actual instanceof expected.constructor && + actual.name === expected.name && + actual.message === expected.message; + + // expected is a validation function which returns true if validation passed + } else if ( expectedType === "function" && expected.call( {}, actual ) === true ) { + expectedOutput = null; + ok = true; + } + } + + currentTest.assert.push( ok, actual, expectedOutput, message ); + } +}; + +// Provide an alternative to assert.throws(), for enviroments that consider throws a reserved word +// Known to us are: Closure Compiler, Narwhal +(function() { + /*jshint sub:true */ + Assert.prototype.raises = Assert.prototype[ "throws" ]; +}()); + // Test for equality any JavaScript type. -// Discussions and reference: http://philrathe.com/articles/equiv -// Test suites: http://philrathe.com/tests/equiv // Author: Philippe Rathé -QUnit.equiv = function () { - - var innerEquiv; // the real equiv function - var callers = []; // stack to decide between skip/abort functions - var parents = []; // stack to avoiding loops from circular referencing - - - // Determine what is o. - function hoozit(o) { - if (QUnit.is("String", o)) { - return "string"; - - } else if (QUnit.is("Boolean", o)) { - return "boolean"; - - } else if (QUnit.is("Number", o)) { - - if (isNaN(o)) { - return "nan"; +QUnit.equiv = (function() { + + // Call the o related callback with the given arguments. 
+ function bindCallbacks( o, callbacks, args ) { + var prop = QUnit.objectType( o ); + if ( prop ) { + if ( QUnit.objectType( callbacks[ prop ] ) === "function" ) { + return callbacks[ prop ].apply( callbacks, args ); + } else { + return callbacks[ prop ]; // or undefined + } + } + } + + // the real equiv function + var innerEquiv, + + // stack to decide between skip/abort functions + callers = [], + + // stack to avoiding loops from circular referencing + parents = [], + parentsB = [], + + getProto = Object.getPrototypeOf || function( obj ) { + /* jshint camelcase: false, proto: true */ + return obj.__proto__; + }, + callbacks = (function() { + + // for string, boolean, number and null + function useStrictEquality( b, a ) { + + /*jshint eqeqeq:false */ + if ( b instanceof a.constructor || a instanceof b.constructor ) { + + // to catch short annotation VS 'new' annotation of a + // declaration + // e.g. var i = 1; + // var j = new Number(1); + return a == b; + } else { + return a === b; + } + } + + return { + "string": useStrictEquality, + "boolean": useStrictEquality, + "number": useStrictEquality, + "null": useStrictEquality, + "undefined": useStrictEquality, + + "nan": function( b ) { + return isNaN( b ); + }, + + "date": function( b, a ) { + return QUnit.objectType( b ) === "date" && a.valueOf() === b.valueOf(); + }, + + "regexp": function( b, a ) { + return QUnit.objectType( b ) === "regexp" && + + // the regex itself + a.source === b.source && + + // and its modifiers + a.global === b.global && + + // (gmi) ... 
+ a.ignoreCase === b.ignoreCase && + a.multiline === b.multiline && + a.sticky === b.sticky; + }, + + // - skip when the property is a method of an instance (OOP) + // - abort otherwise, + // initial === would have catch identical references anyway + "function": function() { + var caller = callers[ callers.length - 1 ]; + return caller !== Object && typeof caller !== "undefined"; + }, + + "array": function( b, a ) { + var i, j, len, loop, aCircular, bCircular; + + // b could be an object literal here + if ( QUnit.objectType( b ) !== "array" ) { + return false; + } + + len = a.length; + if ( len !== b.length ) { + // safe and faster + return false; + } + + // track reference to avoid circular references + parents.push( a ); + parentsB.push( b ); + for ( i = 0; i < len; i++ ) { + loop = false; + for ( j = 0; j < parents.length; j++ ) { + aCircular = parents[ j ] === a[ i ]; + bCircular = parentsB[ j ] === b[ i ]; + if ( aCircular || bCircular ) { + if ( a[ i ] === b[ i ] || aCircular && bCircular ) { + loop = true; + } else { + parents.pop(); + parentsB.pop(); + return false; + } + } + } + if ( !loop && !innerEquiv( a[ i ], b[ i ] ) ) { + parents.pop(); + parentsB.pop(); + return false; + } + } + parents.pop(); + parentsB.pop(); + return true; + }, + + "object": function( b, a ) { + + /*jshint forin:false */ + var i, j, loop, aCircular, bCircular, + // Default to true + eq = true, + aProperties = [], + bProperties = []; + + // comparing constructors is more strict than using + // instanceof + if ( a.constructor !== b.constructor ) { + + // Allow objects with no prototype to be equivalent to + // objects with Object as their constructor. 
+ if ( !( ( getProto( a ) === null && getProto( b ) === Object.prototype ) || + ( getProto( b ) === null && getProto( a ) === Object.prototype ) ) ) { + return false; + } + } + + // stack constructor before traversing properties + callers.push( a.constructor ); + + // track reference to avoid circular references + parents.push( a ); + parentsB.push( b ); + + // be strict: don't ensure hasOwnProperty and go deep + for ( i in a ) { + loop = false; + for ( j = 0; j < parents.length; j++ ) { + aCircular = parents[ j ] === a[ i ]; + bCircular = parentsB[ j ] === b[ i ]; + if ( aCircular || bCircular ) { + if ( a[ i ] === b[ i ] || aCircular && bCircular ) { + loop = true; + } else { + eq = false; + break; + } + } + } + aProperties.push( i ); + if ( !loop && !innerEquiv( a[ i ], b[ i ] ) ) { + eq = false; + break; + } + } + + parents.pop(); + parentsB.pop(); + callers.pop(); // unstack, we are done + + for ( i in b ) { + bProperties.push( i ); // collect b's properties + } + + // Ensures identical properties name + return eq && innerEquiv( aProperties.sort(), bProperties.sort() ); + } + }; + }()); + + innerEquiv = function() { // can take multiple arguments + var args = [].slice.apply( arguments ); + if ( args.length < 2 ) { + return true; // end transition + } + + return ( (function( a, b ) { + if ( a === b ) { + return true; // catch the most you can + } else if ( a === null || b === null || typeof a === "undefined" || + typeof b === "undefined" || + QUnit.objectType( a ) !== QUnit.objectType( b ) ) { + + // don't lose time with error prone cases + return false; + } else { + return bindCallbacks( a, callbacks, [ b, a ] ); + } + + // apply transition with (1..n) arguments + }( args[ 0 ], args[ 1 ] ) ) && + innerEquiv.apply( this, args.splice( 1, args.length - 1 ) ) ); + }; + + return innerEquiv; +}()); + +// Based on jsDump by Ariel Flesler +// http://flesler.blogspot.com/2008/05/jsdump-pretty-dump-of-any-javascript.html +QUnit.dump = (function() { + function quote( str 
) { + return "\"" + str.toString().replace( /"/g, "\\\"" ) + "\""; + } + function literal( o ) { + return o + ""; + } + function join( pre, arr, post ) { + var s = dump.separator(), + base = dump.indent(), + inner = dump.indent( 1 ); + if ( arr.join ) { + arr = arr.join( "," + s + inner ); + } + if ( !arr ) { + return pre + post; + } + return [ pre, inner + arr, base + post ].join( s ); + } + function array( arr, stack ) { + var i = arr.length, + ret = new Array( i ); + + if ( dump.maxDepth && dump.depth > dump.maxDepth ) { + return "[object Array]"; + } + + this.up(); + while ( i-- ) { + ret[ i ] = this.parse( arr[ i ], undefined, stack ); + } + this.down(); + return join( "[", ret, "]" ); + } + + var reName = /^function (\w+)/, + dump = { + + // objType is used mostly internally, you can fix a (custom) type in advance + parse: function( obj, objType, stack ) { + stack = stack || []; + var res, parser, parserType, + inStack = inArray( obj, stack ); + + if ( inStack !== -1 ) { + return "recursion(" + ( inStack - stack.length ) + ")"; + } + + objType = objType || this.typeOf( obj ); + parser = this.parsers[ objType ]; + parserType = typeof parser; + + if ( parserType === "function" ) { + stack.push( obj ); + res = parser.call( this, obj, stack ); + stack.pop(); + return res; + } + return ( parserType === "string" ) ? 
parser : this.parsers.error; + }, + typeOf: function( obj ) { + var type; + if ( obj === null ) { + type = "null"; + } else if ( typeof obj === "undefined" ) { + type = "undefined"; + } else if ( QUnit.is( "regexp", obj ) ) { + type = "regexp"; + } else if ( QUnit.is( "date", obj ) ) { + type = "date"; + } else if ( QUnit.is( "function", obj ) ) { + type = "function"; + } else if ( obj.setInterval !== undefined && + obj.document !== undefined && + obj.nodeType === undefined ) { + type = "window"; + } else if ( obj.nodeType === 9 ) { + type = "document"; + } else if ( obj.nodeType ) { + type = "node"; + } else if ( + + // native arrays + toString.call( obj ) === "[object Array]" || + + // NodeList objects + ( typeof obj.length === "number" && obj.item !== undefined && + ( obj.length ? obj.item( 0 ) === obj[ 0 ] : ( obj.item( 0 ) === null && + obj[ 0 ] === undefined ) ) ) + ) { + type = "array"; + } else if ( obj.constructor === Error.prototype.constructor ) { + type = "error"; + } else { + type = typeof obj; + } + return type; + }, + separator: function() { + return this.multiline ? this.HTML ? "
" : "\n" : this.HTML ? " " : " "; + }, + // extra can be a number, shortcut for increasing-calling-decreasing + indent: function( extra ) { + if ( !this.multiline ) { + return ""; + } + var chr = this.indentChar; + if ( this.HTML ) { + chr = chr.replace( /\t/g, " " ).replace( / /g, " " ); + } + return new Array( this.depth + ( extra || 0 ) ).join( chr ); + }, + up: function( a ) { + this.depth += a || 1; + }, + down: function( a ) { + this.depth -= a || 1; + }, + setParser: function( name, parser ) { + this.parsers[ name ] = parser; + }, + // The next 3 are exposed so you can use them + quote: quote, + literal: literal, + join: join, + // + depth: 1, + maxDepth: QUnit.config.maxDepth, + + // This is the list of parsers, to modify them, use dump.setParser + parsers: { + window: "[Window]", + document: "[Document]", + error: function( error ) { + return "Error(\"" + error.message + "\")"; + }, + unknown: "[Unknown]", + "null": "null", + "undefined": "undefined", + "function": function( fn ) { + var ret = "function", + + // functions never have name in IE + name = "name" in fn ? fn.name : ( reName.exec( fn ) || [] )[ 1 ]; + + if ( name ) { + ret += " " + name; + } + ret += "( "; + + ret = [ ret, dump.parse( fn, "functionArgs" ), "){" ].join( "" ); + return join( ret, dump.parse( fn, "functionCode" ), "}" ); + }, + array: array, + nodelist: array, + "arguments": array, + object: function( map, stack ) { + var keys, key, val, i, nonEnumerableProperties, + ret = []; + + if ( dump.maxDepth && dump.depth > dump.maxDepth ) { + return "[object Object]"; + } + + dump.up(); + keys = []; + for ( key in map ) { + keys.push( key ); + } + + // Some properties are not always enumerable on Error objects. 
+ nonEnumerableProperties = [ "message", "name" ]; + for ( i in nonEnumerableProperties ) { + key = nonEnumerableProperties[ i ]; + if ( key in map && inArray( key, keys ) < 0 ) { + keys.push( key ); + } + } + keys.sort(); + for ( i = 0; i < keys.length; i++ ) { + key = keys[ i ]; + val = map[ key ]; + ret.push( dump.parse( key, "key" ) + ": " + + dump.parse( val, undefined, stack ) ); + } + dump.down(); + return join( "{", ret, "}" ); + }, + node: function( node ) { + var len, i, val, + open = dump.HTML ? "<" : "<", + close = dump.HTML ? ">" : ">", + tag = node.nodeName.toLowerCase(), + ret = open + tag, + attrs = node.attributes; + + if ( attrs ) { + for ( i = 0, len = attrs.length; i < len; i++ ) { + val = attrs[ i ].nodeValue; + + // IE6 includes all attributes in .attributes, even ones not explicitly + // set. Those have values like undefined, null, 0, false, "" or + // "inherit". + if ( val && val !== "inherit" ) { + ret += " " + attrs[ i ].nodeName + "=" + + dump.parse( val, "attribute" ); + } + } + } + ret += close; + + // Show content of TextNode or CDATASection + if ( node.nodeType === 3 || node.nodeType === 4 ) { + ret += node.nodeValue; + } + + return ret + open + "/" + tag + close; + }, + + // function calls it internally, it's the arguments part of the function + functionArgs: function( fn ) { + var args, + l = fn.length; + + if ( !l ) { + return ""; + } + + args = new Array( l ); + while ( l-- ) { + + // 97 is 'a' + args[ l ] = String.fromCharCode( 97 + l ); + } + return " " + args.join( ", " ) + " "; + }, + // object calls it internally, the key part of an item in a map + key: quote, + // function calls it internally, it's the content of the function + functionCode: "[code]", + // node calls it internally, it's an html attribute value + attribute: quote, + string: quote, + date: quote, + regexp: literal, + number: literal, + "boolean": literal + }, + // if true, entities are escaped ( <, >, \t, space and \n ) + HTML: false, + // indentation unit + 
indentChar: " ", + // if true, items in a collection, are separated by a \n, else just a space. + multiline: true + }; + + return dump; +}()); + +// back compat +QUnit.jsDump = QUnit.dump; + +// For browser, export only select globals +if ( typeof window !== "undefined" ) { + + // Deprecated + // Extend assert methods to QUnit and Global scope through Backwards compatibility + (function() { + var i, + assertions = Assert.prototype; + + function applyCurrent( current ) { + return function() { + var assert = new Assert( QUnit.config.current ); + current.apply( assert, arguments ); + }; + } + + for ( i in assertions ) { + QUnit[ i ] = applyCurrent( assertions[ i ] ); + } + })(); + + (function() { + var i, l, + keys = [ + "test", + "module", + "expect", + "asyncTest", + "start", + "stop", + "ok", + "notOk", + "equal", + "notEqual", + "propEqual", + "notPropEqual", + "deepEqual", + "notDeepEqual", + "strictEqual", + "notStrictEqual", + "throws" + ]; + + for ( i = 0, l = keys.length; i < l; i++ ) { + window[ keys[ i ] ] = QUnit[ keys[ i ] ]; + } + })(); + + window.QUnit = QUnit; +} + +// For nodejs +if ( typeof module !== "undefined" && module && module.exports ) { + module.exports = QUnit; + + // For consistency with CommonJS environments' exports + module.exports.QUnit = QUnit; +} + +// For CommonJS with exports, but without module.exports, like Rhino +if ( typeof exports !== "undefined" && exports ) { + exports.QUnit = QUnit; +} + +if ( typeof define === "function" && define.amd ) { + define( function() { + return QUnit; + } ); + QUnit.config.autostart = false; +} + +// Get a reference to the global object, like window in browsers +}( (function() { + return this; +})() )); + +/*istanbul ignore next */ +// jscs:disable maximumLineLength +/* + * This file is a modified version of google-diff-match-patch's JavaScript implementation + * (https://code.google.com/p/google-diff-match-patch/source/browse/trunk/javascript/diff_match_patch_uncompressed.js), + * modifications 
are licensed as more fully set forth in LICENSE.txt. + * + * The original source of google-diff-match-patch is attributable and licensed as follows: + * + * Copyright 2006 Google Inc. + * http://code.google.com/p/google-diff-match-patch/ + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * More Info: + * https://code.google.com/p/google-diff-match-patch/ + * + * Usage: QUnit.diff(expected, actual) + * + * QUnit.diff( "the quick brown fox jumped over", "the quick fox jumps over" ) === "the quick brown fox jumpsed} Array of diff tuples. + */ + DiffMatchPatch.prototype.DiffMain = function( text1, text2, optChecklines, optDeadline ) { + var deadline, checklines, commonlength, + commonprefix, commonsuffix, diffs; + // Set a deadline by which time the diff must be complete. 
+ if ( typeof optDeadline === "undefined" ) { + if ( this.DiffTimeout <= 0 ) { + optDeadline = Number.MAX_VALUE; } else { - return "number"; - } - - } else if (typeof o === "undefined") { - return "undefined"; - - // consider: typeof null === object - } else if (o === null) { - return "null"; - - // consider: typeof [] === object - } else if (QUnit.is( "Array", o)) { - return "array"; - - // consider: typeof new Date() === object - } else if (QUnit.is( "Date", o)) { - return "date"; - - // consider: /./ instanceof Object; - // /./ instanceof RegExp; - // typeof /./ === "function"; // => false in IE and Opera, - // true in FF and Safari - } else if (QUnit.is( "RegExp", o)) { - return "regexp"; - - } else if (typeof o === "object") { - return "object"; - - } else if (QUnit.is( "Function", o)) { - return "function"; - } else { - return undefined; - } - } - - // Call the o related callback with the given arguments. - function bindCallbacks(o, callbacks, args) { - var prop = hoozit(o); - if (prop) { - if (hoozit(callbacks[prop]) === "function") { - return callbacks[prop].apply(callbacks, args); - } else { - return callbacks[prop]; // or undefined + optDeadline = ( new Date() ).getTime() + this.DiffTimeout * 1000; } } - } - - var callbacks = function () { - - // for string, boolean, number and null - function useStrictEquality(b, a) { - if (b instanceof a.constructor || a instanceof b.constructor) { - // to catch short annotaion VS 'new' annotation of a declaration - // e.g. var i = 1; - // var j = new Number(1); - return a == b; - } else { - return a === b; + deadline = optDeadline; + + // Check for null inputs. + if ( text1 === null || text2 === null ) { + throw new Error( "Null input. (DiffMain)" ); + } + + // Check for equality (speedup). 
+ if ( text1 === text2 ) { + if ( text1 ) { + return [ + [ DIFF_EQUAL, text1 ] + ]; + } + return []; + } + + if ( typeof optChecklines === "undefined" ) { + optChecklines = true; + } + + checklines = optChecklines; + + // Trim off common prefix (speedup). + commonlength = this.diffCommonPrefix( text1, text2 ); + commonprefix = text1.substring( 0, commonlength ); + text1 = text1.substring( commonlength ); + text2 = text2.substring( commonlength ); + + // Trim off common suffix (speedup). + ///////// + commonlength = this.diffCommonSuffix( text1, text2 ); + commonsuffix = text1.substring( text1.length - commonlength ); + text1 = text1.substring( 0, text1.length - commonlength ); + text2 = text2.substring( 0, text2.length - commonlength ); + + // Compute the diff on the middle block. + diffs = this.diffCompute( text1, text2, checklines, deadline ); + + // Restore the prefix and suffix. + if ( commonprefix ) { + diffs.unshift( [ DIFF_EQUAL, commonprefix ] ); + } + if ( commonsuffix ) { + diffs.push( [ DIFF_EQUAL, commonsuffix ] ); + } + this.diffCleanupMerge( diffs ); + return diffs; + }; + + /** + * Reduce the number of edits by eliminating operationally trivial equalities. + * @param {!Array.} diffs Array of diff tuples. + */ + DiffMatchPatch.prototype.diffCleanupEfficiency = function( diffs ) { + var changes, equalities, equalitiesLength, lastequality, + pointer, preIns, preDel, postIns, postDel; + changes = false; + equalities = []; // Stack of indices where equalities are found. + equalitiesLength = 0; // Keeping our own length var is faster in JS. + /** @type {?string} */ + lastequality = null; + // Always equal to diffs[equalities[equalitiesLength - 1]][1] + pointer = 0; // Index of current position. + // Is there an insertion operation before the last equality. + preIns = false; + // Is there a deletion operation before the last equality. + preDel = false; + // Is there an insertion operation after the last equality. 
+ postIns = false; + // Is there a deletion operation after the last equality. + postDel = false; + while ( pointer < diffs.length ) { + if ( diffs[ pointer ][ 0 ] === DIFF_EQUAL ) { // Equality found. + if ( diffs[ pointer ][ 1 ].length < this.DiffEditCost && ( postIns || postDel ) ) { + // Candidate found. + equalities[ equalitiesLength++ ] = pointer; + preIns = postIns; + preDel = postDel; + lastequality = diffs[ pointer ][ 1 ]; + } else { + // Not a candidate, and can never become one. + equalitiesLength = 0; + lastequality = null; + } + postIns = postDel = false; + } else { // An insertion or deletion. + if ( diffs[ pointer ][ 0 ] === DIFF_DELETE ) { + postDel = true; + } else { + postIns = true; + } + /* + * Five types to be split: + * ABXYCD + * AXCD + * ABXC + * AXCD + * ABXC + */ + if ( lastequality && ( ( preIns && preDel && postIns && postDel ) || + ( ( lastequality.length < this.DiffEditCost / 2 ) && + ( preIns + preDel + postIns + postDel ) === 3 ) ) ) { + // Duplicate record. + diffs.splice( equalities[equalitiesLength - 1], 0, [ DIFF_DELETE, lastequality ] ); + // Change second copy to insert. + diffs[ equalities[ equalitiesLength - 1 ] + 1 ][ 0 ] = DIFF_INSERT; + equalitiesLength--; // Throw away the equality we just deleted; + lastequality = null; + if (preIns && preDel) { + // No changes made which could affect previous entry, keep going. + postIns = postDel = true; + equalitiesLength = 0; + } else { + equalitiesLength--; // Throw away the previous equality. + pointer = equalitiesLength > 0 ? equalities[ equalitiesLength - 1 ] : -1; + postIns = postDel = false; + } + changes = true; + } + } + pointer++; + } + + if ( changes ) { + this.diffCleanupMerge( diffs ); + } + }; + + /** + * Convert a diff array into a pretty HTML report. + * @param {!Array.} diffs Array of diff tuples. + * @param {integer} string to be beautified. + * @return {string} HTML representation. 
+ */ + DiffMatchPatch.prototype.diffPrettyHtml = function( diffs ) { + var op, data, x, html = []; + for ( x = 0; x < diffs.length; x++ ) { + op = diffs[x][0]; // Operation (insert, delete, equal) + data = diffs[x][1]; // Text of change. + switch ( op ) { + case DIFF_INSERT: + html[x] = "" + data + ""; + break; + case DIFF_DELETE: + html[x] = "" + data + ""; + break; + case DIFF_EQUAL: + html[x] = "" + data + ""; + break; } } - - return { - "string": useStrictEquality, - "boolean": useStrictEquality, - "number": useStrictEquality, - "null": useStrictEquality, - "undefined": useStrictEquality, - - "nan": function (b) { - return isNaN(b); - }, - - "date": function (b, a) { - return hoozit(b) === "date" && a.valueOf() === b.valueOf(); - }, - - "regexp": function (b, a) { - return hoozit(b) === "regexp" && - a.source === b.source && // the regex itself - a.global === b.global && // and its modifers (gmi) ... - a.ignoreCase === b.ignoreCase && - a.multiline === b.multiline; - }, - - // - skip when the property is a method of an instance (OOP) - // - abort otherwise, - // initial === would have catch identical references anyway - "function": function () { - var caller = callers[callers.length - 1]; - return caller !== Object && - typeof caller !== "undefined"; - }, - - "array": function (b, a) { - var i, j, loop; - var len; - - // b could be an object literal here - if ( ! (hoozit(b) === "array")) { - return false; - } - - len = a.length; - if (len !== b.length) { // safe and faster - return false; + return html.join(""); + }; + + /** + * Determine the common prefix of two strings. + * @param {string} text1 First string. + * @param {string} text2 Second string. + * @return {number} The number of characters common to the start of each + * string. + */ + DiffMatchPatch.prototype.diffCommonPrefix = function( text1, text2 ) { + var pointermid, pointermax, pointermin, pointerstart; + // Quick check for common null cases. 
+ if ( !text1 || !text2 || text1.charAt(0) !== text2.charAt(0) ) { + return 0; + } + // Binary search. + // Performance analysis: http://neil.fraser.name/news/2007/10/09/ + pointermin = 0; + pointermax = Math.min( text1.length, text2.length ); + pointermid = pointermax; + pointerstart = 0; + while ( pointermin < pointermid ) { + if ( text1.substring( pointerstart, pointermid ) === text2.substring( pointerstart, pointermid ) ) { + pointermin = pointermid; + pointerstart = pointermin; + } else { + pointermax = pointermid; + } + pointermid = Math.floor( ( pointermax - pointermin ) / 2 + pointermin ); + } + return pointermid; + }; + + /** + * Determine the common suffix of two strings. + * @param {string} text1 First string. + * @param {string} text2 Second string. + * @return {number} The number of characters common to the end of each string. + */ + DiffMatchPatch.prototype.diffCommonSuffix = function( text1, text2 ) { + var pointermid, pointermax, pointermin, pointerend; + // Quick check for common null cases. + if (!text1 || !text2 || text1.charAt(text1.length - 1) !== text2.charAt(text2.length - 1)) { + return 0; + } + // Binary search. + // Performance analysis: http://neil.fraser.name/news/2007/10/09/ + pointermin = 0; + pointermax = Math.min(text1.length, text2.length); + pointermid = pointermax; + pointerend = 0; + while ( pointermin < pointermid ) { + if (text1.substring( text1.length - pointermid, text1.length - pointerend ) === + text2.substring( text2.length - pointermid, text2.length - pointerend ) ) { + pointermin = pointermid; + pointerend = pointermin; + } else { + pointermax = pointermid; + } + pointermid = Math.floor( ( pointermax - pointermin ) / 2 + pointermin ); + } + return pointermid; + }; + + /** + * Find the differences between two texts. Assumes that the texts do not + * have any common prefix or suffix. + * @param {string} text1 Old string to be diffed. + * @param {string} text2 New string to be diffed. 
+ * @param {boolean} checklines Speedup flag. If false, then don't run a + * line-level diff first to identify the changed areas. + * If true, then run a faster, slightly less optimal diff. + * @param {number} deadline Time when the diff should be complete by. + * @return {!Array.} Array of diff tuples. + * @private + */ + DiffMatchPatch.prototype.diffCompute = function( text1, text2, checklines, deadline ) { + var diffs, longtext, shorttext, i, hm, + text1A, text2A, text1B, text2B, + midCommon, diffsA, diffsB; + + if ( !text1 ) { + // Just add some text (speedup). + return [ + [ DIFF_INSERT, text2 ] + ]; + } + + if (!text2) { + // Just delete some text (speedup). + return [ + [ DIFF_DELETE, text1 ] + ]; + } + + longtext = text1.length > text2.length ? text1 : text2; + shorttext = text1.length > text2.length ? text2 : text1; + i = longtext.indexOf( shorttext ); + if ( i !== -1 ) { + // Shorter text is inside the longer text (speedup). + diffs = [ + [ DIFF_INSERT, longtext.substring( 0, i ) ], + [ DIFF_EQUAL, shorttext ], + [ DIFF_INSERT, longtext.substring( i + shorttext.length ) ] + ]; + // Swap insertions for deletions if diff is reversed. + if ( text1.length > text2.length ) { + diffs[0][0] = diffs[2][0] = DIFF_DELETE; + } + return diffs; + } + + if ( shorttext.length === 1 ) { + // Single character string. + // After the previous speedup, the character can't be an equality. + return [ + [ DIFF_DELETE, text1 ], + [ DIFF_INSERT, text2 ] + ]; + } + + // Check to see if the problem can be split in two. + hm = this.diffHalfMatch(text1, text2); + if (hm) { + // A half-match was found, sort out the return data. + text1A = hm[0]; + text1B = hm[1]; + text2A = hm[2]; + text2B = hm[3]; + midCommon = hm[4]; + // Send both pairs off for separate processing. + diffsA = this.DiffMain(text1A, text2A, checklines, deadline); + diffsB = this.DiffMain(text1B, text2B, checklines, deadline); + // Merge the results. 
+ return diffsA.concat([ + [ DIFF_EQUAL, midCommon ] + ], diffsB); + } + + if (checklines && text1.length > 100 && text2.length > 100) { + return this.diffLineMode(text1, text2, deadline); + } + + return this.diffBisect(text1, text2, deadline); + }; + + /** + * Do the two texts share a substring which is at least half the length of the + * longer text? + * This speedup can produce non-minimal diffs. + * @param {string} text1 First string. + * @param {string} text2 Second string. + * @return {Array.} Five element Array, containing the prefix of + * text1, the suffix of text1, the prefix of text2, the suffix of + * text2 and the common middle. Or null if there was no match. + * @private + */ + DiffMatchPatch.prototype.diffHalfMatch = function(text1, text2) { + var longtext, shorttext, dmp, + text1A, text2B, text2A, text1B, midCommon, + hm1, hm2, hm; + if (this.DiffTimeout <= 0) { + // Don't risk returning a non-optimal diff if we have unlimited time. + return null; + } + longtext = text1.length > text2.length ? text1 : text2; + shorttext = text1.length > text2.length ? text2 : text1; + if (longtext.length < 4 || shorttext.length * 2 < longtext.length) { + return null; // Pointless. + } + dmp = this; // 'this' becomes 'window' in a closure. + + /** + * Does a substring of shorttext exist within longtext such that the substring + * is at least half the length of longtext? + * Closure, but does not reference any external variables. + * @param {string} longtext Longer string. + * @param {string} shorttext Shorter string. + * @param {number} i Start index of quarter length substring within longtext. + * @return {Array.} Five element Array, containing the prefix of + * longtext, the suffix of longtext, the prefix of shorttext, the suffix + * of shorttext and the common middle. Or null if there was no match. 
+ * @private + */ + function diffHalfMatchI(longtext, shorttext, i) { + var seed, j, bestCommon, prefixLength, suffixLength, + bestLongtextA, bestLongtextB, bestShorttextA, bestShorttextB; + // Start with a 1/4 length substring at position i as a seed. + seed = longtext.substring(i, i + Math.floor(longtext.length / 4)); + j = -1; + bestCommon = ""; + while ((j = shorttext.indexOf(seed, j + 1)) !== -1) { + prefixLength = dmp.diffCommonPrefix(longtext.substring(i), + shorttext.substring(j)); + suffixLength = dmp.diffCommonSuffix(longtext.substring(0, i), + shorttext.substring(0, j)); + if (bestCommon.length < suffixLength + prefixLength) { + bestCommon = shorttext.substring(j - suffixLength, j) + + shorttext.substring(j, j + prefixLength); + bestLongtextA = longtext.substring(0, i - suffixLength); + bestLongtextB = longtext.substring(i + prefixLength); + bestShorttextA = shorttext.substring(0, j - suffixLength); + bestShorttextB = shorttext.substring(j + prefixLength); } - - //track reference to avoid circular references - parents.push(a); - for (i = 0; i < len; i++) { - loop = false; - for(j=0;j= longtext.length) { + return [ bestLongtextA, bestLongtextB, + bestShorttextA, bestShorttextB, bestCommon + ]; + } else { + return null; + } + } + + // First check if the second quarter is the seed for a half-match. + hm1 = diffHalfMatchI(longtext, shorttext, + Math.ceil(longtext.length / 4)); + // Check again based on the third quarter. + hm2 = diffHalfMatchI(longtext, shorttext, + Math.ceil(longtext.length / 2)); + if (!hm1 && !hm2) { + return null; + } else if (!hm2) { + hm = hm1; + } else if (!hm1) { + hm = hm2; + } else { + // Both matched. Select the longest. + hm = hm1[4].length > hm2[4].length ? hm1 : hm2; + } + + // A half-match was found, sort out the return data. 
+ text1A, text1B, text2A, text2B; + if (text1.length > text2.length) { + text1A = hm[0]; + text1B = hm[1]; + text2A = hm[2]; + text2B = hm[3]; + } else { + text2A = hm[0]; + text2B = hm[1]; + text1A = hm[2]; + text1B = hm[3]; + } + midCommon = hm[4]; + return [ text1A, text1B, text2A, text2B, midCommon ]; + }; + + /** + * Do a quick line-level diff on both strings, then rediff the parts for + * greater accuracy. + * This speedup can produce non-minimal diffs. + * @param {string} text1 Old string to be diffed. + * @param {string} text2 New string to be diffed. + * @param {number} deadline Time when the diff should be complete by. + * @return {!Array.} Array of diff tuples. + * @private + */ + DiffMatchPatch.prototype.diffLineMode = function(text1, text2, deadline) { + var a, diffs, linearray, pointer, countInsert, + countDelete, textInsert, textDelete, j; + // Scan the text on a line-by-line basis first. + a = this.diffLinesToChars(text1, text2); + text1 = a.chars1; + text2 = a.chars2; + linearray = a.lineArray; + + diffs = this.DiffMain(text1, text2, false, deadline); + + // Convert the diff back to original text. + this.diffCharsToLines(diffs, linearray); + // Eliminate freak matches (e.g. blank lines) + this.diffCleanupSemantic(diffs); + + // Rediff any replacement blocks, this time character-by-character. + // Add a dummy entry at the end. + diffs.push( [ DIFF_EQUAL, "" ] ); + pointer = 0; + countDelete = 0; + countInsert = 0; + textDelete = ""; + textInsert = ""; + while (pointer < diffs.length) { + switch ( diffs[pointer][0] ) { + case DIFF_INSERT: + countInsert++; + textInsert += diffs[pointer][1]; + break; + case DIFF_DELETE: + countDelete++; + textDelete += diffs[pointer][1]; + break; + case DIFF_EQUAL: + // Upon reaching an equality, check for prior redundancies. + if (countDelete >= 1 && countInsert >= 1) { + // Delete the offending records and add the merged ones. 
+ diffs.splice(pointer - countDelete - countInsert, + countDelete + countInsert); + pointer = pointer - countDelete - countInsert; + a = this.DiffMain(textDelete, textInsert, false, deadline); + for (j = a.length - 1; j >= 0; j--) { + diffs.splice( pointer, 0, a[j] ); } + pointer = pointer + a.length; } - if (!loop && ! innerEquiv(a[i], b[i])) { - parents.pop(); - return false; - } + countInsert = 0; + countDelete = 0; + textDelete = ""; + textInsert = ""; + break; + } + pointer++; + } + diffs.pop(); // Remove the dummy entry at the end. + + return diffs; + }; + + /** + * Find the 'middle snake' of a diff, split the problem in two + * and return the recursively constructed diff. + * See Myers 1986 paper: An O(ND) Difference Algorithm and Its Variations. + * @param {string} text1 Old string to be diffed. + * @param {string} text2 New string to be diffed. + * @param {number} deadline Time at which to bail if not yet complete. + * @return {!Array.} Array of diff tuples. + * @private + */ + DiffMatchPatch.prototype.diffBisect = function(text1, text2, deadline) { + var text1Length, text2Length, maxD, vOffset, vLength, + v1, v2, x, delta, front, k1start, k1end, k2start, + k2end, k2Offset, k1Offset, x1, x2, y1, y2, d, k1, k2; + // Cache the text lengths to prevent multiple calls. + text1Length = text1.length; + text2Length = text2.length; + maxD = Math.ceil((text1Length + text2Length) / 2); + vOffset = maxD; + vLength = 2 * maxD; + v1 = new Array(vLength); + v2 = new Array(vLength); + // Setting all elements to -1 is faster in Chrome & Firefox than mixing + // integers and undefined. + for (x = 0; x < vLength; x++) { + v1[x] = -1; + v2[x] = -1; + } + v1[vOffset + 1] = 0; + v2[vOffset + 1] = 0; + delta = text1Length - text2Length; + // If the total number of characters is odd, then the front path will collide + // with the reverse path. + front = (delta % 2 !== 0); + // Offsets for start and end of k loop. + // Prevents mapping of space beyond the grid. 
+ k1start = 0; + k1end = 0; + k2start = 0; + k2end = 0; + for (d = 0; d < maxD; d++) { + // Bail out if deadline is reached. + if ((new Date()).getTime() > deadline) { + break; + } + + // Walk the front path one step. + for (k1 = -d + k1start; k1 <= d - k1end; k1 += 2) { + k1Offset = vOffset + k1; + if ( k1 === -d || ( k1 !== d && v1[ k1Offset - 1 ] < v1[ k1Offset + 1 ] ) ) { + x1 = v1[k1Offset + 1]; + } else { + x1 = v1[k1Offset - 1] + 1; } - parents.pop(); - return true; - }, - - "object": function (b, a) { - var i, j, loop; - var eq = true; // unless we can proove it - var aProperties = [], bProperties = []; // collection of strings - - // comparing constructors is more strict than using instanceof - if ( a.constructor !== b.constructor) { - return false; + y1 = x1 - k1; + while (x1 < text1Length && y1 < text2Length && + text1.charAt(x1) === text2.charAt(y1)) { + x1++; + y1++; } - - // stack constructor before traversing properties - callers.push(a.constructor); - //track reference to avoid circular references - parents.push(a); - - for (i in a) { // be strict: don't ensures hasOwnProperty and go deep - loop = false; - for(j=0;j text1Length) { + // Ran off the right of the graph. + k1end += 2; + } else if (y1 > text2Length) { + // Ran off the bottom of the graph. + k1start += 2; + } else if (front) { + k2Offset = vOffset + delta - k1; + if (k2Offset >= 0 && k2Offset < vLength && v2[k2Offset] !== -1) { + // Mirror x2 onto top-left coordinate system. + x2 = text1Length - v2[k2Offset]; + if (x1 >= x2) { + // Overlap detected. + return this.diffBisectSplit(text1, text2, x1, y1, deadline); + } } } - - callers.pop(); // unstack, we are done - parents.pop(); - - for (i in b) { - bProperties.push(i); // collect b's properties + } + + // Walk the reverse path one step. 
+ for (k2 = -d + k2start; k2 <= d - k2end; k2 += 2) { + k2Offset = vOffset + k2; + if ( k2 === -d || (k2 !== d && v2[ k2Offset - 1 ] < v2[ k2Offset + 1 ] ) ) { + x2 = v2[k2Offset + 1]; + } else { + x2 = v2[k2Offset - 1] + 1; + } + y2 = x2 - k2; + while (x2 < text1Length && y2 < text2Length && + text1.charAt(text1Length - x2 - 1) === + text2.charAt(text2Length - y2 - 1)) { + x2++; + y2++; + } + v2[k2Offset] = x2; + if (x2 > text1Length) { + // Ran off the left of the graph. + k2end += 2; + } else if (y2 > text2Length) { + // Ran off the top of the graph. + k2start += 2; + } else if (!front) { + k1Offset = vOffset + delta - k2; + if (k1Offset >= 0 && k1Offset < vLength && v1[k1Offset] !== -1) { + x1 = v1[k1Offset]; + y1 = vOffset + x1 - k1Offset; + // Mirror x2 onto top-left coordinate system. + x2 = text1Length - x2; + if (x1 >= x2) { + // Overlap detected. + return this.diffBisectSplit(text1, text2, x1, y1, deadline); + } + } + } + } + } + // Diff took too long and hit the deadline or + // number of diffs equals number of characters, no commonality at all. + return [ + [ DIFF_DELETE, text1 ], + [ DIFF_INSERT, text2 ] + ]; + }; + + /** + * Given the location of the 'middle snake', split the diff in two parts + * and recurse. + * @param {string} text1 Old string to be diffed. + * @param {string} text2 New string to be diffed. + * @param {number} x Index of split point in text1. + * @param {number} y Index of split point in text2. + * @param {number} deadline Time at which to bail if not yet complete. + * @return {!Array.} Array of diff tuples. + * @private + */ + DiffMatchPatch.prototype.diffBisectSplit = function( text1, text2, x, y, deadline ) { + var text1a, text1b, text2a, text2b, diffs, diffsb; + text1a = text1.substring(0, x); + text2a = text2.substring(0, y); + text1b = text1.substring(x); + text2b = text2.substring(y); + + // Compute both diffs serially. 
+ diffs = this.DiffMain(text1a, text2a, false, deadline); + diffsb = this.DiffMain(text1b, text2b, false, deadline); + + return diffs.concat(diffsb); + }; + + /** + * Reduce the number of edits by eliminating semantically trivial equalities. + * @param {!Array.} diffs Array of diff tuples. + */ + DiffMatchPatch.prototype.diffCleanupSemantic = function(diffs) { + var changes, equalities, equalitiesLength, lastequality, + pointer, lengthInsertions2, lengthDeletions2, lengthInsertions1, + lengthDeletions1, deletion, insertion, overlapLength1, overlapLength2; + changes = false; + equalities = []; // Stack of indices where equalities are found. + equalitiesLength = 0; // Keeping our own length var is faster in JS. + /** @type {?string} */ + lastequality = null; + // Always equal to diffs[equalities[equalitiesLength - 1]][1] + pointer = 0; // Index of current position. + // Number of characters that changed prior to the equality. + lengthInsertions1 = 0; + lengthDeletions1 = 0; + // Number of characters that changed after the equality. + lengthInsertions2 = 0; + lengthDeletions2 = 0; + while (pointer < diffs.length) { + if (diffs[pointer][0] === DIFF_EQUAL) { // Equality found. + equalities[equalitiesLength++] = pointer; + lengthInsertions1 = lengthInsertions2; + lengthDeletions1 = lengthDeletions2; + lengthInsertions2 = 0; + lengthDeletions2 = 0; + lastequality = diffs[pointer][1]; + } else { // An insertion or deletion. + if (diffs[pointer][0] === DIFF_INSERT) { + lengthInsertions2 += diffs[pointer][1].length; + } else { + lengthDeletions2 += diffs[pointer][1].length; + } + // Eliminate an equality that is smaller or equal to the edits on both + // sides of it. + if (lastequality && (lastequality.length <= + Math.max(lengthInsertions1, lengthDeletions1)) && + (lastequality.length <= Math.max(lengthInsertions2, + lengthDeletions2))) { + // Duplicate record. 
+ diffs.splice( equalities[ equalitiesLength - 1 ], 0, [ DIFF_DELETE, lastequality ] ); + // Change second copy to insert. + diffs[equalities[equalitiesLength - 1] + 1][0] = DIFF_INSERT; + // Throw away the equality we just deleted. + equalitiesLength--; + // Throw away the previous equality (it needs to be reevaluated). + equalitiesLength--; + pointer = equalitiesLength > 0 ? equalities[equalitiesLength - 1] : -1; + lengthInsertions1 = 0; // Reset the counters. + lengthDeletions1 = 0; + lengthInsertions2 = 0; + lengthDeletions2 = 0; + lastequality = null; + changes = true; } - - // Ensures identical properties name - return eq && innerEquiv(aProperties.sort(), bProperties.sort()); + } + pointer++; + } + + // Normalize the diff. + if (changes) { + this.diffCleanupMerge(diffs); + } + + // Find any overlaps between deletions and insertions. + // e.g: abcxxxxxxdef + // -> abcxxxdef + // e.g: xxxabcdefxxx + // -> defxxxabc + // Only extract an overlap if it is as big as the edit ahead or behind it. + pointer = 1; + while (pointer < diffs.length) { + if (diffs[pointer - 1][0] === DIFF_DELETE && + diffs[pointer][0] === DIFF_INSERT) { + deletion = diffs[pointer - 1][1]; + insertion = diffs[pointer][1]; + overlapLength1 = this.diffCommonOverlap(deletion, insertion); + overlapLength2 = this.diffCommonOverlap(insertion, deletion); + if (overlapLength1 >= overlapLength2) { + if (overlapLength1 >= deletion.length / 2 || + overlapLength1 >= insertion.length / 2) { + // Overlap found. Insert an equality and trim the surrounding edits. + diffs.splice( pointer, 0, [ DIFF_EQUAL, insertion.substring( 0, overlapLength1 ) ] ); + diffs[pointer - 1][1] = + deletion.substring(0, deletion.length - overlapLength1); + diffs[pointer + 1][1] = insertion.substring(overlapLength1); + pointer++; + } + } else { + if (overlapLength2 >= deletion.length / 2 || + overlapLength2 >= insertion.length / 2) { + // Reverse overlap found. + // Insert an equality and swap and trim the surrounding edits. 
+ diffs.splice( pointer, 0, [ DIFF_EQUAL, deletion.substring( 0, overlapLength2 ) ] ); + diffs[pointer - 1][0] = DIFF_INSERT; + diffs[pointer - 1][1] = + insertion.substring(0, insertion.length - overlapLength2); + diffs[pointer + 1][0] = DIFF_DELETE; + diffs[pointer + 1][1] = + deletion.substring(overlapLength2); + pointer++; + } + } + pointer++; } + pointer++; + } + }; + + /** + * Determine if the suffix of one string is the prefix of another. + * @param {string} text1 First string. + * @param {string} text2 Second string. + * @return {number} The number of characters common to the end of the first + * string and the start of the second string. + * @private + */ + DiffMatchPatch.prototype.diffCommonOverlap = function(text1, text2) { + var text1Length, text2Length, textLength, + best, length, pattern, found; + // Cache the text lengths to prevent multiple calls. + text1Length = text1.length; + text2Length = text2.length; + // Eliminate the null case. + if (text1Length === 0 || text2Length === 0) { + return 0; + } + // Truncate the longer string. + if (text1Length > text2Length) { + text1 = text1.substring(text1Length - text2Length); + } else if (text1Length < text2Length) { + text2 = text2.substring(0, text1Length); + } + textLength = Math.min(text1Length, text2Length); + // Quick check for the worst case. + if (text1 === text2) { + return textLength; + } + + // Start by looking for a single character match + // and increase length until no match is found. + // Performance analysis: http://neil.fraser.name/news/2010/11/04/ + best = 0; + length = 1; + while (true) { + pattern = text1.substring(textLength - length); + found = text2.indexOf(pattern); + if (found === -1) { + return best; + } + length += found; + if (found === 0 || text1.substring(textLength - length) === + text2.substring(0, length)) { + best = length; + length++; + } + } + }; + + /** + * Split two texts into an array of strings. 
Reduce the texts to a string of + * hashes where each Unicode character represents one line. + * @param {string} text1 First string. + * @param {string} text2 Second string. + * @return {{chars1: string, chars2: string, lineArray: !Array.}} + * An object containing the encoded text1, the encoded text2 and + * the array of unique strings. + * The zeroth element of the array of unique strings is intentionally blank. + * @private + */ + DiffMatchPatch.prototype.diffLinesToChars = function(text1, text2) { + var lineArray, lineHash, chars1, chars2; + lineArray = []; // e.g. lineArray[4] === 'Hello\n' + lineHash = {}; // e.g. lineHash['Hello\n'] === 4 + + // '\x00' is a valid character, but various debuggers don't like it. + // So we'll insert a junk entry to avoid generating a null character. + lineArray[0] = ""; + + /** + * Split a text into an array of strings. Reduce the texts to a string of + * hashes where each Unicode character represents one line. + * Modifies linearray and linehash through being a closure. + * @param {string} text String to encode. + * @return {string} Encoded string. + * @private + */ + function diffLinesToCharsMunge(text) { + var chars, lineStart, lineEnd, lineArrayLength, line; + chars = ""; + // Walk the text, pulling out a substring for each line. + // text.split('\n') would would temporarily double our memory footprint. + // Modifying text would create many large strings to garbage collect. + lineStart = 0; + lineEnd = -1; + // Keeping our own length variable is faster than looking it up. + lineArrayLength = lineArray.length; + while (lineEnd < text.length - 1) { + lineEnd = text.indexOf("\n", lineStart); + if (lineEnd === -1) { + lineEnd = text.length - 1; + } + line = text.substring(lineStart, lineEnd + 1); + lineStart = lineEnd + 1; + + if (lineHash.hasOwnProperty ? 
lineHash.hasOwnProperty(line) : + (lineHash[line] !== undefined)) { + chars += String.fromCharCode( lineHash[ line ] ); + } else { + chars += String.fromCharCode(lineArrayLength); + lineHash[line] = lineArrayLength; + lineArray[lineArrayLength++] = line; + } + } + return chars; + } + + chars1 = diffLinesToCharsMunge(text1); + chars2 = diffLinesToCharsMunge(text2); + return { + chars1: chars1, + chars2: chars2, + lineArray: lineArray }; - }(); - - innerEquiv = function () { // can take multiple arguments - var args = Array.prototype.slice.apply(arguments); - if (args.length < 2) { - return true; // end transition + }; + + /** + * Rehydrate the text in a diff from a string of line hashes to real lines of + * text. + * @param {!Array.} diffs Array of diff tuples. + * @param {!Array.} lineArray Array of unique strings. + * @private + */ + DiffMatchPatch.prototype.diffCharsToLines = function( diffs, lineArray ) { + var x, chars, text, y; + for ( x = 0; x < diffs.length; x++ ) { + chars = diffs[x][1]; + text = []; + for ( y = 0; y < chars.length; y++ ) { + text[y] = lineArray[chars.charCodeAt(y)]; + } + diffs[x][1] = text.join(""); + } + }; + + /** + * Reorder and merge like edit sections. Merge equalities. + * Any edit section can move as long as it doesn't cross an equality. + * @param {!Array.} diffs Array of diff tuples. + */ + DiffMatchPatch.prototype.diffCleanupMerge = function(diffs) { + var pointer, countDelete, countInsert, textInsert, textDelete, + commonlength, changes; + diffs.push( [ DIFF_EQUAL, "" ] ); // Add a dummy entry at the end. 
+ pointer = 0; + countDelete = 0; + countInsert = 0; + textDelete = ""; + textInsert = ""; + commonlength; + while (pointer < diffs.length) { + switch ( diffs[ pointer ][ 0 ] ) { + case DIFF_INSERT: + countInsert++; + textInsert += diffs[pointer][1]; + pointer++; + break; + case DIFF_DELETE: + countDelete++; + textDelete += diffs[pointer][1]; + pointer++; + break; + case DIFF_EQUAL: + // Upon reaching an equality, check for prior redundancies. + if (countDelete + countInsert > 1) { + if (countDelete !== 0 && countInsert !== 0) { + // Factor out any common prefixies. + commonlength = this.diffCommonPrefix(textInsert, textDelete); + if (commonlength !== 0) { + if ((pointer - countDelete - countInsert) > 0 && + diffs[pointer - countDelete - countInsert - 1][0] === + DIFF_EQUAL) { + diffs[pointer - countDelete - countInsert - 1][1] += + textInsert.substring(0, commonlength); + } else { + diffs.splice( 0, 0, [ DIFF_EQUAL, + textInsert.substring( 0, commonlength ) + ] ); + pointer++; + } + textInsert = textInsert.substring(commonlength); + textDelete = textDelete.substring(commonlength); + } + // Factor out any common suffixies. + commonlength = this.diffCommonSuffix(textInsert, textDelete); + if (commonlength !== 0) { + diffs[pointer][1] = textInsert.substring(textInsert.length - + commonlength) + diffs[pointer][1]; + textInsert = textInsert.substring(0, textInsert.length - + commonlength); + textDelete = textDelete.substring(0, textDelete.length - + commonlength); + } + } + // Delete the offending records and add the merged ones. 
+ if (countDelete === 0) { + diffs.splice( pointer - countInsert, + countDelete + countInsert, [ DIFF_INSERT, textInsert ] ); + } else if (countInsert === 0) { + diffs.splice( pointer - countDelete, + countDelete + countInsert, [ DIFF_DELETE, textDelete ] ); + } else { + diffs.splice( pointer - countDelete - countInsert, + countDelete + countInsert, [ DIFF_DELETE, textDelete ], [ DIFF_INSERT, textInsert ] ); + } + pointer = pointer - countDelete - countInsert + + (countDelete ? 1 : 0) + (countInsert ? 1 : 0) + 1; + } else if (pointer !== 0 && diffs[pointer - 1][0] === DIFF_EQUAL) { + // Merge this equality with the previous one. + diffs[pointer - 1][1] += diffs[pointer][1]; + diffs.splice(pointer, 1); + } else { + pointer++; + } + countInsert = 0; + countDelete = 0; + textDelete = ""; + textInsert = ""; + break; + } + } + if (diffs[diffs.length - 1][1] === "") { + diffs.pop(); // Remove the dummy entry at the end. + } + + // Second pass: look for single edits surrounded on both sides by equalities + // which can be shifted sideways to eliminate an equality. + // e.g: ABAC -> ABAC + changes = false; + pointer = 1; + // Intentionally ignore the first and last element (don't need checking). + while (pointer < diffs.length - 1) { + if (diffs[pointer - 1][0] === DIFF_EQUAL && + diffs[pointer + 1][0] === DIFF_EQUAL) { + // This is a single edit surrounded by equalities. + if ( diffs[ pointer ][ 1 ].substring( diffs[ pointer ][ 1 ].length - + diffs[ pointer - 1 ][ 1 ].length ) === diffs[ pointer - 1 ][ 1 ] ) { + // Shift the edit over the previous equality. 
+ diffs[pointer][1] = diffs[pointer - 1][1] + + diffs[pointer][1].substring(0, diffs[pointer][1].length - + diffs[pointer - 1][1].length); + diffs[pointer + 1][1] = diffs[pointer - 1][1] + diffs[pointer + 1][1]; + diffs.splice(pointer - 1, 1); + changes = true; + } else if ( diffs[ pointer ][ 1 ].substring( 0, diffs[ pointer + 1 ][ 1 ].length ) === + diffs[ pointer + 1 ][ 1 ] ) { + // Shift the edit over the next equality. + diffs[pointer - 1][1] += diffs[pointer + 1][1]; + diffs[pointer][1] = + diffs[pointer][1].substring(diffs[pointer + 1][1].length) + + diffs[pointer + 1][1]; + diffs.splice(pointer + 1, 1); + changes = true; + } + } + pointer++; } - - return (function (a, b) { - if (a === b) { - return true; // catch the most you can - } else if (a === null || b === null || typeof a === "undefined" || typeof b === "undefined" || hoozit(a) !== hoozit(b)) { - return false; // don't lose time with error prone cases - } else { - return bindCallbacks(a, callbacks, [b, a]); - } - - // apply transition with (1..n) arguments - })(args[0], args[1]) && arguments.callee.apply(this, args.splice(1, args.length -1)); + // If shifts were made, the diff needs reordering and another shift sweep. 
+ if (changes) { + this.diffCleanupMerge(diffs); + } + }; + + return function(o, n) { + var diff, output, text; + diff = new DiffMatchPatch(); + output = diff.DiffMain(o, n); + //console.log(output); + diff.diffCleanupEfficiency(output); + text = diff.diffPrettyHtml(output); + + return text; }; - - return innerEquiv; - -}(); +}()); +// jscs:enable + +(function() { + +// Deprecated QUnit.init - Ref #530 +// Re-initialize the configuration options +QUnit.init = function() { + var tests, banner, result, qunit, + config = QUnit.config; + + config.stats = { all: 0, bad: 0 }; + config.moduleStats = { all: 0, bad: 0 }; + config.started = 0; + config.updateRate = 1000; + config.blocking = false; + config.autostart = true; + config.autorun = false; + config.filter = ""; + config.queue = []; + + // Return on non-browser environments + // This is necessary to not break on node tests + if ( typeof window === "undefined" ) { + return; + } + + qunit = id( "qunit" ); + if ( qunit ) { + qunit.innerHTML = + "

" + escapeText( document.title ) + "

" + + "

" + + "
" + + "

" + + "
    "; + } + + tests = id( "qunit-tests" ); + banner = id( "qunit-banner" ); + result = id( "qunit-testresult" ); + + if ( tests ) { + tests.innerHTML = ""; + } + + if ( banner ) { + banner.className = ""; + } + + if ( result ) { + result.parentNode.removeChild( result ); + } + + if ( tests ) { + result = document.createElement( "p" ); + result.id = "qunit-testresult"; + result.className = "result"; + tests.parentNode.insertBefore( result, tests ); + result.innerHTML = "Running...
     "; + } +}; + +// Don't load the HTML Reporter on non-Browser environments +if ( typeof window === "undefined" ) { + return; +} + +var config = QUnit.config, + hasOwn = Object.prototype.hasOwnProperty, + defined = { + document: window.document !== undefined, + sessionStorage: (function() { + var x = "qunit-test-string"; + try { + sessionStorage.setItem( x, x ); + sessionStorage.removeItem( x ); + return true; + } catch ( e ) { + return false; + } + }()) + }, + modulesList = []; + +/** +* Escape text for attribute or text content. +*/ +function escapeText( s ) { + if ( !s ) { + return ""; + } + s = s + ""; + + // Both single quotes and double quotes (for attributes) + return s.replace( /['"<>&]/g, function( s ) { + switch ( s ) { + case "'": + return "'"; + case "\"": + return """; + case "<": + return "<"; + case ">": + return ">"; + case "&": + return "&"; + } + }); +} + +/** + * @param {HTMLElement} elem + * @param {string} type + * @param {Function} fn + */ +function addEvent( elem, type, fn ) { + if ( elem.addEventListener ) { + + // Standards-based browsers + elem.addEventListener( type, fn, false ); + } else if ( elem.attachEvent ) { + + // support: IE <9 + elem.attachEvent( "on" + type, function() { + var event = window.event; + if ( !event.target ) { + event.target = event.srcElement || document; + } + + fn.call( elem, event ); + }); + } +} /** - * jsDump - * Copyright (c) 2008 Ariel Flesler - aflesler(at)gmail(dot)com | http://flesler.blogspot.com - * Licensed under BSD (http://www.opensource.org/licenses/bsd-license.php) - * Date: 5/15/2008 - * @projectDescription Advanced and extensible data dumping for Javascript. 
- * @version 1.0.0 - * @author Ariel Flesler - * @link {http://flesler.blogspot.com/2008/05/jsdump-pretty-dump-of-any-javascript.html} + * @param {Array|NodeList} elems + * @param {string} type + * @param {Function} fn */ -QUnit.jsDump = (function() { - function quote( str ) { - return '"' + str.toString().replace(/"/g, '\\"') + '"'; - }; - function literal( o ) { - return o + ''; - }; - function join( pre, arr, post ) { - var s = jsDump.separator(), - base = jsDump.indent(), - inner = jsDump.indent(1); - if ( arr.join ) - arr = arr.join( ',' + s + inner ); - if ( !arr ) - return pre + post; - return [ pre, inner + arr, base + post ].join(s); - }; - function array( arr ) { - var i = arr.length, ret = Array(i); - this.up(); - while ( i-- ) - ret[i] = this.parse( arr[i] ); - this.down(); - return join( '[', ret, ']' ); - }; - - var reName = /^function (\w+)/; - - var jsDump = { - parse:function( obj, type ) { //type is used mostly internally, you can fix a (custom)type in advance - var parser = this.parsers[ type || this.typeOf(obj) ]; - type = typeof parser; - - return type == 'function' ? parser.call( this, obj ) : - type == 'string' ? 
parser : - this.parsers.error; - }, - typeOf:function( obj ) { - var type; - if ( obj === null ) { - type = "null"; - } else if (typeof obj === "undefined") { - type = "undefined"; - } else if (QUnit.is("RegExp", obj)) { - type = "regexp"; - } else if (QUnit.is("Date", obj)) { - type = "date"; - } else if (QUnit.is("Function", obj)) { - type = "function"; - } else if (obj.setInterval && obj.document && !obj.nodeType) { - type = "window"; - } else if (obj.nodeType === 9) { - type = "document"; - } else if (obj.nodeType) { - type = "node"; - } else if (typeof obj === "object" && typeof obj.length === "number" && obj.length >= 0) { - type = "array"; +function addEvents( elems, type, fn ) { + var i = elems.length; + while ( i-- ) { + addEvent( elems[ i ], type, fn ); + } +} + +function hasClass( elem, name ) { + return ( " " + elem.className + " " ).indexOf( " " + name + " " ) >= 0; +} + +function addClass( elem, name ) { + if ( !hasClass( elem, name ) ) { + elem.className += ( elem.className ? " " : "" ) + name; + } +} + +function toggleClass( elem, name ) { + if ( hasClass( elem, name ) ) { + removeClass( elem, name ); + } else { + addClass( elem, name ); + } +} + +function removeClass( elem, name ) { + var set = " " + elem.className + " "; + + // Class name may appear multiple times + while ( set.indexOf( " " + name + " " ) >= 0 ) { + set = set.replace( " " + name + " ", " " ); + } + + // trim for prettiness + elem.className = typeof set.trim === "function" ? 
set.trim() : set.replace( /^\s+|\s+$/g, "" ); +} + +function id( name ) { + return defined.document && document.getElementById && document.getElementById( name ); +} + +function getUrlConfigHtml() { + var i, j, val, + escaped, escapedTooltip, + selection = false, + len = config.urlConfig.length, + urlConfigHtml = ""; + + for ( i = 0; i < len; i++ ) { + val = config.urlConfig[ i ]; + if ( typeof val === "string" ) { + val = { + id: val, + label: val + }; + } + + escaped = escapeText( val.id ); + escapedTooltip = escapeText( val.tooltip ); + + if ( config[ val.id ] === undefined ) { + config[ val.id ] = QUnit.urlParams[ val.id ]; + } + + if ( !val.value || typeof val.value === "string" ) { + urlConfigHtml += ""; + } else { + urlConfigHtml += ""; + } + } + + return urlConfigHtml; +} + +// Handle "click" events on toolbar checkboxes and "change" for select menus. +// Updates the URL with the new state of `config.urlConfig` values. +function toolbarChanged() { + var updatedUrl, value, + field = this, + params = {}; + + // Detect if field is a select menu or a checkbox + if ( "selectedIndex" in field ) { + value = field.options[ field.selectedIndex ].value || undefined; + } else { + value = field.checked ? 
( field.defaultValue || true ) : undefined; + } + + params[ field.name ] = value; + updatedUrl = setUrl( params ); + + if ( "hidepassed" === field.name && "replaceState" in window.history ) { + config[ field.name ] = value || false; + if ( value ) { + addClass( id( "qunit-tests" ), "hidepass" ); + } else { + removeClass( id( "qunit-tests" ), "hidepass" ); + } + + // It is not necessary to refresh the whole page + window.history.replaceState( null, "", updatedUrl ); + } else { + window.location = updatedUrl; + } +} + +function setUrl( params ) { + var key, + querystring = "?"; + + params = QUnit.extend( QUnit.extend( {}, QUnit.urlParams ), params ); + + for ( key in params ) { + if ( hasOwn.call( params, key ) ) { + if ( params[ key ] === undefined ) { + continue; + } + querystring += encodeURIComponent( key ); + if ( params[ key ] !== true ) { + querystring += "=" + encodeURIComponent( params[ key ] ); } - return type; - }, - separator:function() { - return this.multiline ? this.HTML ? '
    ' : '\n' : this.HTML ? ' ' : ' '; - }, - indent:function( extra ) {// extra can be a number, shortcut for increasing-calling-decreasing - if ( !this.multiline ) - return ''; - var chr = this.indentChar; - if ( this.HTML ) - chr = chr.replace(/\t/g,' ').replace(/ /g,' '); - return Array( this._depth_ + (extra||0) ).join(chr); - }, - up:function( a ) { - this._depth_ += a || 1; - }, - down:function( a ) { - this._depth_ -= a || 1; - }, - setParser:function( name, parser ) { - this.parsers[name] = parser; - }, - // The next 3 are exposed so you can use them - quote:quote, - literal:literal, - join:join, - // - _depth_: 1, - // This is the list of parsers, to modify them, use jsDump.setParser - parsers:{ - window: '[Window]', - document: '[Document]', - error:'[ERROR]', //when no parser is found, shouldn't happen - unknown: '[Unknown]', - 'null':'null', - undefined:'undefined', - 'function':function( fn ) { - var ret = 'function', - name = 'name' in fn ? fn.name : (reName.exec(fn)||[])[1];//functions never have name in IE - if ( name ) - ret += ' ' + name; - ret += '('; - - ret = [ ret, this.parse( fn, 'functionArgs' ), '){'].join(''); - return join( ret, this.parse(fn,'functionCode'), '}' ); - }, - array: array, - nodelist: array, - arguments: array, - object:function( map ) { - var ret = [ ]; - this.up(); - for ( var key in map ) - ret.push( this.parse(key,'key') + ': ' + this.parse(map[key]) ); - this.down(); - return join( '{', ret, '}' ); - }, - node:function( node ) { - var open = this.HTML ? '<' : '<', - close = this.HTML ? 
'>' : '>'; - - var tag = node.nodeName.toLowerCase(), - ret = open + tag; - - for ( var a in this.DOMAttrs ) { - var val = node[this.DOMAttrs[a]]; - if ( val ) - ret += ' ' + a + '=' + this.parse( val, 'attribute' ); - } - return ret + close + open + '/' + tag + close; - }, - functionArgs:function( fn ) {//function calls it internally, it's the arguments part of the function - var l = fn.length; - if ( !l ) return ''; - - var args = Array(l); - while ( l-- ) - args[l] = String.fromCharCode(97+l);//97 is 'a' - return ' ' + args.join(', ') + ' '; - }, - key:quote, //object calls it internally, the key part of an item in a map - functionCode:'[code]', //function calls it internally, it's the content of the function - attribute:quote, //node calls it internally, it's an html attribute value - string:quote, - date:quote, - regexp:literal, //regex - number:literal, - 'boolean':literal - }, - DOMAttrs:{//attributes to dump from nodes, name=>realName - id:'id', - name:'name', - 'class':'className' - }, - HTML:false,//if true, entities are escaped ( <, >, \t, space and \n ) - indentChar:' ',//indentation unit - multiline:false //if true, items in a collection, are separated by a \n, else just a space. - }; - - return jsDump; + querystring += "&"; + } + } + return location.protocol + "//" + location.host + + location.pathname + querystring.slice( 0, -1 ); +} + +function applyUrlParams() { + var selectedModule, + modulesList = id( "qunit-modulefilter" ), + filter = id( "qunit-filter-input" ).value; + + selectedModule = modulesList ? + decodeURIComponent( modulesList.options[ modulesList.selectedIndex ].value ) : + undefined; + + window.location = setUrl({ + module: ( selectedModule === "" ) ? undefined : selectedModule, + filter: ( filter === "" ) ? 
undefined : filter, + + // Remove testId filter + testId: undefined + }); +} + +function toolbarUrlConfigContainer() { + var urlConfigContainer = document.createElement( "span" ); + + urlConfigContainer.innerHTML = getUrlConfigHtml(); + addClass( urlConfigContainer, "qunit-url-config" ); + + // For oldIE support: + // * Add handlers to the individual elements instead of the container + // * Use "click" instead of "change" for checkboxes + addEvents( urlConfigContainer.getElementsByTagName( "input" ), "click", toolbarChanged ); + addEvents( urlConfigContainer.getElementsByTagName( "select" ), "change", toolbarChanged ); + + return urlConfigContainer; +} + +function toolbarLooseFilter() { + var filter = document.createElement( "form" ), + label = document.createElement( "label" ), + input = document.createElement( "input" ), + button = document.createElement( "button" ); + + addClass( filter, "qunit-filter" ); + + label.innerHTML = "Filter: "; + + input.type = "text"; + input.value = config.filter || ""; + input.name = "filter"; + input.id = "qunit-filter-input"; + + button.innerHTML = "Go"; + + label.appendChild( input ); + + filter.appendChild( label ); + filter.appendChild( button ); + addEvent( filter, "submit", function( ev ) { + applyUrlParams(); + + if ( ev && ev.preventDefault ) { + ev.preventDefault(); + } + + return false; + }); + + return filter; +} + +function toolbarModuleFilterHtml() { + var i, + moduleFilterHtml = ""; + + if ( !modulesList.length ) { + return false; + } + + modulesList.sort(function( a, b ) { + return a.localeCompare( b ); + }); + + moduleFilterHtml += "" + + ""; + + return moduleFilterHtml; +} + +function toolbarModuleFilter() { + var toolbar = id( "qunit-testrunner-toolbar" ), + moduleFilter = document.createElement( "span" ), + moduleFilterHtml = toolbarModuleFilterHtml(); + + if ( !toolbar || !moduleFilterHtml ) { + return false; + } + + moduleFilter.setAttribute( "id", "qunit-modulefilter-container" ); + moduleFilter.innerHTML = 
moduleFilterHtml; + + addEvent( moduleFilter.lastChild, "change", applyUrlParams ); + + toolbar.appendChild( moduleFilter ); +} + +function appendToolbar() { + var toolbar = id( "qunit-testrunner-toolbar" ); + + if ( toolbar ) { + toolbar.appendChild( toolbarUrlConfigContainer() ); + toolbar.appendChild( toolbarLooseFilter() ); + } +} + +function appendHeader() { + var header = id( "qunit-header" ); + + if ( header ) { + header.innerHTML = "" + header.innerHTML + " "; + } +} + +function appendBanner() { + var banner = id( "qunit-banner" ); + + if ( banner ) { + banner.className = ""; + } +} + +function appendTestResults() { + var tests = id( "qunit-tests" ), + result = id( "qunit-testresult" ); + + if ( result ) { + result.parentNode.removeChild( result ); + } + + if ( tests ) { + tests.innerHTML = ""; + result = document.createElement( "p" ); + result.id = "qunit-testresult"; + result.className = "result"; + tests.parentNode.insertBefore( result, tests ); + result.innerHTML = "Running...
     "; + } +} + +function storeFixture() { + var fixture = id( "qunit-fixture" ); + if ( fixture ) { + config.fixture = fixture.innerHTML; + } +} + +function appendUserAgent() { + var userAgent = id( "qunit-userAgent" ); + + if ( userAgent ) { + userAgent.innerHTML = ""; + userAgent.appendChild( + document.createTextNode( + "QUnit " + QUnit.version + "; " + navigator.userAgent + ) + ); + } +} + +function appendTestsList( modules ) { + var i, l, x, z, test, moduleObj; + + for ( i = 0, l = modules.length; i < l; i++ ) { + moduleObj = modules[ i ]; + + if ( moduleObj.name ) { + modulesList.push( moduleObj.name ); + } + + for ( x = 0, z = moduleObj.tests.length; x < z; x++ ) { + test = moduleObj.tests[ x ]; + + appendTest( test.name, test.testId, moduleObj.name ); + } + } +} + +function appendTest( name, testId, moduleName ) { + var title, rerunTrigger, testBlock, assertList, + tests = id( "qunit-tests" ); + + if ( !tests ) { + return; + } + + title = document.createElement( "strong" ); + title.innerHTML = getNameHtml( name, moduleName ); + + rerunTrigger = document.createElement( "a" ); + rerunTrigger.innerHTML = "Rerun"; + rerunTrigger.href = setUrl({ testId: testId }); + + testBlock = document.createElement( "li" ); + testBlock.appendChild( title ); + testBlock.appendChild( rerunTrigger ); + testBlock.id = "qunit-test-output-" + testId; + + assertList = document.createElement( "ol" ); + assertList.className = "qunit-assert-list"; + + testBlock.appendChild( assertList ); + + tests.appendChild( testBlock ); +} + +// HTML Reporter initialization and load +QUnit.begin(function( details ) { + var qunit = id( "qunit" ); + + // Fixture is the only one necessary to run without the #qunit element + storeFixture(); + + if ( qunit ) { + qunit.innerHTML = + "

    " + escapeText( document.title ) + "

    " + + "

    " + + "
    " + + "

    " + + "
      "; + } + + appendHeader(); + appendBanner(); + appendTestResults(); + appendUserAgent(); + appendToolbar(); + appendTestsList( details.modules ); + toolbarModuleFilter(); + + if ( qunit && config.hidepassed ) { + addClass( qunit.lastChild, "hidepass" ); + } +}); + +QUnit.done(function( details ) { + var i, key, + banner = id( "qunit-banner" ), + tests = id( "qunit-tests" ), + html = [ + "Tests completed in ", + details.runtime, + " milliseconds.
      ", + "", + details.passed, + " assertions of ", + details.total, + " passed, ", + details.failed, + " failed." + ].join( "" ); + + if ( banner ) { + banner.className = details.failed ? "qunit-fail" : "qunit-pass"; + } + + if ( tests ) { + id( "qunit-testresult" ).innerHTML = html; + } + + if ( config.altertitle && defined.document && document.title ) { + + // show ✖ for good, ✔ for bad suite result in title + // use escape sequences in case file gets loaded with non-utf-8-charset + document.title = [ + ( details.failed ? "\u2716" : "\u2714" ), + document.title.replace( /^[\u2714\u2716] /i, "" ) + ].join( " " ); + } + + // clear own sessionStorage items if all tests passed + if ( config.reorder && defined.sessionStorage && details.failed === 0 ) { + for ( i = 0; i < sessionStorage.length; i++ ) { + key = sessionStorage.key( i++ ); + if ( key.indexOf( "qunit-test-" ) === 0 ) { + sessionStorage.removeItem( key ); + } + } + } + + // scroll back to top to show results + if ( config.scrolltop && window.scrollTo ) { + window.scrollTo( 0, 0 ); + } +}); + +function getNameHtml( name, module ) { + var nameHtml = ""; + + if ( module ) { + nameHtml = "" + escapeText( module ) + ": "; + } + + nameHtml += "" + escapeText( name ) + ""; + + return nameHtml; +} + +QUnit.testStart(function( details ) { + var running, testBlock, bad; + + testBlock = id( "qunit-test-output-" + details.testId ); + if ( testBlock ) { + testBlock.className = "running"; + } else { + + // Report later registered tests + appendTest( details.name, details.testId, details.module ); + } + + running = id( "qunit-testresult" ); + if ( running ) { + bad = QUnit.config.reorder && defined.sessionStorage && + +sessionStorage.getItem( "qunit-test-" + details.module + "-" + details.name ); + + running.innerHTML = ( bad ? + "Rerunning previously failed test:
      " : + "Running:
      " ) + + getNameHtml( details.name, details.module ); + } + +}); + +QUnit.log(function( details ) { + var assertList, assertLi, + message, expected, actual, + testItem = id( "qunit-test-output-" + details.testId ); + + if ( !testItem ) { + return; + } + + message = escapeText( details.message ) || ( details.result ? "okay" : "failed" ); + message = "" + message + ""; + message += "@ " + details.runtime + " ms"; + + // pushFailure doesn't provide details.expected + // when it calls, it's implicit to also not show expected and diff stuff + // Also, we need to check details.expected existence, as it can exist and be undefined + if ( !details.result && hasOwn.call( details, "expected" ) ) { + expected = escapeText( QUnit.dump.parse( details.expected ) ); + actual = escapeText( QUnit.dump.parse( details.actual ) ); + message += ""; + + if ( actual !== expected ) { + message += "" + + ""; + } else { + if ( expected.indexOf( "[object Array]" ) !== -1 || + expected.indexOf( "[object Object]" ) !== -1 ) { + message += ""; + } + } + + if ( details.source ) { + message += ""; + } + + message += "
      Expected:
      " +
      +			expected +
      +			"
      Result:
      " +
      +				actual + "
      Diff:
      " +
      +				QUnit.diff( expected, actual ) + "
      Message: " + + "Diff suppressed as the depth of object is more than current max depth (" + + QUnit.config.maxDepth + ").

      Hint: Use QUnit.dump.maxDepth to " + + " run with a higher max depth or " + + "Rerun without max depth.

      Source:
      " +
      +				escapeText( details.source ) + "
      "; + + // this occours when pushFailure is set and we have an extracted stack trace + } else if ( !details.result && details.source ) { + message += "" + + "" + + "
      Source:
      " +
      +			escapeText( details.source ) + "
      "; + } + + assertList = testItem.getElementsByTagName( "ol" )[ 0 ]; + + assertLi = document.createElement( "li" ); + assertLi.className = details.result ? "pass" : "fail"; + assertLi.innerHTML = message; + assertList.appendChild( assertLi ); +}); + +QUnit.testDone(function( details ) { + var testTitle, time, testItem, assertList, + good, bad, testCounts, skipped, + tests = id( "qunit-tests" ); + + if ( !tests ) { + return; + } + + testItem = id( "qunit-test-output-" + details.testId ); + + assertList = testItem.getElementsByTagName( "ol" )[ 0 ]; + + good = details.passed; + bad = details.failed; + + // store result when possible + if ( config.reorder && defined.sessionStorage ) { + if ( bad ) { + sessionStorage.setItem( "qunit-test-" + details.module + "-" + details.name, bad ); + } else { + sessionStorage.removeItem( "qunit-test-" + details.module + "-" + details.name ); + } + } + + if ( bad === 0 ) { + addClass( assertList, "qunit-collapsed" ); + } + + // testItem.firstChild is the test name + testTitle = testItem.firstChild; + + testCounts = bad ? + "" + bad + ", " + "" + good + ", " : + ""; + + testTitle.innerHTML += " (" + testCounts + + details.assertions.length + ")"; + + if ( details.skipped ) { + testItem.className = "skipped"; + skipped = document.createElement( "em" ); + skipped.className = "qunit-skipped-label"; + skipped.innerHTML = "skipped"; + testItem.insertBefore( skipped, testTitle ); + } else { + addEvent( testTitle, "click", function() { + toggleClass( assertList, "qunit-collapsed" ); + }); + + testItem.className = bad ? 
"fail" : "pass"; + + time = document.createElement( "span" ); + time.className = "runtime"; + time.innerHTML = details.runtime + " ms"; + testItem.insertBefore( time, assertList ); + } +}); + +if ( defined.document ) { + if ( document.readyState === "complete" ) { + QUnit.load(); + } else { + addEvent( window, "load", QUnit.load ); + } +} else { + config.pageLoaded = true; + config.autorun = true; +} + })(); - -})(this); diff -r 2fe19ba68daa -r 2fdf67ef3341 devtools/devctl.py --- a/devtools/devctl.py Wed Dec 09 16:36:17 2015 +0100 +++ b/devtools/devctl.py Wed Dec 09 18:24:09 2015 +0100 @@ -73,10 +73,12 @@ return None def init_log(self): pass - def load_configuration(self): + def load_configuration(self, **kw): pass def default_log_file(self): return None + def default_stats_file(self): + return None def cleanup_sys_modules(config): @@ -580,8 +582,8 @@ # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # -# You should have received a copy of the GNU Lesser General Public License along -# with this program. If not, see . +# You should have received a copy of the GNU Lesser General Public License +# along with this program. If not, see . ''', 'GPL': '''\ @@ -592,7 +594,8 @@ # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more +# details. # # You should have received a copy of the GNU General Public License along with # this program. If not, see . 
@@ -834,21 +837,11 @@ p.wait() -class GenerateQUnitHTML(Command): - """Generate a QUnit html file to see test in your browser""" - name = "qunit-html" - arguments = ' [...]' - - def run(self, args): - from cubicweb.devtools.qunit import make_qunit_html - print make_qunit_html(args[0], args[1:]) - for cmdcls in (UpdateCubicWebCatalogCommand, UpdateCubeCatalogCommand, #LiveServerCommand, NewCubeCommand, ExamineLogCommand, GenerateSchema, - GenerateQUnitHTML, ): CWCTL.register(cmdcls) diff -r 2fe19ba68daa -r 2fdf67ef3341 devtools/fake.py --- a/devtools/fake.py Wed Dec 09 16:36:17 2015 +0100 +++ b/devtools/fake.py Wed Dec 09 18:24:09 2015 +0100 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -20,6 +20,8 @@ __docformat__ = "restructuredtext en" +from contextlib import contextmanager + from logilab.database import get_db_helper from cubicweb.req import RequestSessionBase @@ -159,6 +161,10 @@ # for use with enabled_security context manager read_security = write_security = True + @contextmanager + def running_hooks_ops(self): + yield + class FakeRepo(object): querier = None def __init__(self, schema, vreg=None, config=None): @@ -173,7 +179,7 @@ def internal_session(self): return FakeSession(self) - def extid2eid(self, source, extid, etype, session, insert=True): + def extid2eid(self, source, extid, etype, cnx, insert=True): try: return self.extids[extid] except KeyError: @@ -181,10 +187,10 @@ return None self._count += 1 eid = self._count - entity = source.before_entity_insertion(session, extid, etype, eid) + entity = source.before_entity_insertion(cnx, extid, etype, eid) self.extids[extid] = eid self.eids[eid] = extid - source.after_entity_insertion(session, extid, entity) + source.after_entity_insertion(cnx, extid, entity) return eid diff -r 2fe19ba68daa -r 
2fdf67ef3341 devtools/httptest.py --- a/devtools/httptest.py Wed Dec 09 16:36:17 2015 +0100 +++ b/devtools/httptest.py Wed Dec 09 18:24:09 2015 +0100 @@ -1,4 +1,4 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -78,8 +78,6 @@ self.global_set_option('port', port) # force rewrite here return 'http://127.0.0.1:%d/' % self['port'] - def pyro_enabled(self): - return False class CubicWebServerTC(CubicWebTC): @@ -139,7 +137,6 @@ passwd = self.admpassword if passwd is None: passwd = user - self.login(user) response = self.web_get("login?__login=%s&__password=%s" % (user, passwd)) assert response.status == httplib.SEE_OTHER, response.status diff -r 2fe19ba68daa -r 2fdf67ef3341 devtools/qunit.py --- a/devtools/qunit.py Wed Dec 09 16:36:17 2015 +0100 +++ b/devtools/qunit.py Wed Dec 09 18:24:09 2015 +0100 @@ -29,7 +29,9 @@ from logilab.common.shellutils import getlogin import cubicweb +from cubicweb.view import View from cubicweb.web.controller import Controller +from cubicweb.web.views.staticcontrollers import StaticFileController, STATIC_CONTROLLERS from cubicweb.devtools.httptest import CubicWebServerTC @@ -66,7 +68,7 @@ self.firefox_cmd = ['firefox', '-no-remote'] if os.name == 'posix': self.firefox_cmd = [osp.join(osp.dirname(__file__), 'data', 'xvfb-run.sh'), - '-a', '-s', '-noreset -screen 0 640x480x8'] + self.firefox_cmd + '-a', '-s', '-noreset -screen 0 800x600x24'] + self.firefox_cmd def start(self, url): self.stop() @@ -102,11 +104,14 @@ test_queue = self.test_queue self._qunit_controller = MyQUnitResultController self.vreg.register(MyQUnitResultController) + self.vreg.register(QUnitView) + self.vreg.register(CWSoftwareRootStaticController) def tearDown(self): super(QUnitTestCase, self).tearDown() self.vreg.unregister(self._qunit_controller) - + 
self.vreg.unregister(QUnitView) + self.vreg.unregister(CWSoftwareRootStaticController) def abspath(self, path): """use self.__module__ to build absolute path if necessary""" @@ -130,35 +135,21 @@ yield js_test @with_tempdir - def _test_qunit(self, test_file, depends=(), data_files=(), timeout=30): + def _test_qunit(self, test_file, depends=(), data_files=(), timeout=10): assert osp.exists(test_file), test_file for dep in depends: assert osp.exists(dep), dep for data in data_files: assert osp.exists(data), data - # generate html test file - jquery_dir = 'file://' + self.config.locate_resource('jquery.js')[0] - html_test_file = NamedTemporaryFile(suffix='.html', delete=False) - html_test_file.write(make_qunit_html(test_file, depends, - base_url=self.config['base-url'], - web_data_path=jquery_dir)) - html_test_file.flush() - # copying data file - for data in data_files: - copyfile(data, tempfile.tempdir) + QUnitView.test_file = test_file + QUnitView.depends = depends while not self.test_queue.empty(): self.test_queue.get(False) browser = FirefoxHelper() - # start firefox once to let it init the profile (and run system-wide - # add-ons post setup, blegh), and then kill it ... - browser.start('about:blank') - import time; time.sleep(5) - browser.stop() - # ... 
then actually run the test file - browser.start(html_test_file.name) + browser.start(self.config['base-url'] + "?vid=qunit") test_count = 0 error = False def raise_exception(cls, *data): @@ -220,100 +211,114 @@ def handle_log(self): result = self._cw.form['result'] - message = self._cw.form['message'] - self._log_stack.append('%s: %s' % (result, message)) + message = self._cw.form.get('message', '') + actual = self._cw.form.get('actual') + expected = self._cw.form.get('expected') + source = self._cw.form.get('source') + log = '%s: %s' % (result, message) + if result == 'false' and actual is not None and expected is not None: + log += ' (got: %s, expected: %s)' % (actual, expected) + if source is not None: + log += '\n' + source + self._log_stack.append(log) -def cw_path(*paths): - return file_path(osp.join(cubicweb.CW_SOFTWARE_ROOT, *paths)) - -def file_path(path): - return 'file://' + osp.abspath(path) +class QUnitView(View): + __regid__ = 'qunit' -def build_js_script(host): - return """ - var host = '%s'; + templatable = False - QUnit.moduleStart = function (name) { - jQuery.ajax({ - url: host+'/qunit_result', - data: {"event": "module_start", - "name": name}, - async: false}); - } + depends = None + test_file = None - QUnit.testDone = function (name, failures, total) { - jQuery.ajax({ - url: host+'/qunit_result', - data: {"event": "test_done", - "name": name, - "failures": failures, - "total":total}, - async: false}); - } + def call(self, **kwargs): + w = self.w + req = self._cw + data = { + 'jquery': req.data_url('jquery.js'), + 'web_test': req.build_url('cwsoftwareroot/devtools/data'), + } + w(u''' + + + + + + + + ''' + % data) + w(u'') + w(u'') + w(u'') - html = [''' - - - - - - - - ''' - % data] - if base_url is not None: - html.append('') - html.append('') - html.append('') + prefix = len(cubicweb.CW_SOFTWARE_ROOT) + 1 + for dep in self.depends: + dep = req.build_url('cwsoftwareroot/') + dep[prefix:] + w(u' ' % dep) - for dep in depends: - html.append(' ' % 
file_path(dep)) + w(u' ') + test_url = req.build_url('cwsoftwareroot/') + self.test_file[prefix:] + w(u' ' % test_url) + w(u''' + +
      +
      + + ''') + - html.append(' ') - html.append(' '% (file_path(test_file),)) - html.append(''' - -
      -
      -

      QUnit example

      -

      -

      -
        - -''') - return u'\n'.join(html) +class CWSoftwareRootStaticController(StaticFileController): + __regid__ = 'cwsoftwareroot' + def publish(self, rset=None): + staticdir = cubicweb.CW_SOFTWARE_ROOT + relpath = self.relpath[len(self.__regid__) + 1:] + return self.static_file(osp.join(staticdir, relpath)) + + +STATIC_CONTROLLERS.append(CWSoftwareRootStaticController) if __name__ == '__main__': diff -r 2fe19ba68daa -r 2fdf67ef3341 devtools/repotest.py --- a/devtools/repotest.py Wed Dec 09 16:36:17 2015 +0100 +++ b/devtools/repotest.py Wed Dec 09 18:24:09 2015 +0100 @@ -259,12 +259,11 @@ def qexecute(self, rql, args=None, build_descr=True): with self.session.new_cnx() as cnx: - with cnx.ensure_cnx_set: - try: - return self.o.execute(cnx, rql, args, build_descr) - finally: - if rql.startswith(('INSERT', 'DELETE', 'SET')): - cnx.commit() + try: + return self.o.execute(cnx, rql, args, build_descr) + finally: + if rql.startswith(('INSERT', 'DELETE', 'SET')): + cnx.commit() class BasePlannerTC(BaseQuerierTC): diff -r 2fe19ba68daa -r 2fdf67ef3341 devtools/test/data/cubes/i18ntestcube/views.py --- a/devtools/test/data/cubes/i18ntestcube/views.py Wed Dec 09 16:36:17 2015 +0100 +++ b/devtools/test/data/cubes/i18ntestcube/views.py Wed Dec 09 18:24:09 2015 +0100 @@ -26,9 +26,6 @@ _myafs = MyAFS() -# XXX useless ASA logilab.common.registry is fixed -_myafs.__module__ = "cubes.i18ntestcube.views" - _myafs.tag_object_of(('*', 'in_forum', 'Forum'), 'main', 'inlined') afs.tag_object_of(('*', 'in_forum', 'Forum'), 'main', 'inlined') diff -r 2fe19ba68daa -r 2fdf67ef3341 devtools/test/data/js_examples/test_simple_failure.js --- a/devtools/test/data/js_examples/test_simple_failure.js Wed Dec 09 16:36:17 2015 +0100 +++ b/devtools/test/data/js_examples/test_simple_failure.js Wed Dec 09 18:24:09 2015 +0100 @@ -1,18 +1,18 @@ $(document).ready(function() { - module("air"); + QUnit.module("air"); - test("test 1", function() { - equals(2, 4); + QUnit.test("test 1", function (assert) { + 
assert.equal(2, 4); }); - test("test 2", function() { - equals('', '45'); - equals('1024', '32'); + QUnit.test("test 2", function (assert) { + assert.equal('', '45'); + assert.equal('1024', '32'); }); - module("able"); - test("test 3", function() { - same(1, 1); + QUnit.module("able"); + QUnit.test("test 3", function (assert) { + assert.deepEqual(1, 1); }); }); diff -r 2fe19ba68daa -r 2fdf67ef3341 devtools/test/data/js_examples/test_simple_success.js --- a/devtools/test/data/js_examples/test_simple_success.js Wed Dec 09 16:36:17 2015 +0100 +++ b/devtools/test/data/js_examples/test_simple_success.js Wed Dec 09 18:24:09 2015 +0100 @@ -1,17 +1,17 @@ $(document).ready(function() { - module("air"); + QUnit.module("air"); - test("test 1", function() { - equals(2, 2); + QUnit.test("test 1", function (assert) { + assert.equal(2, 2); }); - test("test 2", function() { - equals('45', '45'); + QUnit.test("test 2", function (assert) { + assert.equal('45', '45'); }); - module("able"); - test("test 3", function() { - same(1, 1); + QUnit.module("able"); + QUnit.test("test 3", function (assert) { + assert.deepEqual(1, 1); }); }); diff -r 2fe19ba68daa -r 2fdf67ef3341 devtools/test/data/js_examples/test_with_dep.js --- a/devtools/test/data/js_examples/test_with_dep.js Wed Dec 09 16:36:17 2015 +0100 +++ b/devtools/test/data/js_examples/test_with_dep.js Wed Dec 09 18:24:09 2015 +0100 @@ -1,9 +1,9 @@ $(document).ready(function() { - module("air"); + QUnit.module("air"); - test("test 1", function() { - equals(a, 4); + QUnit.test("test 1", function (assert) { + assert.equal(a, 4); }); }); diff -r 2fe19ba68daa -r 2fdf67ef3341 devtools/test/data/js_examples/test_with_ordered_deps.js --- a/devtools/test/data/js_examples/test_with_ordered_deps.js Wed Dec 09 16:36:17 2015 +0100 +++ b/devtools/test/data/js_examples/test_with_ordered_deps.js Wed Dec 09 18:24:09 2015 +0100 @@ -1,9 +1,9 @@ $(document).ready(function() { - module("air"); + QUnit.module("air"); - test("test 1", function() { - 
equals(b, 6); + QUnit.test("test 1", function (assert) { + assert.equal(b, 6); }); }); diff -r 2fe19ba68daa -r 2fdf67ef3341 devtools/test/requirements.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/devtools/test/requirements.txt Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,3 @@ +Twisted +webtest +cubicweb-person diff -r 2fe19ba68daa -r 2fdf67ef3341 devtools/test/unittest_i18n.py --- a/devtools/test/unittest_i18n.py Wed Dec 09 16:36:17 2015 +0100 +++ b/devtools/test/unittest_i18n.py Wed Dec 09 18:24:09 2015 +0100 @@ -20,8 +20,9 @@ import os, os.path as osp import sys +import subprocess -from logilab.common.testlib import TestCase, unittest_main +from unittest import TestCase, main from cubicweb.cwconfig import CubicWebNoAppConfiguration @@ -52,28 +53,23 @@ class cubePotGeneratorTC(TestCase): """test case for i18n pot file generator""" - def setUp(self): - self._CUBES_PATH = CubicWebNoAppConfiguration.CUBES_PATH[:] - CubicWebNoAppConfiguration.CUBES_PATH.append(osp.join(DATADIR, 'cubes')) - CubicWebNoAppConfiguration.cls_adjust_sys_path() - - def tearDown(self): - CubicWebNoAppConfiguration.CUBES_PATH[:] = self._CUBES_PATH - def test_i18ncube(self): - # MUST import here to make, since the import statement fire - # the cube paths setup (and then must occur after the setUp) - from cubicweb.devtools.devctl import update_cube_catalogs + env = os.environ.copy() + env['CW_CUBES_PATH'] = osp.join(DATADIR, 'cubes') + if 'PYTHONPATH' in env: + env['PYTHONPATH'] += os.pathsep + else: + env['PYTHONPATH'] = '' + env['PYTHONPATH'] += DATADIR + cwctl = osp.abspath(osp.join(osp.dirname(__file__), '../../bin/cubicweb-ctl')) + with open(os.devnull, 'w') as devnull: + subprocess.check_call([sys.executable, cwctl, 'i18ncube', 'i18ntestcube'], + env=env, stdout=devnull) cube = osp.join(DATADIR, 'cubes', 'i18ntestcube') msgs = load_po(osp.join(cube, 'i18n', 'en.po.ref')) - update_cube_catalogs(cube) newmsgs = load_po(osp.join(cube, 'i18n', 'en.po')) self.assertEqual(msgs, newmsgs) + 
if __name__ == '__main__': - # XXX dirty hack to make this test runnable using python (works - # fine with pytest, but not with python directly if this hack is - # not present) - # XXX to remove ASA logilab.common is fixed - sys.path.append('') - unittest_main() + main() diff -r 2fe19ba68daa -r 2fdf67ef3341 devtools/test/unittest_testlib.py --- a/devtools/test/unittest_testlib.py Wed Dec 09 16:36:17 2015 +0100 +++ b/devtools/test/unittest_testlib.py Wed Dec 09 18:24:09 2015 +0100 @@ -42,7 +42,7 @@ '__maineid': 0, '__type:0': 'Entity', '_cw_entity_fields:0': '__type,field', - '_cw_fields': 'file,encoding', + '_cw_fields': 'encoding,file', 'eid': [0], 'encoding': u'utf-8', 'field:0': 'value', diff -r 2fe19ba68daa -r 2fdf67ef3341 devtools/testlib.py --- a/devtools/testlib.py Wed Dec 09 16:36:17 2015 +0100 +++ b/devtools/testlib.py Wed Dec 09 18:24:09 2015 +0100 @@ -156,30 +156,6 @@ cwconfig.SMTP = MockSMTP -class TestCaseConnectionProxy(object): - """thin wrapper around `cubicweb.repoapi.ClientConnection` context-manager - used in CubicWebTC (cf. `cubicweb.devtools.testlib.CubicWebTC.login` method) - - It just proxies to the default connection context manager but - restores the original connection on exit. - """ - def __init__(self, testcase, cnx): - self.testcase = testcase - self.cnx = cnx - - def __getattr__(self, attrname): - return getattr(self.cnx, attrname) - - def __enter__(self): - # already open - return self.cnx - - def __exit__(self, exctype, exc, tb): - try: - return self.cnx.__exit__(exctype, exc, tb) - finally: - self.testcase.restore_connection() - # Repoaccess utility ###############################################3########### class RepoAccess(object): @@ -189,8 +165,7 @@ A repo access can create three type of object: - .. automethod:: cubicweb.testlib.RepoAccess.repo_cnx - .. automethod:: cubicweb.testlib.RepoAccess.client_cnx + .. automethod:: cubicweb.testlib.RepoAccess.cnx .. 
automethod:: cubicweb.testlib.RepoAccess.web_request The RepoAccess need to be closed to destroy the associated Session. @@ -225,16 +200,13 @@ return session @contextmanager - def repo_cnx(self): + def cnx(self): """Context manager returning a server side connection for the user""" with self._session.new_cnx() as cnx: yield cnx - @contextmanager - def client_cnx(self): - """Context manager returning a client side connection for the user""" - with repoapi.ClientConnection(self._session) as cnx: - yield cnx + # aliases for bw compat + client_cnx = repo_cnx = cnx @contextmanager def web_request(self, url=None, headers={}, method='GET', **kwargs): @@ -247,9 +219,10 @@ """ req = self.requestcls(self._repo.vreg, url=url, headers=headers, method=method, form=kwargs) - clt_cnx = repoapi.ClientConnection(self._session) - req.set_cnx(clt_cnx) - with clt_cnx: + with self._session.new_cnx() as cnx: + if 'ecache' in cnx.transaction_data: + del cnx.transaction_data['ecache'] + req.set_cnx(cnx) yield req def close(self): @@ -261,7 +234,7 @@ @contextmanager def shell(self): from cubicweb.server.migractions import ServerMigrationHelper - with repoapi.ClientConnection(self._session) as cnx: + with self._session.new_cnx() as cnx: mih = ServerMigrationHelper(None, repo=self._repo, cnx=cnx, interactive=False, # hack so it don't try to load fs schema @@ -294,17 +267,12 @@ requestcls = fake.FakeRequest tags = TestCase.tags | Tags('cubicweb', 'cw_repo') test_db_id = DEFAULT_EMPTY_DB_ID - _cnxs = set() # establised connection - # stay on connection for leak detection purpose # anonymous is logged by default in cubicweb test cases anonymous_allowed = True def __init__(self, *args, **kwargs): self._admin_session = None - self._admin_clt_cnx = None - self._current_session = None - self._current_clt_cnx = None self.repo = None self._open_access = set() super(CubicWebTC, self).__init__(*args, **kwargs) @@ -315,6 +283,7 @@ """provide a new RepoAccess object for a given user The access is 
automatically closed at the end of the test.""" + login = unicode(login) access = RepoAccess(self.repo, login, self.requestcls) self._open_access.add(access) return access @@ -326,92 +295,11 @@ except BadConnectionId: continue # already closed - @deprecated('[3.19] explicitly use RepoAccess object in test instead') - def set_cnx(self, cnx): - assert getattr(cnx, '_session', None) is not None - if cnx is self._admin_clt_cnx: - self._pop_custom_cnx() - else: - self._cnxs.add(cnx) # register the cnx to make sure it is removed - self._current_session = cnx._session - self._current_clt_cnx = cnx - @property - @deprecated('[3.19] explicitly use RepoAccess object in test instead') - def cnx(self): - # XXX we want to deprecate this - clt_cnx = self._current_clt_cnx - if clt_cnx is None: - clt_cnx = self._admin_clt_cnx - return clt_cnx - - def _close_cnx(self): - """ensure that all cnx used by a test have been closed""" - for cnx in list(self._cnxs): - if cnx._open and not cnx._session.closed: - cnx.rollback() - cnx.close() - self._cnxs.remove(cnx) - - @property - @deprecated('[3.19] explicitly use RepoAccess object in test instead') def session(self): - """return current server side session""" - # XXX We want to use a srv_connection instead and deprecate this - # property - session = self._current_session - if session is None: - session = self._admin_session - # bypassing all sanity to use the same repo cnx in the session - # - # we can't call set_cnx as the Connection is not managed by the - # session. 
- session._Session__threaddata.cnx = self._admin_clt_cnx._cnx - else: - session._Session__threaddata.cnx = self.cnx._cnx - session.set_cnxset() - return session - - @property - @deprecated('[3.19] explicitly use RepoAccess object in test instead') - def websession(self): - return self.session - - @property - @deprecated('[3.19] explicitly use RepoAccess object in test instead') - def adminsession(self): - """return current server side session (using default manager account)""" + """return admin session""" return self._admin_session - @deprecated('[3.19] explicitly use RepoAccess object in test instead') - def login(self, login, **kwargs): - """return a connection for the given login/password""" - __ = kwargs.pop('autoclose', True) # not used anymore - if login == self.admlogin: - # undo any previous login, if we're not used as a context manager - self.restore_connection() - return self.cnx - else: - if not kwargs: - kwargs['password'] = str(login) - clt_cnx = repoapi.connect(self.repo, login, **kwargs) - self.set_cnx(clt_cnx) - clt_cnx.__enter__() - return TestCaseConnectionProxy(self, clt_cnx) - - @deprecated('[3.19] explicitly use RepoAccess object in test instead') - def restore_connection(self): - self._pop_custom_cnx() - - def _pop_custom_cnx(self): - if self._current_clt_cnx is not None: - if self._current_clt_cnx._open: - self._current_clt_cnx.close() - if not self._current_session.closed: - self.repo.close(self._current_session.sessionid) - self._current_clt_cnx = None - self._current_session = None - #XXX this doesn't need to a be classmethod anymore def _init_repo(self): """init the repository and connection to it. 
@@ -425,62 +313,6 @@ login = unicode(db_handler.config.default_admin_config['login']) self.admin_access = self.new_access(login) self._admin_session = self.admin_access._session - self._admin_clt_cnx = repoapi.ClientConnection(self._admin_session) - self._cnxs.add(self._admin_clt_cnx) - self._admin_clt_cnx.__enter__() - - # db api ################################################################## - - @nocoverage - @deprecated('[3.19] explicitly use RepoAccess object in test instead') - def cursor(self, req=None): - if req is not None: - return req.cnx - else: - return self.cnx - - @nocoverage - @deprecated('[3.19] explicitly use RepoAccess object in test instead') - def execute(self, rql, args=None, req=None): - """executes , builds a resultset, and returns a couple (rset, req) - where req is a FakeRequest - """ - req = req or self.request(rql=rql) - return req.execute(unicode(rql), args) - - @nocoverage - @deprecated('[3.19] explicitly use RepoAccess object in test instead') - def commit(self): - try: - return self.cnx.commit() - finally: - self.session.set_cnxset() # ensure cnxset still set after commit - - @nocoverage - @deprecated('[3.19] explicitly use RepoAccess object in test instead') - def rollback(self): - try: - self.cnx.rollback() - except ProgrammingError: - pass # connection closed - finally: - self.session.set_cnxset() # ensure cnxset still set after commit - - @deprecated('[3.19] explicitly use RepoAccess object in test instead') - def request(self, rollbackfirst=False, url=None, headers={}, **kwargs): - """return a web ui request""" - if rollbackfirst: - self.cnx.rollback() - req = self.requestcls(self.vreg, url=url, headers=headers, form=kwargs) - req.set_cnx(self.cnx) - return req - - # server side db api ####################################################### - - @deprecated('[3.19] explicitly use RepoAccess object in test instead') - def sexecute(self, rql, args=None): - self.session.set_cnxset() - return self.session.execute(rql, args) # 
config management ######################################################## @@ -548,15 +380,6 @@ """return the application schema""" return self.vreg.schema - @deprecated('[3.19] explicitly use RepoAccess object in test instead') - def shell(self): - """return a shell session object""" - from cubicweb.server.migractions import ServerMigrationHelper - return ServerMigrationHelper(None, repo=self.repo, cnx=self.cnx, - interactive=False, - # hack so it don't try to load fs schema - schema=1) - def set_option(self, optname, value): self.config.global_set_option(optname, value) @@ -577,24 +400,17 @@ self.skipTest('repository is not initialised: %r' % previous_failure) try: self._init_repo() - self.addCleanup(self._close_cnx) except Exception as ex: self.__class__._repo_init_failed = ex raise self.addCleanup(self._close_access) self.setup_database() - self._admin_clt_cnx.commit() MAILBOX[:] = [] # reset mailbox def tearDown(self): # XXX hack until logilab.common.testlib is fixed - if self._admin_clt_cnx is not None: - if self._admin_clt_cnx._open: - self._admin_clt_cnx.close() - self._admin_clt_cnx = None if self._admin_session is not None: - if not self._admin_session.closed: - self.repo.close(self._admin_session.sessionid) + self.repo.close(self._admin_session.sessionid) self._admin_session = None while self._cleanups: cleanup, args, kwargs = self._cleanups.pop(-1) @@ -634,20 +450,11 @@ def create_user(self, req, login=None, groups=('users',), password=None, email=None, commit=True, **kwargs): """create and return a new user entity""" - if isinstance(req, basestring): - warn('[3.12] create_user arguments are now (req, login[, groups, password, commit, **kwargs])', - DeprecationWarning, stacklevel=2) - if not isinstance(groups, (tuple, list)): - password = groups - groups = login - elif isinstance(login, tuple): - groups = login - login = req - assert not isinstance(self, type) - req = self._admin_clt_cnx if password is None: - password = login.encode('utf8') - user = 
req.create_entity('CWUser', login=unicode(login), + password = login + if login is not None: + login = unicode(login) + user = req.create_entity('CWUser', login=login, upassword=password, **kwargs) req.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)' % ','.join(repr(str(g)) for g in groups), @@ -918,7 +725,7 @@ if entity_fields: form[eid_param('_cw_entity_fields', entity.eid)] = ','.join(entity_fields) if fields: - form['_cw_fields'] = ','.join(fields) + form['_cw_fields'] = ','.join(sorted(fields)) return form @deprecated('[3.19] use .admin_request_from_url instead') @@ -1038,8 +845,8 @@ def assertAuthSuccess(self, req, origsession, nbsessions=1): sh = self.app.session_handler session = self.app.get_session(req) - clt_cnx = repoapi.ClientConnection(session) - req.set_cnx(clt_cnx) + cnx = repoapi.Connection(session) + req.set_cnx(cnx) self.assertEqual(len(self.open_sessions), nbsessions, self.open_sessions) self.assertEqual(session.login, origsession.login) self.assertEqual(session.anonymous_session, False) @@ -1212,7 +1019,8 @@ def assertDocTestFile(self, testfile): # doctest returns tuple (failure_count, test_count) - result = self.shell().process_script(testfile) + with self.admin_access.shell() as mih: + result = mih.process_script(testfile) if result[0] and result[1]: raise self.failureException("doctest file '%s' failed" % testfile) @@ -1325,7 +1133,7 @@ """this method populates the database with `how_many` entities of each possible type. 
It also inserts random relations between them """ - with self.admin_access.repo_cnx() as cnx: + with self.admin_access.cnx() as cnx: with cnx.security_enabled(read=False, write=False): self._auto_populate(cnx, how_many) cnx.commit() diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/3.14.rst --- a/doc/3.14.rst Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,164 +0,0 @@ -Whats new in CubicWeb 3.14 -========================== - -First notice CW 3.14 depends on yams 0.34 (which is incompatible with prior -cubicweb releases regarding instance re-creation). - -API changes ------------ - -* `Entity.fetch_rql` `restriction` argument has been deprecated and should be - replaced with a call to the new `Entity.fetch_rqlst` method, get the returned - value (a rql `Select` node) and use the RQL syntax tree API to include the - above-mentionned restrictions. - - Backward compat is kept with proper warning. - -* `Entity.fetch_order` and `Entity.fetch_unrelated_order` class methods have been - replaced by `Entity.cw_fetch_order` and `Entity.cw_fetch_unrelated_order` with - a different prototype: - - - instead of taking (attr, var) as two string argument, they now take (select, - attr, var) where select is the rql syntax tree beinx constructed and var the - variable *node*. - - - instead of returning some string to be inserted in the ORDERBY clause, it has - to modify the syntax tree - - Backward compat is kept with proper warning, BESIDE cases below: - - - custom order method return **something else the a variable name with or - without the sorting order** (e.g. cases where you sort on the value of a - registered procedure as it was done in the tracker for instance). In such - case, an error is logged telling that this sorting is ignored until API - upgrade. - - - client code use direct access to one of those methods on an entity (no code - known to do that). 
- -* `Entity._rest_attr_info` class method has been renamed to - `Entity.cw_rest_attr_info` - - No backward compat yet since this is a protected method an no code is known to - use it outside cubicweb itself. - -* `AnyEntity.linked_to` has been removed as part of a refactoring of this - functionality (link a entity to another one at creation step). It was replaced - by a `EntityFieldsForm.linked_to` property. - - In the same refactoring, `cubicweb.web.formfield.relvoc_linkedto`, - `cubicweb.web.formfield.relvoc_init` and - `cubicweb.web.formfield.relvoc_unrelated` were removed and replaced by - RelationField methods with the same names, that take a form as a parameter. - - **No backward compatibility yet**. It's still time to cry for it. - Cubes known to be affected: tracker, vcsfile, vcreview. - -* `CWPermission` entity type and its associated require_permission relation type - (abstract) and require_group relation definitions have been moved to a new - `localperms` cube. With this have gone some functions from the - `cubicweb.schemas` package as well as some views. This makes cubicweb itself - smaller while you get all the local permissions stuff into a single, - documented, place. - - Backward compat is kept for existing instances, **though you should have - installed the localperms cubes**. A proper error should be displayed when - trying to migrate to 3.14 an instance the use `CWPermission` without the new - cube installed. For new instances / test, you should add a dependancy on the - new cube in cubes using this feature, along with a dependancy on cubicweb >= - 3.14. - -* jQuery has been updated to 1.6.4 and jquery-tablesorter to 2.0.5. No backward - compat issue known. - -* Table views refactoring : new `RsetTableView` and `EntityTableView`, as well as - rewritten an enhanced version of `PyValTableView` on the same bases, with logic - moved to some column renderers and a layout. 
Those should be well documented - and deprecates former `TableView`, `EntityAttributesTableView` and `CellView`, - which are however kept for backward compat, with some warnings that may not be - very clear unfortunatly (you may see your own table view subclass name here, - which doesn't make the problem that clear). Notice that `_cw.view('table', - rset, *kwargs)` will be routed to the new `RsetTableView` or to the old - `TableView` depending on given extra arguments. See #1986413. - -* `display_name` don't call .lower() anymore. This may leads to changes in your - user interface. Different msgid for upper/lower cases version of entity type - names, as this is the only proper way to handle this with some languages. - -* `IEditControlAdapter` has been deprecated in favor of `EditController` - overloading, which was made easier by adding dedicated selectors called - `match_edited_type` and `match_form_id`. - -* Pre 3.6 API backward compat has been dropped, though *data* migration - compatibility has been kept. You may have to fix errors due to old API usage - for your instance before to be able to run migration, but then you should be - able to upgrade even a pre 3.6 database. - -* Deprecated `cubicweb.web.views.iprogress` in favor of new `iprogress` cube. - -* Deprecated `cubicweb.web.views.flot` in favor of new `jqplot` cube. - - -Unintrusive API changes ------------------------ - -* Refactored properties forms (eg user preferences and site wide properties) as - well as pagination components to ease overridding. - -* New `cubicweb.web.uihelper` module with high-level helpers for uicfg. - -* New `anonymized_request` decorator to temporary run stuff as an anonymous - user, whatever the currently logged in user. - -* New 'verbatimattr' attribute view. - -* New facet and form widget for Integer used to store binary mask. - -* New `js_href` function to generated proper javascript href. 
- -* `match_kwargs` and `match_form_params` selectors both accept a new - `once_is_enough` argument. - -* `printable_value` is now a method of request, and may be given dict of - formatters to use. - -* `[Rset]TableView` allows to set None in 'headers', meaning the label should be - fetched from the result set as done by default. - -* Field vocabulary computation on entity creation now takes `__linkto` - information into accounet. - -* Started a `cubicweb.pylintext` pylint plugin to help pylint analyzing cubes. - - -RQL ---- - -* Support for HAVING in 'SET' and 'DELETE' queries. - -* new `AT_TZ` function to get back a timestamp at a given time-zone. - -* new `WEEKDAY` date extraction function - - -User interface changes ----------------------- - -* Datafeed source now present an history of the latest import's log, including - global status and debug/info/warning/error messages issued during - imports. Import logs older than a configurable amount of time are automatically - deleted. - -* Breadcrumbs component is properly kept when creating an entity with '__linkto'. - -* users and groups management now really lead to that (i.e. includes *groups* - management). - -* New 'jsonp' controller with 'jsonexport' and 'ejsonexport' views. - - -Configuration ------------- - -* Added option 'resources-concat' to make javascript/css files concatenation - optional. diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/3.15.rst --- a/doc/3.15.rst Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,96 +0,0 @@ -What's new in CubicWeb 3.15? -============================ - -New functionnalities --------------------- - -* Add Zmq server, based on the cutting edge ZMQ (http://www.zeromq.org/) socket - library. This allows to access distant instance, in a similar way as Pyro. - -* Publish/subscribe mechanism using ZMQ for communication among cubicweb - instances. 
The new zmq-address-sub and zmq-address-pub configuration variables - define where this communication occurs. As of this release this mechanism is - used for entity cache invalidation. - -* Improved WSGI support. While there is still some caveats, most of the code - which was twisted only is now generic and allows related functionalities to work - with a WSGI front-end. - -* Full undo/transaction support : undo of modification has eventually been - implemented, and the configuration simplified (basically you activate it or not - on an instance basis). - -* Controlling HTTP status code used is not much more easier : - - - `WebRequest` now has a `status_out` attribut to control the response status ; - - - most web-side exceptions take an optional ``status`` argument. - -API changes ------------ - -* The base registry implementation has been moved to a new - `logilab.common.registry` module (see #1916014). This includes code from : - - * `cubicweb.vreg` (the whole things that was in there) - * `cw.appobject` (base selectors and all). - - In the process, some renaming was done: - - * the top level registry is now `RegistryStore` (was `VRegistry`), but that - should not impact cubicweb client code ; - - * former selectors functions are now known as "predicate", though you still use - predicates to build an object'selector ; - - * for consistency, the `objectify_selector` decoraror has hence be renamed to - `objectify_predicate` ; - - * on the CubicWeb side, the `selectors` module has been renamed to - `predicates`. - - Debugging refactoring dropped the more need for the `lltrace` decorator. There - should be full backward compat with proper deprecation warnings. Notice the - `yes` predicate and `objectify_predicate` decorator, as well as the - `traced_selection` function should now be imported from the - `logilab.common.registry` module. - -* All login forms are now submitted to /login. 
Redirection to requested - page is now handled by the login controller (it was previously handle by the - session manager). - -* `Publisher.publish` has been renamed to `Publisher.handle_request`. This - method now contains generic version of logic previously handled by - Twisted. `Controller.publish` is **not** affected. - -Unintrusive API changes ------------------------ - -* New 'ldapfeed' source type, designed to replace 'ldapuser' source with - data-feed (i.e. copy based) source ideas. - -* New 'zmqrql' source type, similar to 'pyrorql' but using ømq instead of Pyro. - -* A new registry called `services` has appeared, where you can register - server-side `cubicweb.server.Service` child classes. Their `call` method can be - invoked from a web-side AppObject instance using new `self._cw.call_service` - method or a server-side one using `self.session.call_service`. This is a new - way to call server-side methods, much cleaner than monkey patching the - Repository class, which becomes a deprecated way to perform similar tasks. - -* a new `ajax-func` registry now hosts all remote functions (i.e. functions - callable through the `asyncRemoteExec` JS api). A convenience `ajaxfunc` - decorator will let you expose your python function easily without all the - appobject standard boilerplate. Backward compatibility is preserved. - -* the 'json' controller is now deprecated in favor of the 'ajax' one. - -* `WebRequest.build_url` can now take a __secure__ argument. When True cubicweb - try to generate an https url. - - -User interface changes ----------------------- - -A new 'undohistory' view expose the undoable transactions and give access to undo -some of them. diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/3.16.rst --- a/doc/3.16.rst Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,97 +0,0 @@ -What's new in CubicWeb 3.16? 
-============================ - -New functionalities --------------------- - -* Add a new dataimport store (`SQLGenObjectStore`). This store enables a fast - import of data (entity creation, link creation) in CubicWeb, by directly - flushing information in SQL. This may only be used with PostgreSQL, as it - requires the 'COPY FROM' command. - - -API changes ------------ - -* Orm: `set_attributes` and `set_relations` are unified (and - deprecated) in favor of `cw_set` that works in all cases. - -* db-api/configuration: all the external repository connection information is - now in an URL (see `#2521848 `_), - allowing to drop specific options of pyro nameserver host, group, etc and fix - broken `ZMQ `_ source. Configuration related changes: - - * Dropped 'pyro-ns-host', 'pyro-instance-id', 'pyro-ns-group' from the client side - configuration, in favor of 'repository-uri'. **NO MIGRATION IS DONE**, - supposing there is no web-only configuration in the wild. - - * Stop discovering the connection method through `repo_method` class attribute - of the configuration, varying according to the configuration class. This is - a first step on the way to a simpler configuration handling. - - DB-API related changes: - - * Stop indicating the connection method using `ConnectionProperties`. - - * Drop `_cnxtype` attribute from `Connection` and `cnxtype` from - `Session`. The former is replaced by a `is_repo_in_memory` property - and the later is totaly useless. - - * Turn `repo_connect` into `_repo_connect` to mark it as a private function. - - * Deprecate `in_memory_cnx` which becomes useless, use `_repo_connect` instead - if necessary. - -* the "tcp://" uri scheme used for `ZMQ `_ - communications (in a way reminiscent of Pyro) is now named - "zmqpickle-tcp://", so as to make room for future zmq-based lightweight - communications (without python objects pickling). 
- -* Request.base_url gets a `secure=True` optional parameter that yields - an https url if possible, allowing hook-generated content to send - secure urls (e.g. when sending mail notifications) - -* Dataimport ucsvreader gets a new boolean `ignore_errors` - parameter. - - -Unintrusive API changes ------------------------ - -* Drop of `cubicweb.web.uicfg.AutoformSectionRelationTags.bw_tag_map`, - deprecated since 3.6. - - -User interface changes ----------------------- - -* The RQL search bar has now some auto-completion support. It means - relation types or entity types can be suggested while typing. It is - an awesome improvement over the current behaviour ! - -* The `action box` associated with `table` views (from `tableview.py`) - has been transformed into a nice-looking series of small tabs; it - means that the possible actions are immediately visible and need not - be discovered by clicking on an almost invisible icon on the upper - right. - -* The `uicfg` module has moved to web/views/ and ui configuration - objects are now selectable. This will reduce the amount of - subclassing and whole methods replacement usually needed to - customize the ui behaviour in many cases. - -* Remove changelog view, as neither cubicweb nor known - cubes/applications were properly feeding related files. - - -Other changes -------------- - -* 'pyrorql' sources will be automatically updated to use an URL to locate the source - rather than configuration option. 'zmqrql' sources were broken before this change, - so no upgrade is needed... - -* Debugging filters for Hooks and Operations have been added. - -* Some cubicweb-ctl commands used to show the output of `msgcat` and - `msgfmt`; they don't anymore. diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/3.17.rst --- a/doc/3.17.rst Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,60 +0,0 @@ -What's new in CubicWeb 3.17? 
-============================ - -New functionalities --------------------- - -* add a command to compare db schema and file system schema - (see `#464991 `_) - -* Add CubicWebRequestBase.content with the content of the HTTP request (see #2742453) - (see `#2742453 `_) - -* Add directive bookmark to ReST rendering - (see `#2545595 `_) - -* Allow user defined final type - (see `#124342 `_) - - -API changes ------------ - -* drop typed_eid() in favour of int() (see `#2742462 `_) - -* The SIOC views and adapters have been removed from CubicWeb and moved to the - `sioc` cube. - -* The web page embedding views and adapters have been removed from CubicWeb and - moved to the `embed` cube. - -* The email sending views and controllers have been removed from CubicWeb and - moved to the `massmailing` cube. - -* ``RenderAndSendNotificationView`` is deprecated in favor of - ``ActualNotificationOp`` the new operation use the more efficient *data* - idiom. - -* Looping task can now have a interval <= ``0``. Negative interval disable the - looping task entirely. - -* We now serve html instead of xhtml. - (see `#2065651 `_) - - -Deprecation ---------------------- - -* ``ldapuser`` have been deprecated. It'll be fully dropped in the next - version. If you are still using ldapuser switch to ``ldapfeed`` **NOW**! - -* ``hijack_user`` have been deprecated. It will be dropped soon. - -Deprecated Code Drops ----------------------- - -* The progress views and adapters have been removed from CubicWeb. These - classes were deprecated since 3.14.0. They are still available in the - `iprogress` cube. - -* API deprecated since 3.7 have been dropped. diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/3.18.rst --- a/doc/3.18.rst Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,101 +0,0 @@ -What's new in CubicWeb 3.18? -============================ - -The migration script does not handle sqlite nor mysql instances. 
- - -New functionalities --------------------- - -* add a security debugging tool - (see `#2920304 `_) - -* introduce an `add` permission on attributes, to be interpreted at - entity creation time only and allow the implementation of complex - `update` rules that don't block entity creation (before that the - `update` attribute permission was interpreted at entity creation and - update time) - -* the primary view display controller (uicfg) now has a - `set_fields_order` method similar to the one available for forms - -* new method `ResultSet.one(col=0)` to retrive a single entity and enforce the - result has only one row (see `#3352314 https://www.cubicweb.org/ticket/3352314`_) - -* new method `RequestSessionBase.find` to look for entities - (see `#3361290 https://www.cubicweb.org/ticket/3361290`_) - -* the embedded jQuery copy has been updated to version 1.10.2, and jQuery UI to - version 1.10.3. - -* initial support for wsgi for the debug mode, available through the new - ``wsgi`` cubicweb-ctl command, which can use either python's builtin - wsgi server or the werkzeug module if present. - -* a ``rql-table`` directive is now available in ReST fields - -* cubicweb-ctl upgrade can now generate the static data resource directory - directly, without a manual call to gen-static-datadir. - -API changes ------------ - -* not really an API change, but the entity permission checks are now - systematically deferred to an operation, instead of a) trying in a - hook and b) if it failed, retrying later in an operation - -* The default value storage for attributes is no longer String, but - Bytes. This opens the road to storing arbitrary python objects, e.g. - numpy arrays, and fixes a bug where default values whose truth value - was False were not properly migrated. 
- -* `symmetric` relations are no more handled by an rql rewrite but are - now handled with hooks (from the `activeintegrity` category); this - may have some consequences for applications that do low-level database - manipulations or at times disable (some) hooks. - -* `unique together` constraints (multi-columns unicity constraints) - get a `name` attribute that maps the CubicWeb contraint entities to - corresponding backend index. - -* BreadCrumbEntityVComponent's open_breadcrumbs method now includes - the first breadcrumbs separator - -* entities can be compared for equality and hashed - -* the ``on_fire_transition`` predicate accepts a sequence of possible - transition names - -* the GROUP_CONCAT rql aggregate function no longer repeats duplicate - values, on the sqlite and postgresql backends - -Deprecation ---------------------- - -* ``pyrorql`` sources have been deprecated. Multisource will be fully dropped - in the next version. If you are still using pyrorql, switch to ``datafeed`` - **NOW**! - -* the old multi-source system - -* `find_one_entity` and `find_entities` in favor of `find` - (see `#3361290 https://www.cubicweb.org/ticket/3361290`_) - -* the `TmpFileViewMixin` and `TmpPngView` classes (see `#3400448 - https://www.cubicweb.org/ticket/3400448`_) - -Deprecated Code Drops ----------------------- - -* ``ldapuser`` have been dropped; use ``ldapfeed`` now - (see `#2936496 `_) - -* action ``GotRhythm`` was removed, make sure you do not - import it in your cubes (even to unregister it) - (see `#3093362 `_) - -* all 3.8 backward compat is gone - -* all 3.9 backward compat (including the javascript side) is gone - -* the ``twisted`` (web-only) instance type has been removed diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/3.19.rst --- a/doc/3.19.rst Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,180 +0,0 @@ -What's new in CubicWeb 3.19? 
-============================ - -New functionalities --------------------- - -* implement Cross Origin Resource Sharing (CORS) - (see `#2491768 `_) - -* system_source.create_eid can get a range of IDs, to reduce overhead of batch - entity creation - -Behaviour Changes ------------------ - -* The anonymous property of Session and Connection are now computed from the - related user login. If it matches the ``anonymous-user`` in the config the - connection is anonymous. Beware that the ``anonymous-user`` config is web - specific. Therefore, no session may be anonymous in a repository only setup. - - -New Repository Access API -------------------------- - -Connection replaces Session -~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -A new explicit Connection object replaces Session as the main repository entry -point. Connection holds all the necessary methods to be used server-side -(``execute``, ``commit``, ``rollback``, ``call_service``, ``entity_from_eid``, -etc...). One obtains a new Connection object using ``session.new_cnx()``. -Connection objects need to have an explicit begin and end. Use them as a context -manager to never miss an end:: - - with session.new_cnx() as cnx: - cnx.execute('INSERT Elephant E, E name "Babar"') - cnx.commit() - cnx.execute('INSERT Elephant E, E name "Celeste"') - cnx.commit() - # Once you get out of the "with" clause, the connection is closed. - -Using the same Connection object in multiple threads will give you access to the -same Transaction. However, Connection objects are not thread safe (hence at your -own risks). - -``repository.internal_session`` is deprecated in favor of -``repository.internal_cnx``. Note that internal connections are now `safe` by default, -i.e. the integrity hooks are enabled. - -Backward compatibility is preserved on Session. - - -dbapi vs repoapi -~~~~~~~~~~~~~~~~ - -A new API has been introduced to replace the dbapi. It is called `repoapi`. 
- -There are three relevant functions for now: - -* ``repoapi.get_repository`` returns a Repository object either from an - URI when used as ``repoapi.get_repository(uri)`` or from a config - when used as ``repoapi.get_repository(config=config)``. - -* ``repoapi.connect(repo, login, **credentials)`` returns a ClientConnection - associated with the user identified by the credentials. The - ClientConnection is associated with its own Session that is closed - when the ClientConnection is closed. A ClientConnection is a - Connection-like object to be used client side. - -* ``repoapi.anonymous_cnx(repo)`` returns a ClientConnection associated - with the anonymous user if described in the config. - - -repoapi.ClientConnection replace dbapi.Connection and company -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -On the client/web side, the Request is now using a ``repoapi.ClientConnection`` -instead of a ``dbapi.connection``. The ``ClientConnection`` has multiple backward -compatible methods to make it look like a ``dbapi.Cursor`` and ``dbapi.Connection``. - -Session used on the Web side are now the same than the one used Server side. -Some backward compatibility methods have been installed on the server side Session -to ease the transition. - -The authentication stack has been altered to use the ``repoapi`` instead of -the ``dbapi``. Cubes adding new element to this stack are likely to break. - -Session data can be accessed using the cnx.data dictionary, while -transaction data is available through cnx.transaction_data. These -replace the [gs]et_shared_data methods with optional txid kwarg. - -New API in tests -~~~~~~~~~~~~~~~~ - -All current methods and attributes used to access the repo on ``CubicWebTC`` are -deprecated. You may now use a ``RepoAccess`` object. A ``RepoAccess`` object is -linked to a new ``Session`` for a specified user. 
It is able to create -``Connection``, ``ClientConnection`` and web side requests linked to this -session:: - - access = self.new_access('babar') # create a new RepoAccess for user babar - with access.repo_cnx() as cnx: - # some work with server side cnx - cnx.execute(...) - cnx.commit() - cnx.execute(...) - cnx.commit() - - with access.client_cnx() as cnx: - # some work with client side cnx - cnx.execute(...) - cnx.commit() - - with access.web_request(elephant='babar') as req: - # some work with client side cnx - elephant_name = req.form['elephant'] - req.execute(...) - req.cnx.commit() - -By default ``testcase.admin_access`` contains a ``RepoAccess`` object for the -default admin session. - - -API changes ------------ - -* ``RepositorySessionManager.postlogin`` is now called with two arguments, - request and session. And this now happens before the session is linked to the - request. - -* ``SessionManager`` and ``AuthenticationManager`` now take a repo object at - initialization time instead of a vreg. - -* The ``async`` argument of ``_cw.call_service`` has been dropped. All calls are - now synchronous. The zmq notification bus looks like a good replacement for - most async use cases. - -* ``repo.stats()`` is now deprecated. The same information is available through - a service (``_cw.call_service('repo_stats')``). - -* ``repo.gc_stats()`` is now deprecated. The same information is available through - a service (``_cw.call_service('repo_gc_stats')``). - -* ``repo.register_user()`` is now deprecated. The functionality is now - available through a service (``_cw.call_service('register_user')``). - -* ``request.set_session`` no longer takes an optional ``user`` argument. - -* CubicwebTC does not have repo and cnx as class attributes anymore. They are - standard instance attributes. ``set_cnx`` and ``_init_repo`` class methods - become instance methods. - -* ``set_cnxset`` and ``free_cnxset`` are deprecated. cnxset are now - automatically managed. 
- -* The implementation of cascading deletion when deleting `composite` - entities has changed. There comes a semantic change: merely deleting - a composite relation does not entail any more the deletion of the - component side of the relation. - -* ``_cw.user_callback`` and ``_cw.user_rql_callback`` are deprecated. Users - are encouraged to write an actual controller (e.g. using ``ajaxfunc``) - instead of storing a closure in the session data. - -* A new ``entity.cw_linkable_rql`` method provides the rql to fetch all entities - that are already or may be related to the current entity using the given - relation. - - -Deprecated Code Drops ----------------------- - -* session.hijack_user mechanism has been dropped. - -* EtypeRestrictionComponent has been removed, its functionality has been - replaced by facets a while ago. - -* the old multi-source support has been removed. Only copy-based sources - remain, such as datafeed or ldapfeed. - diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/3.20.rst --- a/doc/3.20.rst Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,78 +0,0 @@ -What's new in CubicWeb 3.20 -=========================== - -New features ------------- - -* virtual relations: a new ComputedRelation class can be used in - schema.py; its `rule` attribute is an RQL snippet that defines the new - relation. - -* computed attributes: an attribute can now be defined with a `formula` - argument (also an RQL snippet); it will be read-only, and updated - automatically. - - Both of these features are described in `CWEP-002`_, and the updated - "Data model" chapter of the CubicWeb book. - -* cubicweb-ctl plugins can use the ``cubicweb.utils.admincnx`` function - to get a Connection object from an instance name. 
- -* new 'tornado' wsgi backend - -* session cookies have the HttpOnly flag, so they're no longer exposed to - javascript - -* rich text fields can be formatted as markdown - -* the edit controller detects concurrent editions, and raises a ValidationError - if an entity was modified between form generation and submission - -* cubicweb can use a postgresql "schema" (namespace) for its tables - -* "cubicweb-ctl configure" can be used to set values of the admin user - credentials in the sources configuration file - -* in debug mode, setting the _cwtracehtml parameter on a request allows tracing - where each bit of output is produced - -.. _CWEP-002: http://hg.logilab.org/review/cwep/file/tip/CWEP-002.rst - - -API Changes ------------ - -* ``ucsvreader()`` and ``ucsvreader_pb()`` from the ``dataimport`` module have - 2 new keyword arguments ``delimiter`` and ``quotechar`` to replace the - ``separator`` and ``quote`` arguments respectively. This makes the API match - that of Python's ``csv.reader()``. The old arguments are still supported - though deprecated. - -* the migration environment's ``remove_cube`` function is now called ``drop_cube``. - -* cubicweb.old.css is now cubicweb.css. The previous "new" - cubicweb.css, along with its cubicweb.reset.css companion, have been - removed. 
- -* the jquery-treeview plugin was updated to its latest version - - -Deprecated Code Drops ----------------------- - -* most of 3.10 and 3.11 backward compat is gone; this includes: - - CtxComponent.box_action() and CtxComponent.build_link() - - cubicweb.devtools.htmlparser.XMLDemotingValidator - - various methods and properties on Entities, replaced by cw_edited and cw_attr_cache - - 'commit_event' method on hooks, replaced by 'postcommit_event' - - server.hook.set_operation(), replaced by Operation.get_instance(...).add_data() - - View.div_id(), View.div_class() and View.create_url() - - `*VComponent` classes - - in forms, Field.value() and Field.help() must take the form and the field itself as arguments - - form.render() must get `w` as a named argument, and renderer.render() must take `w` as first argument - - in breadcrumbs, the optional `recurs` argument must be a set, not False - - cubicweb.web.views.idownloadable.{download_box,IDownloadableLineView} - - primary views no longer have `render_entity_summary` and `summary` methods - - WFHistoryVComponent's `cell_call` method is replaced by `render_body` - - cubicweb.dataimport.ObjectStore.add(), replaced by create_entity - - ManageView.{folders,display_folders} diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/Makefile --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/Makefile Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,88 @@ +SRC=. + +# You can set these sphinx variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +#BUILDDIR = build +BUILDDIR = _build +CWDIR = .. +JSDIR = ${CWDIR}/web/data +JSTORST = tools/pyjsrest.py +BUILDJS = js_api + +# Internal variables for sphinx +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d ${BUILDDIR}/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
+ + + +.PHONY: help clean html web pickle htmlhelp latex changes linkcheck + +help: + @echo "Please use \`make ' where is one of" + @echo " all to make standalone HTML files, developer manual and API doc" + @echo " html to make standalone HTML files" + @echo "--- " + @echo " pickle to make pickle files (usable by e.g. sphinx-web)" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " changes to make an overview over all changed/added/deprecated items" + @echo " linkcheck to check all external links for integrity" + +clean: + rm -f *.html + -rm -rf ${BUILDDIR}/html ${BUILDDIR}/doctrees + -rm -rf ${BUILDJS} + +all: html + +# run sphinx ### +html: js + mkdir -p ${BUILDDIR}/html ${BUILDDIR}/doctrees + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) ${BUILDDIR}/html + @echo + @echo "Build finished. The HTML pages are in ${BUILDDIR}/html." + +js: + mkdir -p ${BUILDJS} + $(JSTORST) -p ${JSDIR} -o ${BUILDJS} + +pickle: + mkdir -p ${BUILDDIR}/pickle ${BUILDDIR}/doctrees + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) ${BUILDDIR}/pickle + @echo + @echo "Build finished; now you can process the pickle files or run" + @echo " sphinx-web ${BUILDDIR}/pickle" + @echo "to start the sphinx-web server." + +web: pickle + +htmlhelp: + mkdir -p ${BUILDDIR}/htmlhelp ${BUILDDIR}/doctrees + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) ${BUILDDIR}/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in ${BUILDDIR}/htmlhelp." + +latex: + mkdir -p ${BUILDDIR}/latex ${BUILDDIR}/doctrees + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) ${BUILDDIR}/latex + @echo + @echo "Build finished; the LaTeX files are in ${BUILDDIR}/latex." + @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ + "run these through (pdf)latex." 
+ +changes: + mkdir -p ${BUILDDIR}/changes ${BUILDDIR}/doctrees + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) ${BUILDDIR}/changes + @echo + @echo "The overview file is in ${BUILDDIR}/changes." + +linkcheck: + mkdir -p ${BUILDDIR}/linkcheck ${BUILDDIR}/doctrees + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) ${BUILDDIR}/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in ${BUILDDIR}/linkcheck/output.txt." diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/_static/cubicweb.png Binary file doc/_static/cubicweb.png has changed diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/_static/logilab.png Binary file doc/_static/logilab.png has changed diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/_static/sphinx-default.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/_static/sphinx-default.css Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,861 @@ +/** + * Sphinx Doc Design + */ + +html, body { + background: white; +} + +body { + font-family: Verdana, sans-serif; + font-size: 100%; + background-color: white; + color: black; + margin: 0; + padding: 0; +} + +/* :::: LAYOUT :::: */ + +div.logilablogo { + padding: 10px 10px 10px 10px; + height:75; +} + + +div.document { + background-color: white; +} + +div.documentwrapper { + float: left; + width: 100%; +} + +div.bodywrapper { + margin: 0 0 0 230px; +} + +div.body { + background-color: white; + padding: 0 20px 30px 20px; + border-left:solid; + border-left-color:#e2e2e2; + border-left-width:thin; +} + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; +} + +div.clearer { + clear: both; +} + +div.footer { + color: #ff4500; + width: 100%; + padding: 9px 0 9px 0; + text-align: center; + font-size: 75%; +} + +div.footer a { + color: #ff4500; + text-decoration: underline; +} + +div.related { + background-color: #ff7700; + color: white; + width: 100%; + height: 30px; + line-height: 30px; + font-size: 90%; +} + 
+div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +div.related a { + color: white; + font-weight:bold; +} + +/* ::: TOC :::: */ + +div.sphinxsidebar { + border-style:solid; + border-color: white; +/* background-color:#e2e2e2;*/ + padding-bottom:5px; +} + +div.sphinxsidebar h3 { + font-family: Verdana, sans-serif; + color: black; + font-size: 1.2em; + font-weight: normal; + margin: 0; + padding: 0; + font-weight:bold; + font-style:italic; +} + +div.sphinxsidebar h4 { + font-family: Verdana, sans-serif; + color: black; + font-size: 1.1em; + font-weight: normal; + margin: 5px 0 0 0; + padding: 0; + font-weight:bold; + font-style:italic; +} + +div.sphinxsidebar p { + color: black; +} + +div.sphinxsidebar p.topless { + margin: 5px 10px 10px 10px; +} + +div.sphinxsidebar ul { + margin: 10px; + padding: 0; + list-style: none; + color: black; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar a { + color: black; + text-decoration: none; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #e2e2e2; + font-family: sans-serif; + font-size: 1em; + padding-bottom: 5px; +} + +/* :::: MODULE CLOUD :::: */ +div.modulecloud { + margin: -5px 10px 5px 10px; + padding: 10px; + line-height: 160%; + border: 1px solid #cbe7e5; + background-color: #f2fbfd; +} + +div.modulecloud a { + padding: 0 5px 0 5px; +} + +/* :::: SEARCH :::: */ +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li div.context { + color: #888; 
+ margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* :::: COMMON FORM STYLES :::: */ + +div.actions { + padding: 5px 10px 5px 10px; + border-top: 1px solid #cbe7e5; + border-bottom: 1px solid #cbe7e5; + background-color: #e0f6f4; +} + +form dl { + color: #333; +} + +form dt { + clear: both; + float: left; + min-width: 110px; + margin-right: 10px; + padding-top: 2px; +} + +input#homepage { + display: none; +} + +div.error { + margin: 5px 20px 0 0; + padding: 5px; + border: 1px solid #d00; + font-weight: bold; +} + +/* :::: INLINE COMMENTS :::: */ + +div.inlinecomments { + position: absolute; + right: 20px; +} + +div.inlinecomments a.bubble { + display: block; + float: right; + background-image: url(style/comment.png); + background-repeat: no-repeat; + width: 25px; + height: 25px; + text-align: center; + padding-top: 3px; + font-size: 0.9em; + line-height: 14px; + font-weight: bold; + color: black; +} + +div.inlinecomments a.bubble span { + display: none; +} + +div.inlinecomments a.emptybubble { + background-image: url(style/nocomment.png); +} + +div.inlinecomments a.bubble:hover { + background-image: url(style/hovercomment.png); + text-decoration: none; + color: #3ca0a4; +} + +div.inlinecomments div.comments { + float: right; + margin: 25px 5px 0 0; + max-width: 50em; + min-width: 30em; + border: 1px solid #2eabb0; + background-color: #f2fbfd; + z-index: 150; +} + +div#comments { + border: 1px solid #2eabb0; + margin-top: 20px; +} + +div#comments div.nocomments { + padding: 10px; + font-weight: bold; +} + +div.inlinecomments div.comments h3, +div#comments h3 { + margin: 0; + padding: 0; + background-color: #2eabb0; + color: white; + border: none; + padding: 3px; +} + +div.inlinecomments div.comments div.actions { + padding: 4px; + margin: 0; + border-top: none; +} + +div#comments div.comment { + margin: 10px; + border: 1px solid #2eabb0; +} + +div.inlinecomments div.comment h4, +div.commentwindow 
div.comment h4, +div#comments div.comment h4 { + margin: 10px 0 0 0; + background-color: #2eabb0; + color: white; + border: none; + padding: 1px 4px 1px 4px; +} + +div#comments div.comment h4 { + margin: 0; +} + +div#comments div.comment h4 a { + color: #d5f4f4; +} + +div.inlinecomments div.comment div.text, +div.commentwindow div.comment div.text, +div#comments div.comment div.text { + margin: -5px 0 -5px 0; + padding: 0 10px 0 10px; +} + +div.inlinecomments div.comment div.meta, +div.commentwindow div.comment div.meta, +div#comments div.comment div.meta { + text-align: right; + padding: 2px 10px 2px 0; + font-size: 95%; + color: #538893; + border-top: 1px solid #cbe7e5; + background-color: #e0f6f4; +} + +div.commentwindow { + position: absolute; + width: 500px; + border: 1px solid #cbe7e5; + background-color: #f2fbfd; + display: none; + z-index: 130; +} + +div.commentwindow h3 { + margin: 0; + background-color: #2eabb0; + color: white; + border: none; + padding: 5px; + font-size: 1.5em; + cursor: pointer; +} + +div.commentwindow div.actions { + margin: 10px -10px 0 -10px; + padding: 4px 10px 4px 10px; + color: #538893; +} + +div.commentwindow div.actions input { + border: 1px solid #2eabb0; + background-color: white; + color: #135355; + cursor: pointer; +} + +div.commentwindow div.form { + padding: 0 10px 0 10px; +} + +div.commentwindow div.form input, +div.commentwindow div.form textarea { + border: 1px solid #3c9ea2; + background-color: white; + color: black; +} + +div.commentwindow div.error { + margin: 10px 5px 10px 5px; + background-color: #fbe5dc; + display: none; +} + +div.commentwindow div.form textarea { + width: 99%; +} + +div.commentwindow div.preview { + margin: 10px 0 10px 0; + background-color: #70d0d4; + padding: 0 1px 1px 25px; +} + +div.commentwindow div.preview h4 { + margin: 0 0 -5px -20px; + padding: 4px 0 0 4px; + color: white; + font-size: 1.3em; +} + +div.commentwindow div.preview div.comment { + background-color: #f2fbfd; +} + 
+div.commentwindow div.preview div.comment h4 { + margin: 10px 0 0 0!important; + padding: 1px 4px 1px 4px!important; + font-size: 1.2em; +} + +/* :::: SUGGEST CHANGES :::: */ +div#suggest-changes-box input, div#suggest-changes-box textarea { + border: 1px solid #ccc; + background-color: white; + color: black; +} + +div#suggest-changes-box textarea { + width: 99%; + height: 400px; +} + + +/* :::: PREVIEW :::: */ +div.preview { + background-image: url(style/preview.png); + padding: 0 20px 20px 20px; + margin-bottom: 30px; +} + + +/* :::: INDEX PAGE :::: */ + +table.contentstable { + width: 90%; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* :::: INDEX STYLES :::: */ + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable dl, table.indextable dd { + margin-top: 0; + margin-bottom: 0; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +form.pfform { + margin: 10px 0 20px 0; +} + +/* :::: GLOBAL STYLES :::: */ + +.docwarning { + background-color: #ffe4e4; + padding: 10px; + margin: 0 -20px 0 -20px; + border-bottom: 1px solid #f66; +} + +p.subhead { + font-weight: bold; + margin-top: 20px; +} + +a { + color: orangered; + text-decoration: none; +} + +a:hover { + text-decoration: underline; +} + +div.body h1, +div.body h2, +div.body h3, +div.body h4, +div.body h5, +div.body h6 { + font-family: 'Verdana', sans-serif; + background-color: white; + font-weight: bold; + color: black; + border-bottom: 1px solid #ccc; + margin: 20px -20px 10px -20px; + padding: 3px 0 3px 10px; +} + +div.body h1 { margin-top: 10pt; font-size: 150%; } +div.body h2 { font-size: 120%; } +div.body h3 { font-size: 100%; } +div.body h4 { font-size: 80%; } +div.body h5 
{ font-size: 600%; } +div.body h6 { font-size: 40%; } + +a.headerlink { + color: #c60f0f; + font-size: 0.8em; + padding: 0 4px 0 4px; + text-decoration: none; + visibility: hidden; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink { + visibility: visible; +} + +a.headerlink:hover { + background-color: #c60f0f; + color: white; +} + +div.body p, div.body dd, div.body li { + text-align: justify; + line-height: 130%; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +ul.fakelist { + list-style: none; + margin: 10px 0 10px 20px; + padding: 0; +} + +.field-list ul { + padding-left: 1em; +} + +.first { + margin-top: 0 !important; +} + +/* "Footnotes" heading */ +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +/* "Topics" */ + +div.topic { + background-color: #eee; + border: 1px solid #ccc; + padding: 0 7px 0 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* Admonitions */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +div.admonition dl { + margin-bottom: 0; +} + +div.admonition p { + display: inline; +} + +div.seealso { + background-color: #ffc; + border: 1px solid #ff6; +} + +div.warning { + background-color: #ffe4e4; + border: 1px solid #f66; +} + +div.note { + background-color: #eee; + border: 1px solid #ccc; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; + display: inline; +} + +p.admonition-title:after { + content: ":"; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +table.docutils { + border: 0; +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 0; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + 
+table.field-list td, table.field-list th { + border: 0 !important; +} + +table.footnote td, table.footnote th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +dl { + margin-bottom: 15px; + clear: both; +} + +dd p { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +.refcount { + color: #060; +} + +dt:target, +.highlight { + background-color: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +th { + text-align: left; + padding-right: 5px; +} + +pre { + padding: 5px; + background-color: #efc; + color: #333; + border: 1px solid #ac9; + border-left: none; + border-right: none; + overflow: auto; +} + +td.linenos pre { + padding: 5px 0px; + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + margin-left: 0.5em; +} + +table.highlighttable td { + padding: 0 0.5em 0 0.5em; +} + +tt { + background-color: #ecf0f3; + padding: 0 1px 0 1px; + font-size: 0.95em; +} + +tt.descname { + background-color: transparent; + font-weight: bold; + font-size: 1.2em; +} + +tt.descclassname { + background-color: transparent; +} + +tt.xref, a tt { + background-color: transparent; + font-weight: bold; +} + +.footnote:target { background-color: #ffa } + +h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.versionmodified { + font-style: italic; +} + +form.comment { + margin: 0; + padding: 10px 30px 10px 30px; + background-color: #eee; +} + +form.comment h3 { + background-color: #326591; + color: white; + margin: -10px -30px 10px -30px; + padding: 5px; + font-size: 1.4em; +} + +form.comment input, +form.comment textarea { + border: 1px solid #ccc; + padding: 2px; + font-family: sans-serif; + font-size: 100%; +} + +form.comment input[type="text"] { + width: 240px; +} + +form.comment textarea { + 
width: 100%; + height: 200px; + margin-bottom: 10px; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +/* :::: PRINT :::: */ +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0; + width : 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + div#comments div.new-comment-box, + #top-link { + display: none; + } +} diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/_templates/layout.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/_templates/layout.html Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,196 @@ +{%- block doctype -%} + +{%- endblock %} +{%- set reldelim1 = reldelim1 is not defined and ' »' or reldelim1 %} +{%- set reldelim2 = reldelim2 is not defined and ' |' or reldelim2 %} +{%- macro relbar() %} + +{%- endmacro %} +{%- macro sidebar() %} + {%- if builder != 'htmlhelp' %} +
        +
        + {%- block sidebarlogo %} + {%- if logo %} + + {%- endif %} + {%- endblock %} + {%- block sidebartoc %} + {%- if display_toc %} +

        Table Of Contents

        + {{ toc }} + {%- endif %} + {%- endblock %} + {%- block sidebarrel %} + {%- if prev %} +

        Previous topic

        +

        {{ prev.title }}

        + {%- endif %} + {%- if next %} +

        Next topic

        +

        {{ next.title }}

        + {%- endif %} + {%- endblock %} + {%- if sourcename %} + + {%- endif %} + {%- if customsidebar %} + {{ rendertemplate(customsidebar) }} + {%- endif %} + {%- block sidebarsearch %} + {%- if pagename != "search" %} +

        {{ builder == 'web' and 'Keyword' or 'Quick' }} search

        + + {%- if builder == 'web' %} +

        Enter a module, class or function name.

        + {%- endif %} + {%- endif %} + {%- endblock %} +
        +
        + {%- endif %} +{%- endmacro -%} + + + + + {%- if builder != 'htmlhelp' %} + {%- set titlesuffix = " — " + docstitle %} + {%- endif %} + {{ title|striptags }}{{ titlesuffix }} + {%- if builder == 'web' %} + + {%- for link, type, title in page_links %} + + {%- endfor %} + {%- else %} + + + {%- endif %} + {%- if builder != 'htmlhelp' %} + + + + + + {%- if use_opensearch %} + + {%- endif %} + {%- if favicon %} + + {%- endif %} + {%- endif %} +{%- block rellinks %} + {%- if hasdoc('about') %} + + {%- endif %} + + + + {%- if hasdoc('copyright') %} + + {%- endif %} + + {%- if parents %} + + {%- endif %} + {%- if next %} + + {%- endif %} + {%- if prev %} + + {%- endif %} +{%- endblock %} +{%- block extrahead %}{% endblock %} + + + +{% block logilablogo %} + +{% endblock %} + +{%- block relbar1 %}{{ relbar() }}{% endblock %} + +{%- block sidebar1 %}{# possible location for sidebar #}{% endblock %} + +{%- block document %} +
        +
        + {%- if builder != 'htmlhelp' %} +
        + {%- endif %} +
        + {% block body %}{% endblock %} +
        + {%- if builder != 'htmlhelp' %} +
        + {%- endif %} +
        +{%- endblock %} + +{%- block sidebar2 %}{{ sidebar() }}{% endblock %} +
        +
        + +{%- block relbar2 %}{{ relbar() }}{% endblock %} + +{%- block footer %} + +{%- endblock %} + + diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/_themes/cubicweb/layout.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/_themes/cubicweb/layout.html Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,61 @@ +{% extends "basic/layout.html" %} + +{%- block extrahead %} + +{%- if theme_favicon %} + +{%- endif %} + +{%- if theme_canonical_url %} + +{%- endif %} +{% endblock %} + +{% block header %} + +{% if theme_in_progress|tobool %} + Documentation in progress +{% endif %} + +{% if theme_outdated|tobool %} + +{% endif %} + +
        + {%- if theme_logo %} + {% set img, ext = theme_logo.split('.', -1) %} +
        + + + +
        + {%- endif %} +
        +{% endblock %} + +{%- macro relbar() %} + +{%- endmacro %} + +{%- block sidebarlogo %}{%- endblock %} +{%- block sidebarsourcelink %}{%- endblock %} diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/_themes/cubicweb/static/cubicweb.css_t --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/_themes/cubicweb/static/cubicweb.css_t Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,33 @@ +/* + * cubicweb.css_t + * ~~~~~~~~~~~~~~ + * + * Sphinx stylesheet -- cubicweb theme. + * + * :copyright: Copyright 2014 by the Cubicweb team, see AUTHORS. + * :license: LGPL, see LICENSE for details. + * + */ + +@import url("pyramid.css"); + +div.header-small { + background-image: linear-gradient(white, #e2e2e2); + border-bottom: 1px solid #bbb; +} + +div.logo-small { + padding: 10px; +} + +img.logo { + width: 150px; +} + +div.related a { + color: #e6820e; +} + +a, a .pre { + color: #e6820e; +} diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/_themes/cubicweb/static/cubicweb.ico --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/_themes/cubicweb/static/cubicweb.ico Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,1 @@ +../../../../web/data/favicon.ico \ No newline at end of file diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/_themes/cubicweb/static/logo-cubicweb-small.svg --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/_themes/cubicweb/static/logo-cubicweb-small.svg Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,1 @@ +logo-cubicweb.svg \ No newline at end of file diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/_themes/cubicweb/static/logo-cubicweb.svg --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/_themes/cubicweb/static/logo-cubicweb.svg Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,1 @@ +../../../../web/data/logo-cubicweb.svg \ No newline at end of file diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/_themes/cubicweb/theme.conf --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/_themes/cubicweb/theme.conf Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,12 @@ +[theme] +inherit = pyramid +pygments_style = 
sphinx.pygments_styles.PyramidStyle +stylesheet = cubicweb.css + + +[options] +logo = logo-cubicweb.svg +favicon = cubicweb.ico +in_progress = false +outdated = false +canonical_url = diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/api/__init__.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/api/__init__.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,115 @@ +.. _index_module: + +:mod:`cubicweb` +=============== + +.. automodule:: cubicweb + + Exceptions + ---------- + + Base exceptions + ~~~~~~~~~~~~~~~ + + .. autoexception:: ProgrammingError + :show-inheritance: + + .. autoexception:: CubicWebException + :show-inheritance: + + .. autoexception:: InternalError + :show-inheritance: + + .. autoexception:: SecurityError + :show-inheritance: + + .. autoexception:: RepositoryError + :show-inheritance: + + .. autoexception:: SourceException + :show-inheritance: + + .. autoexception:: CubicWebRuntimeError + :show-inheritance: + + Repository exceptions + ~~~~~~~~~~~~~~~~~~~~~ + + .. autoexception:: ConnectionError + :show-inheritance: + + .. autoexception:: AuthenticationError + :show-inheritance: + + .. autoexception:: BadConnectionId + :show-inheritance: + + .. autoexception:: UnknownEid + :show-inheritance: + + .. autoexception:: UniqueTogetherError + :show-inheritance: + + Security Exceptions + ~~~~~~~~~~~~~~~~~~~ + + .. autoexception:: Unauthorized + :show-inheritance: + + .. autoexception:: Forbidden + :show-inheritance: + + Source exceptions + ~~~~~~~~~~~~~~~~~ + + .. autoexception:: EidNotInSource + :show-inheritance: + + Registry exceptions + ~~~~~~~~~~~~~~~~~~~ + + .. autoexception:: UnknownProperty + :show-inheritance: + + Query exceptions + ~~~~~~~~~~~~~~~~ + + .. autoexception:: QueryError + :show-inheritance: + + .. autoexception:: NotAnEntity + :show-inheritance: + + .. autoexception:: MultipleResultsError + :show-inheritance: + + .. autoexception:: NoResultError + :show-inheritance: + + .. 
autoexception:: UndoTransactionException + :show-inheritance: + + + Misc + ~~~~ + + .. autoexception:: ConfigurationError + :show-inheritance: + + .. autoexception:: ExecutionError + :show-inheritance: + + .. autoexception:: BadCommandUsage + :show-inheritance: + + .. autoexception:: ValidationError + :show-inheritance: + + + Utilities + --------- + + .. autoclass:: Binary + .. autoclass:: CubicWebEventManager + .. autofunction:: onevent + .. autofunction:: validation_error diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/api/appobject.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/api/appobject.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,10 @@ +.. _appobject_module: + +:mod:`cubicweb.appobject` +========================= + +.. automodule:: cubicweb.appobject + + .. autoclass:: AppObject + :show-inheritance: + :members: diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/api/cwvreg.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/api/cwvreg.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,45 @@ +.. _cwvreg_module: + +:mod:`cubicweb.cwvreg` +====================== + +.. automodule:: cubicweb.cwvreg + + .. autoclass:: CWRegistryStore + :show-inheritance: + :members: + :undoc-members: + + .. autoclass:: CWRegistry + :show-inheritance: + :members: schema, poss_visible_objects, select + + .. autoclass:: InstancesRegistry + :show-inheritance: + :members: + + .. autoclass:: ETypeRegistry + :show-inheritance: + :members: + + .. autoclass:: ViewsRegistry + :show-inheritance: + :members: + + .. autoclass:: ActionsRegistry + :show-inheritance: + :members: + + .. autoclass:: CtxComponentsRegistry + :show-inheritance: + :members: + + .. autoclass:: BwCompatCWRegistry + :show-inheritance: + :members: + + +:mod:`logilab.common.registry` +============================== + +.. 
automodule:: logilab.common.registry diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/api/dataimport.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/api/dataimport.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,63 @@ +.. _dataimport_module: + +:mod:`cubicweb.dataimport` +========================== + +.. automodule:: cubicweb.dataimport + + Utilities + --------- + + .. autofunction:: count_lines + + .. autofunction:: ucsvreader_pb + + .. autofunction:: ucsvreader + + .. autofunction:: callfunc_every + + .. autofunction:: lazytable + + .. autofunction:: lazydbtable + + .. autofunction:: mk_entity + + Sanitizing/coercing functions + ----------------------------- + + .. autofunction:: optional + .. autofunction:: required + .. autofunction:: todatetime + .. autofunction:: call_transform_method + .. autofunction:: call_check_method + + Integrity functions + ------------------- + + .. autofunction:: check_doubles + .. autofunction:: check_doubles_not_none + + Object Stores + ------------- + + .. autoclass:: ObjectStore + :members: + + .. autoclass:: RQLObjectStore + :show-inheritance: + :members: + + .. autoclass:: NoHookRQLObjectStore + :show-inheritance: + :members: + + .. autoclass:: SQLGenObjectStore + :show-inheritance: + :members: + + Import Controller + ----------------- + + .. autoclass:: CWImportController + :show-inheritance: + :members: diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/api/predicates.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/api/predicates.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,57 @@ +.. _predicates_module: + +:mod:`cubicweb.predicates` +========================== + +.. automodule:: cubicweb.predicates + + .. autoclass:: cubicweb.appobject.yes + .. autoclass:: cubicweb.predicates.match_kwargs + .. autoclass:: cubicweb.predicates.appobject_selectable + .. autoclass:: cubicweb.predicates.adaptable + .. autoclass:: cubicweb.predicates.configuration_values + + .. autoclass:: cubicweb.predicates.none_rset + .. 
autoclass:: cubicweb.predicates.any_rset + .. autoclass:: cubicweb.predicates.nonempty_rset + .. autoclass:: cubicweb.predicates.empty_rset + .. autoclass:: cubicweb.predicates.one_line_rset + .. autoclass:: cubicweb.predicates.multi_lines_rset + .. autoclass:: cubicweb.predicates.multi_columns_rset + .. autoclass:: cubicweb.predicates.paginated_rset + .. autoclass:: cubicweb.predicates.sorted_rset + .. autoclass:: cubicweb.predicates.one_etype_rset + .. autoclass:: cubicweb.predicates.multi_etypes_rset + + .. autoclass:: cubicweb.predicates.non_final_entity + .. autoclass:: cubicweb.predicates.is_instance + .. autoclass:: cubicweb.predicates.score_entity + .. autoclass:: cubicweb.predicates.rql_condition + .. autoclass:: cubicweb.predicates.relation_possible + .. autoclass:: cubicweb.predicates.partial_relation_possible + .. autoclass:: cubicweb.predicates.has_related_entities + .. autoclass:: cubicweb.predicates.partial_has_related_entities + .. autoclass:: cubicweb.predicates.has_permission + .. autoclass:: cubicweb.predicates.has_add_permission + .. autoclass:: cubicweb.predicates.has_mimetype + .. autoclass:: cubicweb.predicates.is_in_state + .. autofunction:: cubicweb.predicates.on_fire_transition + + .. autoclass:: cubicweb.predicates.match_user_groups + + .. autoclass:: cubicweb.predicates.no_cnx + .. autoclass:: cubicweb.predicates.anonymous_user + .. autoclass:: cubicweb.predicates.authenticated_user + .. autoclass:: cubicweb.predicates.match_form_params + .. autoclass:: cubicweb.predicates.match_search_state + .. autoclass:: cubicweb.predicates.match_context_prop + .. autoclass:: cubicweb.predicates.match_context + .. autoclass:: cubicweb.predicates.match_view + .. autoclass:: cubicweb.predicates.primary_view + .. autoclass:: cubicweb.predicates.contextual + .. autoclass:: cubicweb.predicates.specified_etype_implements + .. autoclass:: cubicweb.predicates.attribute_edited + .. autoclass:: cubicweb.predicates.match_transition + + .. 
autoclass:: cubicweb.predicates.match_exception + .. autoclass:: cubicweb.predicates.debug_mode diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/api/req.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/api/req.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,11 @@ +.. _req_module: + +:mod:`cubicweb.req` +=================== + +.. automodule:: cubicweb.req + + .. autoexception:: FindEntityError + + .. autoclass:: RequestSessionBase + :members: diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/api/rset.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/api/rset.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,10 @@ +.. _rset_module: + +:mod:`cubicweb.rset` +==================== + +.. automodule:: cubicweb.rset + + .. autoclass:: ResultSet + :members: + diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/api/urlpublishing.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/api/urlpublishing.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,36 @@ +.. _urlpublishing_module: + +:mod:`cubicweb.web.views.urlpublishing` +======================================= + +.. automodule:: cubicweb.web.views.urlpublishing + + .. autoexception:: PathDontMatch + + .. autoclass:: URLPublisherComponent + :show-inheritance: + :members: + + .. autoclass:: URLPathEvaluator + :show-inheritance: + :members: + + .. autoclass:: RawPathEvaluator + :show-inheritance: + :members: + + .. autoclass:: EidPathEvaluator + :show-inheritance: + :members: + + .. autoclass:: RestPathEvaluator + :show-inheritance: + :members: + + .. autoclass:: URLRewriteEvaluator + :show-inheritance: + :members: + + .. autoclass:: ActionPathEvaluator + :show-inheritance: + :members: diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/api/urlrewrite.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/api/urlrewrite.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,18 @@ +.. _urlrewrite_module: + +:mod:`cubicweb.web.views.urlrewrite` +======================================= + +.. automodule:: cubicweb.web.views.urlrewrite + + .. 
autoclass:: URLRewriter + :show-inheritance: + :members: + + .. autoclass:: SimpleReqRewriter + :show-inheritance: + :members: + + .. autoclass:: SchemaBasedRewriter + :show-inheritance: + :members: diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/api/web.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/api/web.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,21 @@ +.. _web_module: + +:mod:`cubicweb.web` +=================== + +.. automodule:: cubicweb.web + + Exceptions + ---------- + + .. autoexception:: DirectResponse + .. autoexception:: InvalidSession + .. autoexception:: PublishException + .. autoexception:: LogOut + .. autoexception:: Redirect + .. autoexception:: StatusResponse + .. autoexception:: RequestError + .. autoexception:: NothingToEdit + .. autoexception:: ProcessFormError + .. autoexception:: NotFound + .. autoexception:: RemoteCallFailed diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/book/MERGE_ME-tut-create-app.en.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/MERGE_ME-tut-create-app.en.txt Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,386 @@ +.. -*- coding: utf-8 -*- + + +Tutoriel : créer votre première application web pour Google AppEngine +===================================================================== + +[TRANSLATE ME TO FRENCH] + +This tutorial will guide you step by step to build a blog application +and discover the unique features of `LAX`. It assumes that you followed +the :ref:`installation` guidelines and that both the `AppEngine SDK` and the +`LAX` framework are setup on your computer. + +Creating a new application +-------------------------- + +We choosed in this tutorial to develop a blog as an example of web application +and will go through each required steps/actions to have it running with `LAX`. +When you installed `LAX`, you saw a directory named ``skel``. Make a copy of +this directory and call it ``BlogDemo``. + +The location of this directory does not matter. 
But once decided, make sure your ``PYTHONPATH`` is properly set (:ref:`installation`). + + +Defining a schema +----------------- + +With `LAX`, the schema/datamodel is the core of the application. This is where +you will define the type of content you have to hanlde in your application. + +Let us start with something simple and improve on it iteratively. + +In schema.py, we define two entities: ``Blog`` and ``BlogEntry``. + +:: + + class Blog(EntityType): + title = String(maxsize=50, required=True) + description = String() + + class BlogEntry(EntityType): + title = String(maxsize=100, required=True) + publish_date = Date(default='TODAY') + text = String(fulltextindexed=True) + category = String(vocabulary=('important','business')) + entry_of = SubjectRelation('Blog', cardinality='?*') + +A Blog has a title and a description. The title is a string that is +required by the class EntityType and must be less than 50 characters. +The description is a string that is not constrained. + +A BlogEntry has a title, a publish_date and a text. The title is a +string that is required and must be less than 100 characters. The +publish_date is a Date with a default value of TODAY, meaning that +when a BlogEntry is created, its publish_date will be the current day +unless it is modified. The text is a string that will be indexed in +the full-text index and has no constraint. + +A BlogEntry also has a relationship ``entry_of`` that link it to a +Blog. The cardinality ``?*`` means that a BlogEntry can be part of +zero or one Blog (``?`` means `zero or one`) and that a Blog can +have any number of BlogEntry (``*`` means `any number including +zero`). For completeness, remember that ``+`` means `one or more`. + +Running the application +----------------------- + +Defining this simple schema is enough to get us started. 
Make sure you +followed the setup steps described in detail in the installation +chapter (especially visiting http://localhost:8080/_load as an +administrator), then launch the application with the command:: + + python dev_appserver.py BlogDemo + +and point your browser at http://localhost:8080/ (if it is easier for +you, use the on-line demo at http://lax.appspot.com/). + +.. image:: images/lax-book.00-login.en.png + :alt: login screen + +After you log in, you will see the home page of your application. It +lists the entity types: Blog and BlogEntry. If these links read +``blog_plural`` and ``blogentry_plural`` it is because +internationalization (i18n) is not working for you yet. Please ignore +this for now. + +.. image:: images/lax-book.01-start.en.png + :alt: home page + +Creating system entities +------------------------ +You can only create new users if you decided not to use google authentication. + + +[WRITE ME : create users manages permissions etc] + + + +Creating application entites +---------------------------- + +Create a Blog +~~~~~~~~~~~~~ + +Let us create a few of these entities. Click on the [+] at the right +of the link Blog. Call this new Blog ``Tech-blog`` and type in +``everything about technology`` as the description, then validate the +form by clicking on ``Validate``. + +.. image:: images/lax-book.02-create-blog.en.png + :alt: from to create blog + +Click on the logo at top left to get back to the home page, then +follow the Blog link that will list for you all the existing Blog. +You should be seeing a list with a single item ``Tech-blog`` you +just created. + +.. image:: images/lax-book.03-list-one-blog.en.png + :alt: displaying a list of a single blog + +Clicking on this item will get you to its detailed description except +that in this case, there is not much to display besides the name and +the phrase ``everything about technology``. + +.. 
image:: images/lax-book.04-detail-one-blog.en.png + :alt: displaying the detailed view of a blog + +Now get back to the home page by clicking on the top-left logo, then +create a new Blog called ``MyLife`` and get back to the home page +again to follow the Blog link for the second time. The list now +has two items. + +.. image:: images/lax-book.05-list-two-blog.en.png + :alt: displaying a list of two blogs + + +Create a BlogEntry +~~~~~~~~~~~~~~~~~~ + +Get back to the home page and click on [+] at the right of the link +BlogEntry. Call this new entry ``Hello World`` and type in some text +before clicking on ``Validate``. You added a new blog entry without +saying to what blog it belongs. There is a box on the left entitled +``actions``, click on the menu item ``modify``. You are back to the form +to edit the blog entry you just created, except that the form now has +another section with a combobox titled ``add relation``. Chose +``entry_of`` in this menu and a second combobox appears where you pick +``MyLife``. + +You could also have, at the time you started to fill the form for a +new entity BlogEntry, hit ``Apply`` instead of ``Validate`` and the +combobox titled ``add relation`` would have showed up. + +.. image:: images/lax-book.06-add-relation-entryof.en.png + :alt: editing a blog entry to add a relation to a blog + +Validate the changes by clicking ``Validate``. The entity BlogEntry +that is displayed now includes a link to the entity Blog named +``MyLife``. + +.. image:: images/lax-book.07-detail-one-blogentry.en.png + :alt: displaying the detailed view of a blogentry + +Remember that all of this was handled by the framework and that the +only input that was provided so far is the schema. To get a graphical +view of the schema, run the ``laxctl genschema BlogDemo`` command as +explained in the installation section and point your browser to the +URL http://localhost:8080/schema + +.. 
image:: images/lax-book.08-schema.en.png + :alt: graphical view of the schema (aka data-model) + +Site configuration +------------------ + +.. image:: images/lax-book.03-site-config-panel.en.png + +This panel allows you to configure the appearance of your application site. +Six menus are available and we will go through each of them to explain how +to use them. + +Navigation +~~~~~~~~~~ +This menu provides you a way to adjust some navigation options depending on +your needs, such as the number of entities to display by page of results. +Follows the detailled list of available options: + +* navigation.combobox-limit: maximum number of entities to display in related + combo box (sample format: 23) +* navigation.page-size: maximum number of objects displayed by page of results + (sample format: 23) +* navigation.related-limit: maximum number of related entities to display in + the primary view (sample format: 23) +* navigation.short-line-size: maximum number of characters in short description + (sample format: 23) + +UI +~~ +This menu provides you a way to customize the user interface settings such as +date format or encoding in the produced html. +Follows the detailled list of available options: + +* ui.date-format : how to format date in the ui ("man strftime" for format description) +* ui.datetime-format : how to format date and time in the ui ("man strftime" for format + description) +* ui.default-text-format : default text format for rich text fields. +* ui.encoding : user interface encoding +* ui.fckeditor : should html fields being edited using fckeditor (a HTML WYSIWYG editor). + You should also select text/html as default text format to actually get fckeditor. 
+* ui.float-format : how to format float numbers in the ui +* ui.language : language of the user interface +* ui.main-template : id of main template used to render pages +* ui.site-title : site title, which is displayed right next to the logo in the header +* ui.time-format : how to format time in the ui ("man strftime" for format description) + + +Actions +~~~~~~~ +This menu provides a way to configure the context in which you expect the actions +to be displayed to the user and if you want the action to be visible or not. +You must have notice that when you view a list of entities, an action box is +available on the left column which display some actions as well as a drop-down +menu for more actions. + +The context available are: + +* mainactions : actions listed in the left box +* moreactions : actions listed in the `more` menu of the left box +* addrelated : add actions listed in the left box +* useractions : actions listed in the first section of drop-down menu + accessible from the right corner user login link +* siteactions : actions listed in the second section of drop-down menu + accessible from the right corner user login link +* hidden : select this to hide the specific action + +Boxes +~~~~~ +The application has already a pre-defined set of boxes you can use right away. +This configuration section allows you to place those boxes where you want in the +application interface to customize it. 
+ +The available boxes are: + +* actions box : box listing the applicable actions on the displayed data + +* boxes_blog_archives_box : box listing the blog archives + +* possible views box : box listing the possible views for the displayed data + +* rss box : RSS icon to get displayed data as a RSS thread + +* search box : search box + +* startup views box : box listing the configuration options available for + the application site, such as `Preferences` and `Site Configuration` + +Components +~~~~~~~~~~ +[WRITE ME] + +Contextual components +~~~~~~~~~~~~~~~~~~~~~ +[WRITE ME] + +Set-up a workflow +----------------- + +Before starting, make sure you refresh your mind by reading [link to +definition_workflow chapter]. + +We want to create a workflow to control the quality of the BlogEntry +submitted on your application. When a BlogEntry is created by a user +its state should be `submitted`. To be visible to all, it needs to +be in the state `published`. To move from `submitted` to `published` +we need a transition that we can name `approve_blogentry`. + +We do not want every user to be allowed to change the state of a +BlogEntry. We need to define a group of user, `moderators`, and +this group will have appropriate permissions to approve BlogEntry +to be published and visible to all. + +There are two ways to create a workflow, form the user interface, +and also by defining it in ``migration/postcreate.py``. This script +is executed each time a new ``./bin/laxctl db-init`` is done. +If you create the states and transitions through the user interface +this means that next time you will need to initialize the database +you will have to re-create all the entities. +We strongly recommand you create the workflow in ``migration\postcreate.py`` +and we will now show you how. +The user interface would only be a reference for you to view the states +and transitions but is not the appropriate interface to define your +application workflow. 
+ +Update the schema +~~~~~~~~~~~~~~~~~ +To enable a BlogEntry to have a State, we have to define a relation +``in_state`` in the schema of BlogEntry. Please do as follows, add +the line ``in_state (...)``:: + + class BlogEntry(EntityType): + title = String(maxsize=100, required=True) + publish_date = Date(default='TODAY') + text_format = String(meta=True, internationalizable=True, maxsize=50, + default='text/rest', constraints=[format_constraint]) + text = String(fulltextindexed=True) + category = String(vocabulary=('important','business')) + entry_of = SubjectRelation('Blog', cardinality='?*') + in_state = SubjectRelation('State', cardinality='1*') + +As you updated the schema, you will have re-execute ``./bin/laxctl db-init`` +to initialize the database and migrate your existing entities. +[WRITE ABOUT MIGRATION] + +Create states, transitions and group permissions +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +At the time the ``postcreate.py`` script is executed, several methods +can be used. They are all defined in the ``class ServerMigrationHelper``. +We will only discuss the method we use to create a wrokflow here. + +To define our workflow for BlogDemo, please add the following lines +to ``migration/postcreate.py``:: + + _ = unicode + + moderators = add_entity('CWGroup', name=u"moderators") + + submitted = add_state(_('submitted'), 'BlogEntry', initial=True) + published = add_state(_('published'), 'BlogEntry') + + add_transition(_('approve_blogentry'), 'BlogEntry', (submitted,), published, ('moderators', 'managers'),) + + checkpoint() + +``add_entity`` is used here to define the new group of users that we +need to define the transitions, `moderators`. +If this group required by the transition is not defined before the +transition is created, it will not create the relation `transition +require the group moderator`. 
+ +``add_state`` expects as the first argument the name of the state you are +willing to create, then the entity type on which the state can be applied, +and an optionnal argument to set if the state is the initial state +of the entity type or not. + +``add_transition`` expects as the first argument the name of the +transition, then the entity type on which we can apply the transition, +then the list of possible initial states from which the transition +can be applied, the target state of the transition, and the permissions +(e.g. list of the groups of users who can apply the transition). + +.. image:: images/lax-book.03-transitions-view.en.png + +You can now notice that in the actions box of a BlogEntry, the state +is now listed as well as the possible transitions from this state +defined by the workflow. This transition, as defined in the workflow, +will only being displayed for the users belonging to the group +moderators of managers. + +Change view permission +~~~~~~~~~~~~~~~~~~~~~~ + + + +Conclusion +---------- + +Exercise +~~~~~~~~ + +Create new blog entries in ``Tech-blog``. + +What we learned +~~~~~~~~~~~~~~~ + +Creating a simple schema was enough to set up a new application that +can store blogs and blog entries. + +What is next ? +~~~~~~~~~~~~~~ + +Although the application is fully functionnal, its look is very +basic. In the following section we will learn to create views to +customize how data is displayed. + + diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/book/MERGE_ME-tut-create-gae-app.en.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/MERGE_ME-tut-create-gae-app.en.txt Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,218 @@ +.. -*- coding: utf-8 -*- + +.. 
_tutorielGAE: + +Tutoriel : créer votre première application web pour Google AppEngine +===================================================================== + +Ce tutoriel va vous guider pas à pas a construire une apllication web +de gestion de Blog afin de vous faire découvrir les fonctionnalités de +*CubicWeb*. + +Nous supposons que vous avec déjà suivi le guide :ref:`installationGAE`. + + +Créez une nouvelle application +------------------------------ + +Nous choisissons dans ce tutoriel de développer un blog comme un exemple +d'application web et nous allons expliciter toutes les étapes nécessaires +à sa réalisation. + +:: + + cubicweb-ctl newgapp blogdemo + +`newgapp` est la commande permettant de créer une instance *CubicWeb* pour +le datastore. + +Assurez-vous que votre variable d'environnement ``PYTHONPATH`` est correctement +initialisée (:ref:`installationGAE`) + +Définissez un schéma +-------------------- + +Le modèle de données ou schéma est au coeur d'une application *CubicWeb*. +C'est là où vous allez devoir définir le type de contenu que votre application +devra gérer. + +Commençons par un schéma simple que nous améliorerons progressivemment. + +Une fois votre instance ``blogdemo`` crée, vous trouverez un fichier ``schema.py`` +contenant la définition des entités suivantes : ``Blog`` and ``BlogEntry``. + +:: + + class Blog(EntityType): + title = String(maxsize=50, required=True) + description = String() + + class BlogEntry(EntityType): + title = String(maxsize=100, required=True) + publish_date = Date(default='TODAY') + text = String(fulltextindexed=True) + category = String(vocabulary=('important','business')) + entry_of = SubjectRelation('Blog', cardinality='?*') + + +Un ``Blog`` a un titre et une description. Le titre est une chaîne +de caractères requise par la classe parente EntityType and ne doit +pas excéder 50 caractères. La description est une chaîne de +caractères sans contraintes. 
+ +Une ``BlogEntry`` a un titre, une date de publication et du texte +étant son contenu. Le titre est une chaîne de caractères qui ne +doit pas excéder 100 caractères. La date de publication est de type Date et a +pour valeur par défaut TODAY, ce qui signifie que lorsqu'une +``BlogEntry`` sera créée, sa date de publication sera la date +courante a moins de modifier ce champ. Le texte est une chaîne de +caractères qui sera indexée en plein texte et sans contraintes. + +Une ``BlogEntry`` a aussi une relation nommée ``entry_of`` qui la +relie à un ``Blog``. La cardinalité ``?*`` signifie que BlogEntry +peut faire partie de zero a un Blog (``?`` signifie `zero ou un`) et +qu'un Blog peut avoir une infinité de BlogEntry (``*`` signifie +`n'importe quel nombre incluant zero`). +Par soucis de complétude, nous rappellerons que ``+`` signifie +`un ou plus`. + +Lancez l'application +-------------------- + +Définir ce simple schéma est suffisant pour commencer. Assurez-vous +que vous avez suivi les étapes décrites dans la section installation +(en particulier visitez http://localhost:8080/_load en tant qu'administrateur +afin d'initialiser le datastore), puis lancez votre application avec la commande :: + + python dev_appserver.py BlogDemo + +puis dirigez vous vers http://localhost:8080/ (ou si c'est plus facile +vous pouvez utiliser la démo en ligne http://lax.appspot.com/). +[FIXME] -- changer la demo en ligne en quelque chose qui marche (!) + +.. image:: images/lax-book.00-login.en.png + :alt: login screen + +Après vous être authentifié, vous arrivez sur la page d'accueil de votre +application. Cette page liste les types d'entités accessibles dans votre +application, en l'occurrence : Blog et Articles. Si vous lisez ``blog_plural`` +et ``blogentry_plural`` cela signifie que l'internationalisation (i18n) +n'a pas encore fonctionné. Ignorez cela pour le moment. + +.. 
image:: images/lax-book.01-start.en.png + :alt: home page + +Créez des entités système +------------------------- + +Vous ne pourrez créer de nouveaux utilisateurs que dans le cas où vous +avez choisi de ne pas utiliser l'authentification Google. + + +[WRITE ME : create users manages permissions etc] + + + +Créez des entités applicatives +------------------------------ + +Créez un Blog +~~~~~~~~~~~~~ + +Créons à présent quelques entités. Cliquez sur `[+]` sur la +droite du lien Blog. Appelez cette nouvelle entité Blog ``Tech-Blog`` +et tapez pour la description ``everything about technology``, +puis validez le formulaire d'édition en cliquant sur le bouton +``Validate``. + + +.. image:: images/lax-book.02-create-blog.en.png + :alt: from to create blog + +En cliquant sur le logo situé dans le coin gauche de la fenêtre, +vous allez être redirigé vers la page d'accueil. Ensuite, si vous allez +sur le lien Blog, vous devriez voir la liste des entités Blog, en particulier +celui que vous venez juste de créer ``Tech-Blog``. + +.. image:: images/lax-book.03-list-one-blog.en.png + :alt: displaying a list of a single blog + +Si vous cliquez sur ``Tech-Blog`` vous devriez obtenir une description +détaillée, ce qui dans notre cas, n'est rien de plus que le titre +et la phrase ``everything about technology`` + + +.. image:: images/lax-book.04-detail-one-blog.en.png + :alt: displaying the detailed view of a blog + +Maintenant retournons sur la page d'accueil et créons un nouveau +Blog ``MyLife`` et retournons sur la page d'accueil, puis suivons +le lien Blog et nous constatons qu'à présent deux blogs sont listés. + +.. image:: images/lax-book.05-list-two-blog.en.png + :alt: displaying a list of two blogs + +Créons un article +~~~~~~~~~~~~~~~~~ + +Revenons sur la page d'accueil et cliquons sur `[+]` à droite du lien +`articles`. Appellons cette nouvelle entité ``Hello World`` et introduisons +un peut de texte avant de ``Valider``. 
Vous venez d'ajouter un article +sans avoir précisé à quel Blog il appartenait. Dans la colonne de gauche +se trouve une boite intitulé ``actions``, cliquez sur le menu ``modifier``. +Vous êtes de retour sur le formulaire d'édition de l'article que vous +venez de créer, à ceci près que ce formulaire a maintenant une nouvelle +section intitulée ``ajouter relation``. Choisissez ``entry_of`` dans ce menu, +cela va faire apparaitre une deuxième menu déroulant dans lequel vous +allez pouvoir séléctionner le Blog ``MyLife``. + +Vous auriez pu aussi, au moment où vous avez crée votre article, sélectionner +``appliquer`` au lieu de ``valider`` et le menu ``ajouter relation`` serait apparu. + +.. image:: images/lax-book.06-add-relation-entryof.en.png + :alt: editing a blog entry to add a relation to a blog + +Validez vos modifications en cliquant sur ``Valider``. L'entité article +qui est listée contient maintenant un lien vers le Blog auquel il +appartient, ``MyLife``. + +.. image:: images/lax-book.07-detail-one-blogentry.en.png + :alt: displaying the detailed view of a blogentry + +Rappelez-vous que pour le moment, tout a été géré par la plate-forme +*CubicWeb* et que la seule chose qui a été fournie est le schéma de +données. D'ailleurs pour obtenir une vue graphique du schéma, exécutez +la commande ``laxctl genschema blogdemo`` et vous pourrez visualiser +votre schéma a l'URL suivante : http://localhost:8080/schema + +.. image:: images/lax-book.08-schema.en.png + :alt: graphical view of the schema (aka data-model) + + +Change view permission +~~~~~~~~~~~~~~~~~~~~~~ + + + +Conclusion +---------- + +Exercise +~~~~~~~~ + +Create new blog entries in ``Tech-blog``. + +What we learned +~~~~~~~~~~~~~~~ + +Creating a simple schema was enough to set up a new application that +can store blogs and blog entries. + +What is next ? +~~~~~~~~~~~~~~ + +Although the application is fully functionnal, its look is very +basic. 
In the following section we will learn to create views to +customize how data is displayed. + + diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/book/README --- a/doc/book/README Wed Dec 09 16:36:17 2015 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,85 +0,0 @@ -==== -Book -==== - ----- -Part ----- - -Chapter -======= - -.. _Level1AnchorForLaterReference: - -Level 1 section ---------------- - -Level 2 section -~~~~~~~~~~~~~~~ - -Level 3 section -``````````````` - - - -*CubicWeb* - - -inline directives: - :file:`directory/file` - :envvar:`AN_ENV_VARIABLE` - :command:`command --option arguments` - - :ref:, :mod: - - -.. sourcecode:: python - - class SomePythonCode: - ... - -.. XXX a comment, wont be rendered - - -a [foot note]_ - -.. [foot note] the foot note content - - -Boxes -===== - -- warning box: - .. warning:: - - Warning content -- note box: - .. note:: - - Note content - - - -Cross references -================ - -To arbitrary section --------------------- - -:ref:`identifier` ou :ref:`label ` - -Label required of referencing node which as no title, else the node's title will be used. - - -To API objects --------------- -See the autodoc sphinx extension documentation. Quick overview: - -* ref to a class: :class:`cubicweb.devtools.testlib.AutomaticWebTest` - -* if you can to see only the class name in the generated documentation, add a ~: - :class:`~cubicweb.devtools.testlib.AutomaticWebTest` - -* you can also use :mod: (module), :exc: (exception), :func: (function), :meth: (method)... - -* syntax explained above to specify label explicitly may also be used diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/book/additionnal_services/index.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/additionnal_services/index.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,14 @@ +Additional services +=================== + +In this chapter, we introduce services crossing the *web - +repository - administration* organisation of the first parts of the +CubicWeb book. 
Those services can be either proper services (like the +undo functionality) or mere *topical cross-sections* across CubicWeb. + +.. toctree:: + :maxdepth: 2 + + undo + + diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/book/additionnal_services/undo.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/additionnal_services/undo.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,337 @@ +Undoing changes in CubicWeb +--------------------------- + +Many desktop applications offer the possibility for the user to +undo its last changes : this *undo feature* has now been +integrated into the CubicWeb framework. This document will +introduce you to the *undo feature* both from the end-user and the +application developer point of view. + +But because a semantic web application and a common desktop +application are not the same thing at all, especially as far as +undoing is concerned, we will first introduce *what* is the *undo +feature* for now. + +What's *undoing* in a CubicWeb application +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +What is an *undo feature* is quite intuitive in the context of a +desktop application. But it is a bit subtler in the context of a +Semantic Web application. This section introduces some of the main +differences between a classical desktop and a Semantic Web +applications to keep in mind in order to state precisely *what we +want*. + +The notion transactions +``````````````````````` + +A CubicWeb application acts upon an *Entity-Relationship* model, +described by a schema. This allows to ensure some data integrity +properties. It also implies that changes are made by all-or-none +groups called *transactions*, such that the data integrity is +preserved whether the transaction is completely applied *or* none +of it is applied. + +A transaction can thus include more actions than just those +directly required by the main purpose of the user. 
For example, +when a user *just* writes a new blog entry, the underlying +*transaction* holds several *actions* as illustrated below : + +* By admin on 2012/02/17 15:18 - Created Blog entry : Torototo + + #. Created Blog entry : Torototo + #. Added relation : Torototo owned by admin + #. Added relation : Torototo blog entry of Undo Blog + #. Added relation : Torototo in state draft (draft) + #. Added relation : Torototo created by admin + +Because of the very nature (all-or-none) of the transactions, the +"undoable stuff" are the transactions and not the actions ! + +Public and private actions within a transaction +``````````````````````````````````````````````` + +Actually, within the *transaction* "Created Blog entry : +Torototo", two of those *actions* are said to be *public* and +the others are said to be *private*. *Public* here means that the +public actions (1 and 3) were directly requested by the end user ; +whereas *private* means that the other actions (2, 4, 5) were +triggered "under the hood" to fulfill various requirements for the +user operation (ensuring integrity, security, ... ). + +And because quite a lot of actions can be triggered by a "simple" +end-user request, most of which the end-user is not (and does not +need or wish to be) aware, only the so-called public actions will +appear [1]_ in the description of the an undoable transaction. + +* By admin on 2012/02/17 15:18 - Created Blog entry : Torototo + + #. Created Blog entry : Torototo + #. Added relation : Torototo blog entry of Undo Blog + +But note that both public and private actions will be undone +together when the transaction is undone. + +(In)dependent transactions : the simple case +```````````````````````````````````````````` + +A CubicWeb application can be used *simultaneously* by different users +(whereas a single user works on an given office document at a +given time), so that there is not always a single history +time-line in the CubicWeb case. 
Moreover CubicWeb provides +security through the mechanism of *permissions* granted to each +user. This can lead to some transactions *not* being undoable in +some contexts. + +In the simple case two (unprivileged) users Alice and Bob make +relatively independent changes : then both Alice and Bob can undo +their changes. But in some case there is a clean dependency +between Alice's and Bob's actions or between actions of one of +them. For example let's suppose that : + +- Alice has created a blog, +- then has published a first post inside, +- then Bob has published a second post in the same blog, +- and finally Alice has updated its post contents. + +Then it is clear that Alice can undo her contents changes and Bob +can undo his post creation independently. But Alice can not undo +her post creation while she has not first undone her changes. +It is also clear that Bob should *not* have the +permissions to undo any of Alice's transactions. + + +More complex dependencies between transactions +`````````````````````````````````````````````` + +But more surprising things can quickly happen. Going back to the +previous example, Alice *can* undo the creation of the blog after +Bob has published its post in it ! But this is possible only +because the schema does not *require* for a post to be in a +blog. Would the *blog entry of* relation have been mandatory, then +Alice could not have undone the blog creation because it would +have broken integrity constraint for Bob's post. + +When a user attempts to undo a transaction the system will check +whether a later transaction has explicit dependency on the +would-be-undone transaction. In this case the system will not even +attempt the undo operation and inform the user. + +If no such dependency is detected the system will attempt the undo +operation but it can fail, typically because of integrity +constraint violations. In such a case the undo operation is +completely [3]_ rollbacked. 
+ + +The *undo feature* for CubicWeb end-users +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The exposition of the undo feature to the end-user through a Web +interface is still quite basic and will be improved toward a +greater usability. But it is already fully functional. For now +there are two ways to access the *undo feature* as long as the it +has been activated in the instance configuration file with the +option *undo-support=yes*. + +Immediately after having done the change to be canceled through +the **undo** link in the message. This allows to undo an +hastily action immediately. For example, just after having +validated the creation of the blog entry *A second blog entry* we +get the following message, allowing to undo the creation. + +.. image:: /images/undo_mesage_w600.png + :width: 600px + :alt: Screenshot of the undo link in the message + :align: center + +At any time we can access the **undo-history view** accessible from the +start-up page. + +.. image:: /images/undo_startup-link_w600.png + :width: 600px + :alt: Screenshot of the startup menu with access to the history view + :align: center + +This view will provide inspection of the transaction and their (public) +actions. Each transaction provides its own **undo** link. Only the +transactions the user has permissions to see and undo will be shown. + +.. image:: /images/undo_history-view_w600.png + :width: 600px + :alt: Screenshot of the undo history main view + :align: center + +If the user attempts to undo a transaction which can't be undone or +whose undoing fails, then a message will explain the situation and +no partial undoing will be left behind. + +This is all for the end-user side of the undo mechanism : this is +quite simple indeed ! Now, in the following section, we are going +to introduce the developer side of the undo mechanism. 
+ +The *undo feature* for CubicWeb application developers +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A word of warning : this section is intended for developers, +already having some knowledge of what's under CubicWeb's hood. If +it is not *yet* the case, please refer to CubicWeb documentation +http://docs.cubicweb.org/ . + +Overview +```````` + +The core of the undo mechanisms is at work in the *native source*, +beyond the RQL. This does mean that *transactions* and *actions* +are *no entities*. Instead they are represented at the SQL level +and exposed through the *DB-API* supported by the repository +*Connection* objects. + +Once the *undo feature* has been activated in the instance +configuration file with the option *undo-support=yes*, each +mutating operation (cf. [2]_) will be recorded in some special SQL +table along with its associated transaction. Transaction are +identified by a *txuuid* through which the functions of the +*DB-API* handle them. + +On the web side the last commited transaction *txuuid* is +remembered in the request's data to allow for imediate undoing +whereas the *undo-history view* relies upon the *DB-API* to list +the accessible transactions. The actual undoing is performed by +the *UndoController* accessible at URL of the form +`www.my.host/my/instance/undo?txuuid=...` + +The repository side +``````````````````` + +Please refer to the file `cubicweb/server/sources/native.py` and +`cubicweb/transaction.py` for the details. + +The undoing information is mainly stored in three SQL tables: + +`transactions` + Stores the txuuid, the user eid and the date-and-time of + the transaction. This table is referenced by the two others. + +`tx_entity_actions` + Stores the undo information for actions on entities. + +`tx_relation_actions` + Stores the undo information for the actions on relations. 
+ +When the undo support is activated, entries are added to those +tables for each mutating operation on the data repository, and are +deleted on each transaction undoing. + +Those table are accessible through the following methods of the +repository `Connection` object : + +`undoable_transactions` + Returns a list of `Transaction` objects accessible to the user + and according to the specified filter(s) if any. + +`tx_info` + Returns a `Transaction` object from a `txuuid` + +`undo_transaction` + Returns the list of `Action` object for the given `txuuid`. + + NB: By default it only return *public* actions. + +The web side +```````````` + +The exposure of the *undo feature* to the end-user through the Web +interface relies on the *DB-API* introduced above. This implies +that the *transactions* and *actions* are not *entities* linked by +*relations* on which the usual views can be applied directly. + +That's why the file `cubicweb/web/views/undohistory.py` defines +some dedicated views to access the undo information : + +`UndoHistoryView` + This is a *StartupView*, the one accessible from the home + page of the instance which list all transactions. + +`UndoableTransactionView` + This view handles the display of a single `Transaction` object. + +`UndoableActionBaseView` + This (abstract) base class provides private methods to build + the display of actions whatever their nature. + +`Undoable[Add|Remove|Create|Delete|Update]ActionView` + Those views all inherit from `UndoableActionBaseView` and + each handles a specific kind of action. + +`UndoableActionPredicate` + This predicate is used as a *selector* to pick the appropriate + view for actions. + +Apart from this main *undo-history view* a `txuuid` is stored in +the request's data `last_undoable_transaction` in order to allow +immediate undoing of a hastily validated operation. 
This is +handled in `cubicweb/web/application.py` in the `main_publish` and +`add_undo_link_to_msg` methods for the storing and displaying +respectively. + +Once the undo information is accessible, typically through a +`txuuid` in an *undo* URL, the actual undo operation can be +performed by the `UndoController` defined in +`cubicweb/web/views/basecontrollers.py`. This controller basically +extracts the `txuuid` and performs a call to `undo_transaction` and +in case of an undo-specific error, lets the top level publisher +handle it as a validation error. + + +Conclusion +~~~~~~~~~~ + +The undo mechanism relies upon a low level recording of the +mutating operation on the repository. Those records are accessible +through some method added to the *DB-API* and exposed to the +end-user either through a whole history view of through an +immediate undoing link in the message box. + +The undo feature is functional but the interface and configuration +options are still quite reduced. One major improvement would be to +be able to filter with a finer grain which transactions or actions +one wants to see in the *undo-history view*. Another critical +improvement would be to enable the undo feature on a part only of +the entity-relationship schema to avoid storing too much useless +data and reduce the underlying overhead. + +But both functionality are related to the strong design choice not +to represent transactions and actions as entities and +relations. This has huge benefits in terms of safety and conceptual +simplicity but prevents from using lots of convenient CubicWeb +features such as *facets* to access undo information. + +Before developing further the undo feature or eventually revising +this design choice, it appears that some return of experience is +strongly needed. So don't hesitate to try the undo feature in your +application and send us some feedback. + + +Notes +~~~~~ + +.. 
[1] The end-user Web interface could be improved to enable + the user to choose whether they wish to see private actions. + +.. [2] There are only five kinds of elementary actions (beyond + merely accessing data for reading): + + * **C** : creating an entity + * **D** : deleting an entity + * **U** : updating an entity's attributes + * **A** : adding a relation + * **R** : removing a relation + +.. [3] Meaning none of the actions in the transaction is + undone. Depending upon the application, it might make sense + to enable *partial* undo. That is to say undo in which some + actions could not be undone without preventing the undoing of the + other actions in the transaction (as long as it does not + break schema integrity). This is not forbidden by the + back-end but is deliberately not supported by the front-end + (for now at least). diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/book/admin/additional-tips.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/admin/additional-tips.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,64 @@ + +.. _Additional Tips: + +Backups (mostly with postgresql) +-------------------------------- + +It is always a good idea to back up. If your system does not do that, +you should set it up. Note that whenever you do an upgrade, +`cubicweb-ctl` offers to back up your database. There are a number +of ways of doing backups. 
+ +Using postgresql (and only that) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Before you +go ahead, make sure the following permissions are correct :: + + # chgrp postgres /var/lib/cubicweb/backup + # chmod g+ws /var/lib/cubicweb/backup + # chgrp postgres /etc/cubicweb.d/**/sources + # chmod g+r /etc/cubicweb.d/**/sources + +Simply use pg_dump in a cron job installed for the `postgres` user on the database server:: + + # m h dom mon dow command + 0 2 * * * pg_dump -Fc --username=cubicweb --no-owner > /var/backups/-$(date '+%Y-%m-%d_%H:%M:%S').dump + +Using :command:`cubicweb-ctl db-dump` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The CubicWeb way is to use the :command:`db-dump` command. For that, +you have to put your passwords in a user-only-readable file in the +home directory of the root user. The file is `.pgpass` (`chmod 0600`), in +this case for a socket connection to PostgreSQL :: + + /var/run/postgresql:5432::: + +The postgres documentation for the `.pgpass` format can be found `here`_. + +Then add the following command to the crontab of the user (`crontab -e`):: + + # m h dom mon dow command + 0 2 * * * cubicweb-ctl db-dump + + +Backup ninja +~~~~~~~~~~~~ + +You can use a combination of `backup-ninja`_ (which has a postgres script in the +example directory) and `backuppc`_ (for versioning). + +Please note that in the *CubicWeb way* it adds a second location for your +password, which is error-prone. + +.. _`here` : http://www.postgresql.org/docs/current/static/libpq-pgpass.html +.. _`backup-ninja` : https://labs.riseup.net/code/projects/show/backupninja/ +.. _`backuppc` : http://backuppc.sourceforge.net/ + +.. warning:: + + Remember that these indications will fail you whenever you use + another database backend than postgres. Also it does not properly handle + externally managed data such as files (using the Bytes File System + Storage). 
diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/book/admin/config.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/admin/config.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,229 @@ +.. -*- coding: utf-8 -*- + +.. _ConfigEnv: + +Set-up of a *CubicWeb* environment +================================== + +You can `configure the database`_ system of your choice: + + - `PostgreSQL configuration`_ + - `MySql configuration`_ + - `SQLServer configuration`_ + - `SQLite configuration`_ + +For advanced features, have a look to: + + - `Cubicweb resources configuration`_ + +.. _`configure the database`: DatabaseInstallation_ +.. _`PostgreSQL configuration`: PostgresqlConfiguration_ +.. _`MySql configuration`: MySqlConfiguration_ +.. _`SQLServer configuration`: SQLServerConfiguration_ +.. _`SQLite configuration`: SQLiteConfiguration_ +.. _`Cubicweb resources configuration`: RessourcesConfiguration_ + + + +.. _RessourcesConfiguration: + +Cubicweb resources configuration +-------------------------------- + +.. autodocstring:: cubicweb.cwconfig + + +.. _DatabaseInstallation: + +Databases configuration +----------------------- + +Each instance can be configured with its own database connection information, +that will be stored in the instance's :file:`sources` file. The database to use +will be chosen when creating the instance. CubicWeb is known to run with +Postgresql (recommended), SQLServer and SQLite, and may run with MySQL. + +Other possible sources of data include CubicWeb, Subversion, LDAP and Mercurial, +but at least one relational database is required for CubicWeb to work. You do +not need to install a backend that you do not intend to use for one of your +instances. SQLite is not fit for production use, but it works well for testing +and ships with Python, which saves installation time when you want to get +started quickly. + +.. _PostgresqlConfiguration: + +PostgreSQL +~~~~~~~~~~ + +Many Linux distributions ship with the appropriate PostgreSQL packages. 
+Basically, you need to install the following packages: + +* `postgresql` and `postgresql-client`, which will pull the respective + versioned packages (e.g. `postgresql-9.1` and `postgresql-client-9.1`) and, + optionally, +* a `postgresql-plpython-X.Y` package with a version corresponding to that of + the aforementioned packages (e.g. `postgresql-plpython-9.1`). + +If you run postgres version prior to 8.3, you'll also need the +`postgresql-contrib-8.X` package for full-text search extension. + +If you run postgres on another host than the |cubicweb| repository, you should +install the `postgresql-client` package on the |cubicweb| host, and others on the +database host. + +For extra details concerning installation, please refer to the `PostgreSQL +project online documentation`_. + +.. _`PostgreSQL project online documentation`: http://www.postgresql.org/docs + + +Database cluster +++++++++++++++++ + +If you already have an existing cluster and PostgreSQL server running, you do +not need to execute the initilization step of your PostgreSQL database unless +you want a specific cluster for |cubicweb| databases or if your existing +cluster doesn't use the UTF8 encoding (see note below). + +To initialize a PostgreSQL cluster, use the command ``initdb``:: + + $ initdb -E UTF8 -D /path/to/pgsql + +Notice the encoding specification. This is necessary since |cubicweb| usually +want UTF8 encoded database. If you use a cluster with the wrong encoding, you'll +get error like:: + + new encoding (UTF8) is incompatible with the encoding of the template database (SQL_ASCII) + HINT: Use the same encoding as in the template database, or use template0 as template. + +Once initialized, start the database server PostgreSQL with the command:: + + $ postgres -D /path/to/psql + +If you cannot execute this command due to permission issues, please make sure +that your username has write access on the database. 
:: + + $ chown username /path/to/pgsql + +Database authentication ++++++++++++++++++++++++ + +The database authentication is configured in `pg_hba.conf`. It can be either set +to `ident sameuser` or `md5`. If set to `md5`, make sure to use an existing +user of your database. If set to `ident sameuser`, make sure that your client's +operating system user name has a matching user in the database. If not, please +do as follow to create a user:: + + $ su + $ su - postgres + $ createuser -s -P username + +The option `-P` (for password prompt), will encrypt the password with the +method set in the configuration file :file:`pg_hba.conf`. If you do not use this +option `-P`, then the default value will be null and you will need to set it +with:: + + $ su postgres -c "echo ALTER USER username WITH PASSWORD 'userpasswd' | psql" + +The above login/password will be requested when you will create an instance with +`cubicweb-ctl create` to initialize the database of your instance. + +Notice that the `cubicweb-ctl db-create` does database initialization that +may requires a postgres superuser. That's why a login/password is explicitly asked +at this step, so you can use there a superuser without using this user when running +the instance. Things that require special privileges at this step: + +* database creation, require the 'create database' permission +* install the plpython extension language (require superuser) +* install the tsearch extension for postgres version prior to 8.3 (require superuser) + +To avoid using a super user each time you create an install, a nice trick is to +install plpython (and tsearch when needed) on the special `template1` database, +so they will be installed automatically when cubicweb databases are created +without even with needs for special access rights. 
To do so, run :: + + # Installation of plpythonu language by default :: + $ createlang -U pgadmin plpythonu template1 + $ psql -U pgadmin template1 + template1=# update pg_language set lanpltrusted=TRUE where lanname='plpythonu'; + +Where `pgadmin` is a postgres superuser. The last command is necessary since by +default plpython is an 'untrusted' language and as such can't be used by non +superuser. This update fix that problem by making it trusted. + +To install the tsearch plain-text index extension on postgres prior to 8.3, run:: + + cat /usr/share/postgresql/8.X/contrib/tsearch2.sql | psql -U username template1 + + +.. _MySqlConfiguration: + +MySql +~~~~~ +.. warning:: + CubicWeb's MySQL support is not commonly used, so things may or may not work properly. + +You must add the following lines in ``/etc/mysql/my.cnf`` file:: + + transaction-isolation=READ-COMMITTED + default-storage-engine=INNODB + default-character-set=utf8 + max_allowed_packet = 128M + +.. Note:: + It is unclear whether mysql supports indexed string of arbitrary length or + not. + + +.. _SQLServerConfiguration: + +SQLServer +~~~~~~~~~ + +As of this writing, support for SQLServer 2005 is functional but incomplete. You +should be able to connect, create a database and go quite far, but some of the +SQL generated from RQL queries is still currently not accepted by the +backend. Porting to SQLServer 2008 is also an item on the backlog. + +The `source` configuration file may look like this (specific parts only are +shown):: + + [system] + db-driver=sqlserver2005 + db-user=someuser + # database password not needed + #db-password=toto123 + #db-create/init may ask for a pwd: just say anything + db-extra-arguments=Trusted_Connection + db-encoding=utf8 + + +You need to change the default settings on the database by running:: + + ALTER DATABASE SET READ_COMMITTED_SNAPSHOT ON; + +The ALTER DATABASE command above requires some permissions that your +user may not have. 
In that case you will have to ask your local DBA to +run the query for you. + +You can check that the setting is correct by running the following +query, which must return '1':: + + SELECT is_read_committed_snapshot_on + FROM sys.databases WHERE name=''; + + + +.. _SQLiteConfiguration: + +SQLite +~~~~~~ + +SQLite has the great advantage of requiring almost no configuration. Simply +use 'sqlite' as db-driver, and set the path to the database as db-name. Don't specify +anything for db-user and db-password, they will be ignored anyway. + +.. Note:: + SQLite is great for testing and to play with cubicweb but is not suited for + production environments. + diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/book/admin/create-instance.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/admin/create-instance.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,100 @@ +.. -*- coding: utf-8 -*- + +Creation of your first instance +=============================== + +Instance creation +----------------- + +Now that we created a cube, we can create an instance and access it via a web +browser. We will use an `all-in-one` configuration to simplify things :: + + cubicweb-ctl create -c all-in-one mycube myinstance + +.. note:: + Please note that we created a new cube for demo purposes but + you could have used an existing cube available in our standard library + such as blog or person for example. + +A series of questions will be prompted to you, the default answer is usually +sufficient. You can anyway modify the configuration later on by editing +configuration files. When a login/password is requested to access the database +please use the credentials you created at the time you configured the database +(:ref:`PostgresqlConfiguration`). + +It is important to distinguish here the user used to access the database and the +user used to login to the cubicweb instance. When an instance starts, it uses +the login/password for the database to get the schema and handle low level +transactions. 
But, when :command:`cubicweb-ctl create` asks for a manager +login/psswd of *CubicWeb*, it refers to the user you will use during the +development to administrate your web instance. It will be possible, later on, +to use this user to create other users for your final web instance. + + +Instance administration +----------------------- + +start / stop +~~~~~~~~~~~~ + +When this command is completed, the definition of your instance is +located in :file:`~/etc/cubicweb.d/myinstance/*`. To launch it, you +just type :: + + cubicweb-ctl start -D myinstance + +The option `-D` specifies the *debug mode* : the instance is not +running in server mode and does not disconnect from the terminal, +which simplifies debugging in case the instance is not properly +launched. You can see how it looks by visiting the URL +`http://localhost:8080` (the port number depends of your +configuration). To login, please use the cubicweb administrator +login/password you defined when you created the instance. + +To shutdown the instance, Crtl-C in the terminal window is enough. +If you did not use the option `-D`, then type :: + + cubicweb-ctl stop myinstance + +This is it! All is settled down to start developping your data model... + +.. note:: + + The output of `cubicweb-ctl start -D myinstance` can be + overwhelming. It is possible to reduce the log level with the + `--loglevel` parameter as in `cubicweb-ctl start -D myinstance -l + info` to filter out all logs under `info` gravity. + +upgrade +~~~~~~~ + +A manual upgrade step is necessary whenever a new version of CubicWeb or +a cube is installed, in order to synchronise the instance's +configuration and schema with the new code. The command is:: + + cubicweb-ctl upgrade myinstance + +A series of questions will be asked. It always starts with a proposal +to make a backup of your sources (where it applies). Unless you know +exactly what you are doing (i.e. 
typically fiddling in debug mode, but +definitely NOT migrating a production instance), you should answer YES +to that. + +The remaining questions concern the migration steps of |cubicweb|, +then of the cubes that form the whole application, in reverse +dependency order. + +In principle, if the migration scripts have been properly written and +tested, you should answer YES to all questions. + +Sometimes, typically while debugging a migration script, something goes +wrong and the migration fails. Unfortunately the database may be in an +incoherent state. You have two options here: + +* fix the bug, restore the database and restart the migration process + from scratch (quite recommended in a production environment) + +* try to replay the migration up to the last successful commit, that + is, answer NO to all questions up to the step that failed, and + finish by answering YES to the remaining questions. + diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/book/admin/cubicweb-ctl.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/admin/cubicweb-ctl.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,111 @@ +.. -*- coding: utf-8 -*- + +.. _cubicweb-ctl: + +``cubicweb-ctl`` tool +===================== + +`cubicweb-ctl` is the Swiss army knife to manage *CubicWeb* instances. +The general syntax is :: + + cubicweb-ctl [options command] + +To view available commands :: + + cubicweb-ctl + cubicweb-ctl --help + +Please note that the commands available depend on the *CubicWeb* packages +and cubes that have been installed. + +To view the help menu on a specific command :: + + cubicweb-ctl --help + +Listing available cubes and instances +------------------------------------- + +* ``list``, provides a list of the available configurations, cubes + and instances. 
+ + +Creation of a new cube +----------------------- + +Create your new cube :: + + cubicweb-ctl newcube + +This will create a new cube in +``/path/to/grshell-cubicweb/cubes/`` for a Mercurial +installation, or in ``/usr/share/cubicweb/cubes`` for a Debian +packages installation. + +Create an instance +------------------- + +You must ensure `~/etc/cubicweb.d/` exists prior to this. On Windows, the +'~' part will probably expand to 'Documents and Settings/user'. + +To create an instance from an existing cube, execute the following +command :: + + cubicweb-ctl create + +This command will create the configuration files of an instance in +``~/etc/cubicweb.d/``. + +The tool ``cubicweb-ctl`` executes the commands ``db-create`` and +``db-init`` when you run ``create`` so that you can complete an +instance creation in a single command. But of course it is possible +to issue these commands separately, at a later stage. + +Command to create/initialize an instance database +------------------------------------------------- + +* ``db-create``, creates the system database of an instance (tables and + extensions only) +* ``db-init``, initializes the system database of an instance + (schema, groups, users, workflows...) + +Commands to control instances +----------------------------- + +* ``start``, starts one or more or all instances + +of special interest:: + + start -D + +will start in debug mode (under Windows, starting without -D will not +work; you need instead to set up your instance as a service). 
+ +* ``stop``, stops one or more or all instances +* ``restart``, restarts one or more or all instances +* ``status``, returns the status of the instance(s) + +Commands to maintain instances +------------------------------ + +* ``upgrade``, launches the existing instances migration when a new version + of *CubicWeb* or the cubes installed is available +* ``shell``, opens a (Python based) migration shell for manual maintenance of the instance +* ``db-dump``, creates a dump of the system database +* ``db-restore``, restores a dump of the system database +* ``db-check``, checks data integrity of an instance. If the automatic correction + is activated, it is recommanded to create a dump before this operation. +* ``schema-sync``, synchronizes the persistent schema of an instance with + the instance schema. It is recommanded to create a dump before this operation. + +Commands to maintain i18n catalogs +---------------------------------- +* ``i18ncubicweb``, regenerates messages catalogs of the *CubicWeb* library +* ``i18ncube``, regenerates the messages catalogs of a cube +* ``i18ninstance``, recompiles the messages catalogs of an instance. + This is automatically done while upgrading. + +See also chapter :ref:`internationalization`. + +Other commands +-------------- +* ``delete``, deletes an instance (configuration files and database) diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/book/admin/index.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/admin/index.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,28 @@ +.. -*- coding: utf-8 -*- + +.. _Part3: + +-------------- +Administration +-------------- + +This part is for installation and administration of the *CubicWeb* framework and +instances based on that framework. + +.. 
toctree:: + :maxdepth: 1 + :numbered: + + setup + setup-windows + config + cubicweb-ctl + create-instance + instance-config + site-config + multisources + ldap + migration + additional-tips + rql-logs + diff -r 2fe19ba68daa -r 2fdf67ef3341 doc/book/admin/instance-config.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/book/admin/instance-config.rst Wed Dec 09 18:24:09 2015 +0100 @@ -0,0 +1,200 @@ +.. -*- coding: utf-8 -*- + + +Configure an instance +===================== + +While creating an instance, a configuration file is generated in:: + + $ (CW_INSTANCES_DIR) / / .conf + +For example:: + + /etc/cubicweb.d/myblog/all-in-one.conf + +It is a simple text file in the INI format +(http://en.wikipedia.org/wiki/INI_file). In the following description, +each option name is prefixed with its own section and followed by its +default value if necessary, e.g. "`
        .