# HG changeset patch # User Rémi Cardona # Date 1429690094 -7200 # Node ID 729f36a1bcfa49ee4343d1e83027f619f8d6dd2c # Parent ffb269e603484994cf79843734379cdfdee91c43# Parent e52efb73f9ee5746cf85f498891633be464e267a merge 3.20.6 into 3.21 diff -r e52efb73f9ee -r 729f36a1bcfa __pkginfo__.py --- a/__pkginfo__.py Thu Apr 02 13:54:00 2015 +0200 +++ b/__pkginfo__.py Wed Apr 22 10:08:14 2015 +0200 @@ -115,8 +115,6 @@ [join('share', 'cubicweb', 'cubes', 'shared', 'data'), [join(_data_dir, fname) for fname in listdir(_data_dir) if not isdir(join(_data_dir, fname))]], - [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'timeline'), - [join(_data_dir, 'timeline', fname) for fname in listdir(join(_data_dir, 'timeline'))]], [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'images'), [join(_data_dir, 'images', fname) for fname in listdir(join(_data_dir, 'images'))]], [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'jquery-treeview'), diff -r e52efb73f9ee -r 729f36a1bcfa cwconfig.py --- a/cwconfig.py Thu Apr 02 13:54:00 2015 +0200 +++ b/cwconfig.py Wed Apr 22 10:08:14 2015 +0200 @@ -279,7 +279,7 @@ ('default-text-format', {'type' : 'choice', 'choices': ('text/plain', 'text/rest', 'text/html', 'text/markdown'), - 'default': 'text/html', # use fckeditor in the web ui + 'default': 'text/plain', 'help': _('default text format for rich text fields.'), 'group': 'ui', }), diff -r e52efb73f9ee -r 729f36a1bcfa cwctl.py --- a/cwctl.py Thu Apr 02 13:54:00 2015 +0200 +++ b/cwctl.py Wed Apr 22 10:08:14 2015 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -401,7 +401,7 @@ if 'type' in odict and odict.get('level') <= self.config.config_level) for section in sections: - if section not in ('main', 'email', 'pyro', 'web'): + if section not in ('main', 'email', 'web'): print '\n' + underline_title('%s options' % section) config.input_config(section, self.config.config_level) # write down configuration @@ -900,9 +900,7 @@ ('repo-uri', {'short': 'H', 'type' : 'string', 'metavar': '://<[host][:port]>', 'help': 'URI of the CubicWeb repository to connect to. URI can be \ -pyro://[host:port] the Pyro name server host; if the pyro nameserver is not set, \ -it will be detected by using a broadcast query, a ZMQ URL or \ -inmemory:// (default) use an in-memory repository. THIS OPTION IS DEPRECATED, \ +a ZMQ URL or inmemory:// (default) use an in-memory repository. THIS OPTION IS DEPRECATED, \ directly give URI as instance id instead', 'group': 'remote' }), @@ -953,7 +951,7 @@ if self.config.repo_uri: warn('[3.16] --repo-uri option is deprecated, directly give the URI as instance id', DeprecationWarning) - if urlparse(self.config.repo_uri).scheme in ('pyro', 'inmemory'): + if urlparse(self.config.repo_uri).scheme == 'inmemory': appuri = '%s/%s' % (self.config.repo_uri.rstrip('/'), appuri) from cubicweb.utils import parse_repo_uri diff -r e52efb73f9ee -r 729f36a1bcfa dbapi.py --- a/dbapi.py Thu Apr 02 13:54:00 2015 +0200 +++ b/dbapi.py Wed Apr 22 10:08:14 2015 +0200 @@ -118,17 +118,8 @@ * a simple instance id for in-memory connection - * a uri like scheme://host:port/instanceid where scheme may be one of - 'pyro', 'inmemory' or 'zmqpickle' - - * if scheme is 'pyro', determine the name server address. If - not specified (e.g. 'pyro:///instanceid'), it will be detected through a - broadcast query. 
The instance id is the name of the instance in the name - server and may be prefixed by a group (e.g. - 'pyro:///:cubicweb.instanceid') - - * if scheme is handled by ZMQ (eg 'tcp'), you should not specify an - instance id + * a uri like scheme://host:port/instanceid where scheme must be + 'inmemory' Other arguments: @@ -137,8 +128,7 @@ :cnxprops: a :class:`ConnectionProperties` instance, allowing to specify - the connection method (eg in memory or pyro). A Pyro connection will be - established if you don't specify that argument. + the connection method (eg in memory). :setvreg: flag telling if a registry should be initialized for the connection. @@ -157,44 +147,18 @@ :kwargs: there goes authentication tokens. You usually have to specify a password for the given user, using a named 'password' argument. + """ if not urlparse(database).scheme: warn('[3.16] give an qualified URI as database instead of using ' 'host/cnxprops to specify the connection method', DeprecationWarning, stacklevel=2) - if cnxprops and cnxprops.cnxtype == 'zmq': - database = kwargs.pop('host') - elif cnxprops and cnxprops.cnxtype == 'inmemory': - database = 'inmemory://' + database - else: - host = kwargs.pop('host', None) - if host is None: - host = '' - group = kwargs.pop('group', None) - if group is None: - group = 'cubicweb' - database = 'pyro://%s/%s.%s' % (host, group, database) puri = urlparse(database) method = puri.scheme.lower() - if method == 'inmemory': - config = cwconfig.instance_configuration(puri.netloc) - else: - config = cwconfig.CubicWebNoAppConfiguration() + assert method == 'inmemory' + config = cwconfig.instance_configuration(puri.netloc) repo = get_repository(database, config=config) - if method == 'inmemory': - vreg = repo.vreg - elif setvreg: - if mulcnx: - multiple_connections_fix() - vreg = cwvreg.CWRegistryStore(config, initlog=initlog) - schema = repo.get_schema() - for oldetype, newetype in ETYPE_NAME_MAP.items(): - if oldetype in schema: - print 'aliasing', newetype, 'to', oldetype - schema._entities[newetype] = schema._entities[oldetype] - vreg.set_schema(schema) - else: - vreg = None + vreg = repo.vreg cnx = _repo_connect(repo, login, cnxprops=cnxprops, **kwargs) cnx.vreg = vreg return cnx @@ -735,10 +699,6 @@ @check_not_closed def cursor(self, req=None): """Return a new Cursor Object using the connection. - - On pyro connection, you should get cursor after calling if - load_appobjects method if desired (which you should call if you intend - to use ORM abilities). """ if req is None: req = self.request() diff -r e52efb73f9ee -r 729f36a1bcfa debian/control --- a/debian/control Thu Apr 02 13:54:00 2015 +0200 +++ b/debian/control Wed Apr 22 10:08:14 2015 +0200 @@ -58,7 +58,6 @@ | python-pysqlite2, python-passlib Recommends: - pyro (<< 4.0.0), cubicweb-documentation (= ${source:Version}) Suggests: python-zmq @@ -109,7 +108,6 @@ cubicweb-ctl (= ${source:Version}), python-twisted-web Recommends: - pyro (<< 4.0.0), cubicweb-documentation (= ${source:Version}) Description: twisted-based web interface for the CubicWeb framework CubicWeb is a semantic web application framework. 
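With the Pyro and zmqpickle schemes removed, the updated `cubicweb.dbapi.connect` docstring above leaves only a plain instance id or an `inmemory://` URI. A minimal sketch of a client script under that assumption — the instance id `myinstance` and the admin credentials are placeholders, and the script has to run on the host where the instance is installed, since no remote scheme remains:

.. sourcecode:: python

    from cubicweb import dbapi

    # only the in-memory scheme is handled now; pyro:// and zmqpickle
    # URIs are no longer accepted by connect()/get_repository()
    cnx = dbapi.connect(database='inmemory://myinstance',
                        login='admin', password='admin')
    cur = cnx.cursor()
    rset = cur.execute('Any L WHERE U is CWUser, U login L')
    for row in rset:
        print row[0]
    cnx.close()

For genuinely remote access, the 3.21 release notes below point to emerging alternatives such as rqlcontroller and cwclientlib instead.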
diff -r e52efb73f9ee -r 729f36a1bcfa debian/cubicweb-ctl.cubicweb.init --- a/debian/cubicweb-ctl.cubicweb.init Thu Apr 02 13:54:00 2015 +0200 +++ b/debian/cubicweb-ctl.cubicweb.init Wed Apr 22 10:08:14 2015 +0200 @@ -4,16 +4,14 @@ # Provides: cubicweb # Required-Start: $remote_fs $syslog $local_fs $network # Required-Stop: $remote_fs $syslog $local_fs $network -# Should-Start: postgresql pyro-nsd -# Should-Stop: postgresql pyro-nsd +# Should-Start: postgresql +# Should-Stop: postgresql # Default-Start: 2 3 4 5 # Default-Stop: 0 1 6 # Short-Description: Start cubicweb application at boot time ### END INIT INFO # FIXME Seems to be inadequate here -# FIXME If related to pyro, try instead: -# export PYRO_STORAGE="/tmp" cd /tmp # FIXME Work-around about the following lintian error diff -r e52efb73f9ee -r 729f36a1bcfa devtools/__init__.py --- a/devtools/__init__.py Thu Apr 02 13:54:00 2015 +0200 +++ b/devtools/__init__.py Wed Apr 22 10:08:14 2015 +0200 @@ -237,10 +237,6 @@ def available_languages(self, *args): return self.cw_languages() - def pyro_enabled(self): - # but export PYRO_MULTITHREAD=0 or you get problems with sqlite and - # threads - return True # XXX merge with BaseApptestConfiguration ? class ApptestConfiguration(BaseApptestConfiguration): @@ -251,7 +247,7 @@ skip_db_create_and_restore = False def __init__(self, appid, apphome=None, - log_threshold=logging.CRITICAL, sourcefile=None): + log_threshold=logging.WARNING, sourcefile=None): BaseApptestConfiguration.__init__(self, appid, apphome, log_threshold=log_threshold) self.init_repository = sourcefile is None diff -r e52efb73f9ee -r 729f36a1bcfa devtools/devctl.py --- a/devtools/devctl.py Thu Apr 02 13:54:00 2015 +0200 +++ b/devtools/devctl.py Wed Apr 22 10:08:14 2015 +0200 @@ -580,8 +580,8 @@ # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # -# You should have received a copy of the GNU Lesser General Public License along -# with this program. If not, see . +# You should have received a copy of the GNU Lesser General Public License +# along with this program. If not, see . ''', 'GPL': '''\ @@ -592,7 +592,8 @@ # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more +# details. # # You should have received a copy of the GNU General Public License along with # this program. If not, see . diff -r e52efb73f9ee -r 729f36a1bcfa devtools/httptest.py --- a/devtools/httptest.py Thu Apr 02 13:54:00 2015 +0200 +++ b/devtools/httptest.py Wed Apr 22 10:08:14 2015 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
@@ -78,8 +78,6 @@ self.global_set_option('port', port) # force rewrite here return 'http://127.0.0.1:%d/' % self['port'] - def pyro_enabled(self): - return False class CubicWebServerTC(CubicWebTC): diff -r e52efb73f9ee -r 729f36a1bcfa devtools/test/data/cubes/i18ntestcube/views.py --- a/devtools/test/data/cubes/i18ntestcube/views.py Thu Apr 02 13:54:00 2015 +0200 +++ b/devtools/test/data/cubes/i18ntestcube/views.py Wed Apr 22 10:08:14 2015 +0200 @@ -26,9 +26,6 @@ _myafs = MyAFS() -# XXX useless ASA logilab.common.registry is fixed -_myafs.__module__ = "cubes.i18ntestcube.views" - _myafs.tag_object_of(('*', 'in_forum', 'Forum'), 'main', 'inlined') afs.tag_object_of(('*', 'in_forum', 'Forum'), 'main', 'inlined') diff -r e52efb73f9ee -r 729f36a1bcfa devtools/testlib.py --- a/devtools/testlib.py Thu Apr 02 13:54:00 2015 +0200 +++ b/devtools/testlib.py Wed Apr 22 10:08:14 2015 +0200 @@ -315,6 +315,7 @@ """provide a new RepoAccess object for a given user The access is automatically closed at the end of the test.""" + login = unicode(login) access = RepoAccess(self.repo, login, self.requestcls) self._open_access.add(access) return access @@ -646,9 +647,11 @@ login = req assert not isinstance(self, type) req = self._admin_clt_cnx + if login is not None: + login = unicode(login) if password is None: - password = login.encode('utf8') - user = req.create_entity('CWUser', login=unicode(login), + password = login + user = req.create_entity('CWUser', login=login, upassword=password, **kwargs) req.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)' % ','.join(repr(str(g)) for g in groups), diff -r e52efb73f9ee -r 729f36a1bcfa doc/3.21.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/doc/3.21.rst Wed Apr 22 10:08:14 2015 +0200 @@ -0,0 +1,25 @@ +What's new in CubicWeb 3.21? +============================ + +Code movement +------------- + +The cubicweb.web.views.timeline module (providing the timeline-json, timeline +and static-timeline views) has moved to a standalone cube_ + +.. _cube: https://www.cubicweb.org/project/cubicweb-timeline + +Deprecated code drops +--------------------- + +* the user_callback api has been removed; people should use plain + ajax functions instead + +* the `Pyro` and `Zmq-pickle` remote repository access methods have + been entirely removed (emerging alternatives such as rqlcontroller + and cwclientlib should be used instead). Note that as a side effect, + "repository-only" instances (i.e. without a http component) are no + longer possible. If you have any such instances, you will need to + rename the configuration file from repository.conf to all-in-one.conf + and run ``cubicweb-ctl upgrade`` to update it. + diff -r e52efb73f9ee -r 729f36a1bcfa doc/book/en/admin/config.rst --- a/doc/book/en/admin/config.rst Thu Apr 02 13:54:00 2015 +0200 +++ b/doc/book/en/admin/config.rst Wed Apr 22 10:08:14 2015 +0200 @@ -14,7 +14,6 @@ For advanced features, have a look to: - - `Pyro configuration`_ - `Cubicweb resources configuration`_ .. _`configure the database`: DatabaseInstallation_ @@ -22,7 +21,6 @@ .. _`MySql configuration`: MySqlConfiguration_ .. _`SQLServer configuration`: SQLServerConfiguration_ .. _`SQLite configuration`: SQLiteConfiguration_ -.. _`Pyro configuration`: PyroConfiguration_ .. _`Cubicweb resources configuration`: RessourcesConfiguration_ @@ -43,7 +41,7 @@ Each instance can be configured with its own database connection information, that will be stored in the instance's :file:`sources` file. The database to use will be chosen when creating the instance. 
CubicWeb is known to run with -Postgresql (recommended), MySQL, SQLServer and SQLite. +Postgresql (recommended), SQLServer and SQLite, and may run with MySQL. Other possible sources of data include CubicWeb, Subversion, LDAP and Mercurial, but at least one relational database is required for CubicWeb to work. You do @@ -162,6 +160,8 @@ MySql ~~~~~ +.. warning:: + CubicWeb's MySQL support is not commonly used, so things may or may not work properly. You must add the following lines in ``/etc/mysql/my.cnf`` file:: @@ -227,29 +227,3 @@ SQLite is great for testing and to play with cubicweb but is not suited for production environments. - -.. _PyroConfiguration: - -Pyro configuration ------------------- - -Pyro name server -~~~~~~~~~~~~~~~~ - -If you want to use Pyro to access your instance remotely, or to have multi-source -or distributed configuration, it is required to have a Pyro name server running -on your network. By default it is detected by a broadcast request, but you can -specify a location in the instance's configuration file. - -To do so, you need to : - -* be sure to have installed it (see :ref:`InstallDependencies`) - -* launch the pyro name server with `pyro-nsd start` before starting cubicweb - -* under debian, edit the file :file:`/etc/default/pyro-nsd` so that the name - server pyro will be launched automatically when the machine fire up - -Note that you can use the pyro server without a running pyro nameserver. -Refer to `pyro-ns-host` server configuration option for details. - diff -r e52efb73f9ee -r 729f36a1bcfa doc/book/en/admin/index.rst --- a/doc/book/en/admin/index.rst Thu Apr 02 13:54:00 2015 +0200 +++ b/doc/book/en/admin/index.rst Wed Apr 22 10:08:14 2015 +0200 @@ -22,7 +22,6 @@ site-config multisources ldap - pyro migration additional-tips rql-logs diff -r e52efb73f9ee -r 729f36a1bcfa doc/book/en/admin/instance-config.rst --- a/doc/book/en/admin/instance-config.rst Thu Apr 02 13:54:00 2015 +0200 +++ b/doc/book/en/admin/instance-config.rst Wed Apr 22 10:08:14 2015 +0200 @@ -110,32 +110,6 @@ file where all requests RQL executed by the server are written -Pyro configuration for the instance ------------------------------------ -Web server side: - -:`pyro.pyro-instance-id`: - pyro identifier of RQL server (e.g. the instance name) - -RQL server side: - -:`main.pyro-server`: - boolean to switch on/off pyro server-side - -:`pyro.pyro-host`: - pyro host:port number. If no port is specified, it is assigned - automatically. - -RQL and web servers side: - -:`pyro.pyro-ns-host`: - hostname hosting pyro server name. If no value is - specified, it is located by a request from broadcast - -:`pyro.pyro-ns-group`: - pyro group in which to save the instance (will default to 'cubicweb') - - Configuring e-mail ------------------ RQL and web server side: diff -r e52efb73f9ee -r 729f36a1bcfa doc/book/en/admin/migration.rst --- a/doc/book/en/admin/migration.rst Thu Apr 02 13:54:00 2015 +0200 +++ b/doc/book/en/admin/migration.rst Wed Apr 22 10:08:14 2015 +0200 @@ -8,7 +8,7 @@ **Aim** : do the migration for N cubicweb instances hosted on a server to another with no downtime. -**Prerequisites** : have an explicit definition of the database host (not default or localhost). In our case, the database is hosted on another host. You are not migrating your pyro server. You are not using multisource (more documentation on that soon). +**Prerequisites** : have an explicit definition of the database host (not default or localhost). In our case, the database is hosted on another host. 
**Steps** : @@ -21,26 +21,18 @@ scp /etc/cubicweb.d/ newmachine:/etc/cubicweb.d/ scp /etc/apache2/sites-available/ newmachine:/etc/apache2/sites-available/ -3. *on new machine* : give new ids to pyro registration so the new instances can register :: - - cd /etc/cubicweb.d/ ; sed -i.bck 's/^pyro-instance-id=.*$/\02/' */all-in-one.conf - -4. *on new machine* : start your instances :: +3. *on new machine* : start your instances :: cubicweb start -5. *on new machine* : enable sites and modules for apache and start it, test it using by modifying your /etc/host file. +4. *on new machine* : enable sites and modules for apache and start it, test it using by modifying your /etc/host file. -6. change dns entry from your oldmachine to newmachine +5. change dns entry from your oldmachine to newmachine -7. shutdown your *old machine* (if it doesn't host other services or your database) +6. shutdown your *old machine* (if it doesn't host other services or your database) -8. That's it. +7. That's it. **Possible enhancements** : use right from the start a pound server behind your apache, that way you can add backends and smoothily migrate by shuting down backends that pound will take into account. -Migrate apache & cubicweb with pyro ------------------------------------ -FIXME TODO - diff -r e52efb73f9ee -r 729f36a1bcfa doc/book/en/admin/pyro.rst --- a/doc/book/en/admin/pyro.rst Thu Apr 02 13:54:00 2015 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,62 +0,0 @@ -.. _UsingPyro: - -Working with a distributed client (using Pyro) -============================================== - -In some circumstances, it is practical to split the repository and -web-client parts of the application for load-balancing reasons. Or -one wants to access the repository from independant scripts to consult -or update the database. - -Prerequisites -------------- - -For this to work, several steps have to be taken in order. - -You must first ensure that the appropriate software is installed and -running (see :ref:`ConfigEnv`):: - - pyro-nsd -x -p 6969 - -Then you have to set appropriate options in your configuration. For -instance:: - - pyro-server=yes - pyro-ns-host=localhost:6969 - - pyro-instance-id=myinstancename - -Connect to the CubicWeb repository from a python script -------------------------------------------------------- - -Assuming pyro-nsd is running and your instance is configured with ``pyro-server=yes``, -you will be able to use :mod:`cubicweb.dbapi` api to initiate the connection. - -.. note:: - Regardless of whether your instance is pyro activated or not, you can still - achieve this by using cubicweb-ctl shell scripts in a simpler way, as by default - it creates a repository 'in-memory' instead of connecting through pyro. That - also means you've to be on the host where the instance is running. - -Finally, the client (for instance a python script) must connect specifically -as in the following example code: - -.. sourcecode:: python - - from cubicweb import dbapi - - cnx = dbapi.connect(database='instance-id', user='admin', password='admin') - cnx.load_appobjects() - cur = cnx.cursor() - for name in (u'Personal', u'Professional', u'Computers'): - cur.execute('INSERT Tag T: T name %(n)s', {'n': name}) - cnx.commit() - -Calling :meth:`cubicweb.dbapi.load_appobjects`, will populate the -cubicweb registries (see :ref:`VRegistryIntro`) with the application -objects installed on the host where the script runs. You'll then be -allowed to use the ORM goodies and custom entity methods and views. 
Of -course this is optional, without it you can still get the repository -data through the connection but in a roughly way: only RQL cursors -will be available, e.g. you can't even build entity objects from the -result set. diff -r e52efb73f9ee -r 729f36a1bcfa doc/book/en/annexes/faq.rst --- a/doc/book/en/annexes/faq.rst Thu Apr 02 13:54:00 2015 +0200 +++ b/doc/book/en/annexes/faq.rst Wed Apr 22 10:08:14 2015 +0200 @@ -83,11 +83,9 @@ 2. it offers an abstraction layer allowing your applications to run on multiple back-ends. That means not only various SQL backends - (postgresql, sqlite, mysql), but also multiple databases at the - same time, and also non-SQL data stores like LDAP directories and - subversion/mercurial repositories (see the `vcsfile` - component). Google App Engine is yet another supported target for - RQL. + (postgresql, sqlite, sqlserver, mysql), but also non-SQL data stores like + LDAP directories and subversion/mercurial repositories (see the `vcsfile` + component). Which ajax library is CubicWeb using ? -------------------------------------- diff -r e52efb73f9ee -r 729f36a1bcfa doc/book/en/devrepo/datamodel/definition.rst --- a/doc/book/en/devrepo/datamodel/definition.rst Thu Apr 02 13:54:00 2015 +0200 +++ b/doc/book/en/devrepo/datamodel/definition.rst Wed Apr 22 10:08:14 2015 +0200 @@ -163,7 +163,7 @@ Common properties for attributes and relations: -* `description`: an unicode string describing an attribute or a +* `description`: a unicode string describing an attribute or a relation. By default this string will be used in the editing form of the entity, which means that it is supposed to help the end-user and should be flagged by the function `_` to be properly diff -r e52efb73f9ee -r 729f36a1bcfa doc/book/en/devrepo/entityclasses/data-as-objects.rst --- a/doc/book/en/devrepo/entityclasses/data-as-objects.rst Thu Apr 02 13:54:00 2015 +0200 +++ b/doc/book/en/devrepo/entityclasses/data-as-objects.rst Wed Apr 22 10:08:14 2015 +0200 @@ -17,7 +17,7 @@ `Formatting and output generation`: * :meth:`view(__vid, __registry='views', **kwargs)`, applies the given view to the entity - (and returns an unicode string) + (and returns a unicode string) * :meth:`absolute_url(*args, **kwargs)`, returns an absolute URL including the base-url diff -r e52efb73f9ee -r 729f36a1bcfa doc/book/en/devweb/edition/dissection.rst --- a/doc/book/en/devweb/edition/dissection.rst Thu Apr 02 13:54:00 2015 +0200 +++ b/doc/book/en/devweb/edition/dissection.rst Wed Apr 22 10:08:14 2015 +0200 @@ -320,7 +320,7 @@ * on success: - * an url (string) representing the next thing to jump to + * a url (string) representing the next thing to jump to Given the array structure described above, it is quite simple to manipulate the DOM to show the errors at appropriate places. diff -r e52efb73f9ee -r 729f36a1bcfa doc/book/en/devweb/request.rst --- a/doc/book/en/devweb/request.rst Thu Apr 02 13:54:00 2015 +0200 +++ b/doc/book/en/devweb/request.rst Wed Apr 22 10:08:14 2015 +0200 @@ -89,7 +89,7 @@ html headers * `add_js(jsfiles)`: adds the given list of javascript resources to the current html headers - * `add_onload(jscode)`: inject the given jscode fragment (an unicode + * `add_onload(jscode)`: inject the given jscode fragment (a unicode string) into the current html headers, wrapped inside a document.ready(...) 
or another ajax-friendly one-time trigger event * `add_header(header, values)`: adds the header/value pair to the diff -r e52efb73f9ee -r 729f36a1bcfa doc/book/en/devweb/views/basetemplates.rst --- a/doc/book/en/devweb/views/basetemplates.rst Thu Apr 02 13:54:00 2015 +0200 +++ b/doc/book/en/devweb/views/basetemplates.rst Wed Apr 22 10:08:14 2015 +0200 @@ -116,7 +116,7 @@ * `binary`: boolean flag telling if the view generates some text or a binary stream. Default to False. When view generates text argument given to `self.w` - **must be an unicode string**, encoded string otherwise. + **must be a unicode string**, encoded string otherwise. * `content_type`, view's content type, default to 'text/xhtml' @@ -132,7 +132,7 @@ You can also modify certain aspects of the main template of a page -when building an url or setting these parameters in the req.form: +when building a url or setting these parameters in the req.form: * `__notemplate`, if present (whatever the value assigned), only the content view is returned diff -r e52efb73f9ee -r 729f36a1bcfa doc/book/en/devweb/views/reledit.rst --- a/doc/book/en/devweb/views/reledit.rst Thu Apr 02 13:54:00 2015 +0200 +++ b/doc/book/en/devweb/views/reledit.rst Wed Apr 22 10:08:14 2015 +0200 @@ -105,9 +105,9 @@ ensure edition of the `Person` entity instead (using a standard automatic form) of the association of Company and Person. -Finally, the `reload` key accepts either a boolean, an eid or an -unicode string representing an url. If an eid is provided, it will be -internally transformed into an url. The eid/url case helps when one +Finally, the `reload` key accepts either a boolean, an eid or a +unicode string representing a url. If an eid is provided, it will be +internally transformed into a url. The eid/url case helps when one needs to reload and the current url is inappropriate. A common case is edition of a key attribute, which is part of the current url. If one user changed the Company's name from `lozilab` to `logilab`, reloading diff -r e52efb73f9ee -r 729f36a1bcfa doc/book/en/tutorials/base/customizing-the-application.rst --- a/doc/book/en/tutorials/base/customizing-the-application.rst Thu Apr 02 13:54:00 2015 +0200 +++ b/doc/book/en/tutorials/base/customizing-the-application.rst Wed Apr 22 10:08:14 2015 +0200 @@ -268,7 +268,7 @@ but this is not mandatory. * When we want to write something to the output stream, we simply call `self.w`, - with *must be passed an unicode string*. + which *must be passed a unicode string*. * The latest function is the most exotic stuff. 
The point is that without it, you would get an error at display time because the framework wouldn't be able to diff -r e52efb73f9ee -r 729f36a1bcfa doc/features_list.rst --- a/doc/features_list.rst Thu Apr 02 13:54:00 2015 +0200 +++ b/doc/features_list.rst Wed Apr 22 10:08:14 2015 +0200 @@ -45,7 +45,6 @@ | configuration - user / groups handling | 3 | 1 | | configuration - site configuration | 3 | 1 | | configuration - distributed configuration | 2 | 1 | -| configuration - pyro | 2 | 2 | +--------------------------------------------------------------------+----+----+ | multi-sources - capabilities | NA | 0 | | multi-sources - configuration | 2 | 0 | diff -r e52efb73f9ee -r 729f36a1bcfa doc/tools/pyjsrest.py --- a/doc/tools/pyjsrest.py Thu Apr 02 13:54:00 2015 +0200 +++ b/doc/tools/pyjsrest.py Wed Apr 22 10:08:14 2015 +0200 @@ -134,7 +134,6 @@ 'cubicweb.preferences', 'cubicweb.edition', 'cubicweb.reledit', - 'cubicweb.timeline-ext', ] FILES_TO_IGNORE = set([ @@ -152,7 +151,6 @@ 'cubicweb.fckcwconfig-full.js', 'cubicweb.goa.js', 'cubicweb.compat.js', - 'cubicweb.timeline-bundle.js', ]) if __name__ == '__main__': diff -r e52efb73f9ee -r 729f36a1bcfa entities/__init__.py --- a/entities/__init__.py Thu Apr 02 13:54:00 2015 +0200 +++ b/entities/__init__.py Wed Apr 22 10:08:14 2015 +0200 @@ -53,7 +53,7 @@ """ restrictions = ['X is %s' % cls.__regid__] selected = ['X'] - for attrschema in cls.e_schema.indexable_attributes(): + for attrschema in sorted(cls.e_schema.indexable_attributes()): varname = attrschema.type.upper() restrictions.append('X %s %s' % (attrschema, varname)) selected.append(varname) diff -r e52efb73f9ee -r 729f36a1bcfa entities/adapters.py --- a/entities/adapters.py Thu Apr 02 13:54:00 2015 +0200 +++ b/entities/adapters.py Wed Apr 22 10:08:14 2015 +0200 @@ -79,6 +79,8 @@ itree = self.entity.cw_adapt_to('ITree') if itree is not None: return itree.path()[:-1] + if view.msgid_timestamp: + return (self.entity.eid,) return () diff -r e52efb73f9ee -r 729f36a1bcfa entities/test/unittest_base.py --- a/entities/test/unittest_base.py Thu Apr 02 13:54:00 2015 +0200 +++ b/entities/test/unittest_base.py Wed Apr 22 10:08:14 2015 +0200 @@ -66,8 +66,8 @@ def test_fti_rql_method(self): with self.admin_access.web_request() as req: eclass = self.vreg['etypes'].etype_class('EmailAddress') - self.assertEqual(['Any X, ALIAS, ADDRESS WHERE X is EmailAddress, ' - 'X alias ALIAS, X address ADDRESS'], + self.assertEqual(['Any X, ADDRESS, ALIAS WHERE X is EmailAddress, ' + 'X address ADDRESS, X alias ALIAS'], eclass.cw_fti_index_rql_queries(req)) diff -r e52efb73f9ee -r 729f36a1bcfa etwist/server.py --- a/etwist/server.py Thu Apr 02 13:54:00 2015 +0200 +++ b/etwist/server.py Wed Apr 22 10:08:14 2015 +0200 @@ -65,14 +65,6 @@ # when we have an in-memory repository, clean unused sessions every XX # seconds and properly shutdown the server if config['repository-uri'] == 'inmemory://': - if config.pyro_enabled(): - # if pyro is enabled, we have to register to the pyro name - # server, create a pyro daemon, and create a task to handle pyro - # requests - self.appli.repo.warning('remote repository access through pyro is deprecated') - self.pyro_daemon = self.appli.repo.pyro_register() - self.pyro_listen_timeout = 0.02 - self.appli.repo.looping_task(1, self.pyro_loop_event) if config.mode != 'test': reactor.addSystemEventTrigger('before', 'shutdown', self.shutdown_event) @@ -93,13 +85,6 @@ """ self.appli.repo.shutdown() - def pyro_loop_event(self): - """listen for pyro events""" - try: - 
self.pyro_daemon.handleRequests(self.pyro_listen_timeout) - except select.error: - return - def getChild(self, path, request): """Indicate which resource to use to process down the URL's path""" return self diff -r e52efb73f9ee -r 729f36a1bcfa etwist/twconfig.py --- a/etwist/twconfig.py Thu Apr 02 13:54:00 2015 +0200 +++ b/etwist/twconfig.py Wed Apr 22 10:08:14 2015 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -17,9 +17,6 @@ # with CubicWeb. If not, see . """twisted server configurations: -* the "twisted" configuration to get a web instance running in a standalone - twisted web server which talk to a repository server using Pyro - * the "all-in-one" configuration to get a web instance running in a twisted web server integrating a repository server in the same process (only available if the repository part of the software is installed @@ -82,13 +79,6 @@ the repository rather than the user running the command', 'group': 'main', 'level': WebConfiguration.mode == 'system' }), - ('pyro-server', - {'type' : 'yn', - # pyro is only a recommends by default, so don't activate it here - 'default': False, - 'help': 'run a pyro server', - 'group': 'main', 'level': 1, - }), ('webserver-threadpool-size', {'type': 'int', 'default': 4, @@ -117,9 +107,6 @@ cubicweb_appobject_path = WebConfigurationBase.cubicweb_appobject_path | ServerConfiguration.cubicweb_appobject_path cube_appobject_path = WebConfigurationBase.cube_appobject_path | ServerConfiguration.cube_appobject_path - def pyro_enabled(self): - """tell if pyro is activated for the in memory repository""" - return self['pyro-server'] CONFIGURATIONS.append(AllInOneConfiguration) diff -r e52efb73f9ee -r 729f36a1bcfa etwist/twctl.py --- a/etwist/twctl.py Thu Apr 02 13:54:00 2015 +0200 +++ b/etwist/twctl.py Wed Apr 22 10:08:14 2015 +0200 @@ -71,11 +71,14 @@ cfgname = 'all-in-one' subcommand = 'cubicweb-twisted' - class AllInOneStopHandler(serverctl.RepositoryStopHandler): + class AllInOneStopHandler(CommandHandler): cmdname = 'stop' cfgname = 'all-in-one' subcommand = 'cubicweb-twisted' + def poststop(self): + pass + class AllInOneUpgradeHandler(TWUpgradeHandler): cfgname = 'all-in-one' diff -r e52efb73f9ee -r 729f36a1bcfa ext/rest.py --- a/ext/rest.py Thu Apr 02 13:54:00 2015 +0200 +++ b/ext/rest.py Wed Apr 22 10:08:14 2015 +0200 @@ -34,7 +34,6 @@ """ __docformat__ = "restructuredtext en" -from cStringIO import StringIO from itertools import chain from logging import getLogger from os.path import join @@ -405,7 +404,7 @@ # remove unprintable characters unauthorized in xml data = data.translate(ESC_CAR_TABLE) settings = {'input_encoding': encoding, 'output_encoding': 'unicode', - 'warning_stream': StringIO(), + 'warning_stream': False, 'traceback': True, # don't sys.exit 'stylesheet': None, # don't try to embed stylesheet (may cause # obscure bug due to docutils computing diff -r e52efb73f9ee -r 729f36a1bcfa hooks/metadata.py --- a/hooks/metadata.py Thu Apr 02 13:54:00 2015 +0200 +++ b/hooks/metadata.py Wed Apr 22 10:08:14 2015 +0200 @@ -199,17 +199,15 @@ oldsource = self._cw.entity_from_eid(schange[self.eidfrom]) entity = self._cw.entity_from_eid(self.eidfrom) # we don't want the moved entity to be reimported later. 
To - # distinguish this state, the trick is to change the associated - # record in the 'entities' system table with eid=-eid while leaving - # other fields unchanged, and to add a new record with eid=eid, - # source='system'. External source will then have consider case - # where `extid2eid` return a negative eid as 'this entity was known - # but has been moved, ignore it'. - self._cw.system_sql('UPDATE entities SET eid=-eid WHERE eid=%(eid)s', - {'eid': self.eidfrom}) + # distinguish this state, move the record from the 'entities' table + # to 'moved_entities'. External source will then have consider + # case where `extid2eid` returns a negative eid as 'this entity was + # known but has been moved, ignore it'. + attrs = {'eid': entity.eid, 'extid': self._cw.entity_metas(entity.eid)['extid']} + self._cw.system_sql(syssource.sqlgen.insert('moved_entities', attrs), attrs) attrs = {'type': entity.cw_etype, 'eid': entity.eid, 'extid': None, 'asource': 'system'} - self._cw.system_sql(syssource.sqlgen.insert('entities', attrs), attrs) + self._cw.system_sql(syssource.sqlgen.update('entities', attrs, ['eid']), attrs) # register an operation to update repository/sources caches ChangeEntitySourceUpdateCaches(self._cw, entity=entity, oldsource=oldsource.repo_source, diff -r e52efb73f9ee -r 729f36a1bcfa hooks/syncschema.py --- a/hooks/syncschema.py Thu Apr 02 13:54:00 2015 +0200 +++ b/hooks/syncschema.py Wed Apr 22 10:08:14 2015 +0200 @@ -29,7 +29,7 @@ from copy import copy from yams.schema import (BASE_TYPES, BadSchemaDefinition, RelationSchema, RelationDefinitionSchema) -from yams import buildobjs as ybo, schema2sql as y2sql, convert_default_value +from yams import buildobjs as ybo, convert_default_value from logilab.common.decorators import clear_cache @@ -37,7 +37,7 @@ from cubicweb.predicates import is_instance from cubicweb.schema import (SCHEMA_TYPES, META_RTYPES, VIRTUAL_RTYPES, CONSTRAINTS, ETYPE_NAME_MAP, display_name) -from cubicweb.server import hook, schemaserial as ss +from cubicweb.server import hook, schemaserial as ss, schema2sql as y2sql from cubicweb.server.sqlutils import SQL_PREFIX from cubicweb.hooks.synccomputed import RecomputeAttributeOperation @@ -72,7 +72,7 @@ table = SQL_PREFIX + etype column = SQL_PREFIX + rtype try: - cnx.system_sql(str('ALTER TABLE %s ADD %s integer' % (table, column)), + cnx.system_sql(str('ALTER TABLE %s ADD %s integer REFERENCES entities (eid)' % (table, column)), rollback_on_failure=False) cnx.info('added column %s to table %s', column, table) except Exception: @@ -319,8 +319,12 @@ if 'fulltext_container' in self.values: op = UpdateFTIndexOp.get_instance(cnx) for subjtype, objtype in rschema.rdefs: - op.add_data(subjtype) - op.add_data(objtype) + if self.values['fulltext_container'] == 'subject': + op.add_data(subjtype) + op.add_data(objtype) + else: + op.add_data(objtype) + op.add_data(subjtype) # update the in-memory schema first self.oldvalues = dict( (attr, getattr(rschema, attr)) for attr in self.values) self.rschema.__dict__.update(self.values) @@ -1313,6 +1317,7 @@ We wait after the commit to as the schema in memory is only updated after the commit. """ + containercls = list def postcommit_event(self): cnx = self.cnx diff -r e52efb73f9ee -r 729f36a1bcfa hooks/zmq.py --- a/hooks/zmq.py Thu Apr 02 13:54:00 2015 +0200 +++ b/hooks/zmq.py Wed Apr 22 10:08:14 2015 +0200 @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# copyright 2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. 
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -50,30 +50,3 @@ self.repo.app_instances_bus.start() -class ZMQRepositoryServerStopHook(hook.Hook): - __regid__ = 'zmqrepositoryserverstop' - events = ('server_shutdown',) - - def __call__(self): - server = getattr(self.repo, 'zmq_repo_server', None) - if server: - self.repo.zmq_repo_server.quit() - -class ZMQRepositoryServerStartHook(hook.Hook): - __regid__ = 'zmqrepositoryserverstart' - events = ('server_startup',) - - def __call__(self): - config = self.repo.config - if config.name == 'repository': - # start-repository command already starts a zmq repo - return - address = config.get('zmq-repository-address') - if not address: - return - self.repo.warning('remote access to the repository via zmq/pickle is deprecated') - from cubicweb.server import cwzmq - self.repo.zmq_repo_server = server = cwzmq.ZMQRepositoryServer(self.repo) - server.connect(address) - self.repo.threaded_task(server.run) - diff -r e52efb73f9ee -r 729f36a1bcfa misc/migration/3.21.0_Any.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/misc/migration/3.21.0_Any.py Wed Apr 22 10:08:14 2015 +0200 @@ -0,0 +1,19 @@ + +helper = repo.system_source.dbhelper +sql('DROP INDEX entities_extid_idx') +sql(helper.sql_create_index('entities', 'extid', True)) + +sql(''' +CREATE TABLE moved_entities ( + eid INTEGER PRIMARY KEY NOT NULL, + extid VARCHAR(256) UNIQUE +) +''') + +moved_entities = sql('SELECT -eid, extid FROM entities WHERE eid < 0') +cu = session.cnxset.cu +cu.executemany('INSERT INTO moved_entities (eid, extid) VALUES (%s, %s)', + moved_entities) +sql('DELETE FROM entities WHERE eid < 0') + +commit() diff -r e52efb73f9ee -r 729f36a1bcfa misc/migration/bootstrapmigration_repository.py --- a/misc/migration/bootstrapmigration_repository.py Thu Apr 02 13:54:00 2015 +0200 +++ b/misc/migration/bootstrapmigration_repository.py Wed Apr 22 10:08:14 2015 +0200 @@ -57,7 +57,7 @@ commit() if applcubicwebversion <= (3, 14, 4) and cubicwebversion >= (3, 14, 4): - from yams import schema2sql as y2sql + from cubicweb.server import schema2sql as y2sql dbhelper = repo.system_source.dbhelper rdefdef = schema['CWSource'].rdef('name') attrtype = y2sql.type_from_constraints(dbhelper, rdefdef.object, rdefdef.constraints).split()[0] diff -r e52efb73f9ee -r 729f36a1bcfa repoapi.py --- a/repoapi.py Thu Apr 02 13:54:00 2015 +0200 +++ b/repoapi.py Wed Apr 22 10:08:14 2015 +0200 @@ -1,4 +1,4 @@ -# copyright 2013-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2013-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -41,7 +41,7 @@ loading the repository for a client, eg web server, configuration). The returned repository may be an in-memory repository or a proxy object - using a specific RPC method, depending on the given URI (pyro or zmq). + using a specific RPC method, depending on the given URI. """ if uri is None: return _get_inmemory_repo(config, vreg) @@ -52,30 +52,7 @@ # me may have been called with a dummy 'inmemory://' uri ... 
return _get_inmemory_repo(config, vreg) - if protocol == 'pyroloc': # direct connection to the instance - from logilab.common.pyro_ext import get_proxy - uri = uri.replace('pyroloc', 'PYRO') - return get_proxy(uri) - - if protocol == 'pyro': # connection mediated through the pyro ns - from logilab.common.pyro_ext import ns_get_proxy - path = appid.strip('/') - if not path: - raise ConnectionError( - "can't find instance name in %s (expected to be the path component)" - % uri) - if '.' in path: - nsgroup, nsid = path.rsplit('.', 1) - else: - nsgroup = 'cubicweb' - nsid = path - return ns_get_proxy(nsid, defaultnsgroup=nsgroup, nshost=hostport) - - if protocol.startswith('zmqpickle-'): - from cubicweb.zmqclient import ZMQRepositoryClient - return ZMQRepositoryClient(uri) - else: - raise ConnectionError('unknown protocol: `%s`' % protocol) + raise ConnectionError('unknown protocol: `%s`' % protocol) def connect(repo, login, **kwargs): """Take credential and return associated ClientConnection. diff -r e52efb73f9ee -r 729f36a1bcfa req.py --- a/req.py Thu Apr 02 13:54:00 2015 +0200 +++ b/req.py Wed Apr 22 10:08:14 2015 +0200 @@ -357,7 +357,7 @@ for key, val in sorted(newparams.iteritems()): query[key] = (self.url_quote(val),) query = '&'.join(u'%s=%s' % (param, value) - for param, values in query.items() + for param, values in sorted(query.items()) for value in values) return urlunsplit((schema, netloc, path, query, fragment)) diff -r e52efb73f9ee -r 729f36a1bcfa rqlrewrite.py --- a/rqlrewrite.py Thu Apr 02 13:54:00 2015 +0200 +++ b/rqlrewrite.py Wed Apr 22 10:08:14 2015 +0200 @@ -89,7 +89,7 @@ mytyperel.r_type = 'is' if len(possibletypes) > 1: node = n.Function('IN') - for etype in possibletypes: + for etype in sorted(possibletypes): node.append(n.Constant(etype, 'etype')) else: etype = iter(possibletypes).next() diff -r e52efb73f9ee -r 729f36a1bcfa server/cwzmq.py --- a/server/cwzmq.py Thu Apr 02 13:54:00 2015 +0200 +++ b/server/cwzmq.py Wed Apr 22 10:08:14 2015 +0200 @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# copyright 2012-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2012-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -17,8 +17,6 @@ # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . 
-import cPickle -import traceback from threading import Thread from logging import getLogger @@ -27,16 +25,10 @@ import zmq.eventloop.zmqstream from cubicweb import set_log_methods -from cubicweb.server.server import QuitEvent, Finished + ctx = zmq.Context() -def cwproto_to_zmqaddr(address): - """ converts a cw-zmq address (like zmqpickle-tcp://:) - into a proper zmq address (tcp://:) - """ - assert address.startswith('zmqpickle-'), 'bad protocol string %s' % address - return address.split('-', 1)[1] # chop the `zmqpickle-` prefix class ZMQComm(object): """ @@ -134,132 +126,5 @@ self.ioloop.add_callback(lambda: self.stream.setsockopt(zmq.SUBSCRIBE, topic)) -class ZMQRepositoryServer(object): - - def __init__(self, repository): - """make the repository available as a PyRO object""" - self.address = None - self.repo = repository - self.socket = None - self.stream = None - self.loop = ioloop.IOLoop() - - # event queue - self.events = [] - - def connect(self, address): - self.address = cwproto_to_zmqaddr(address) - - def run(self): - """enter the service loop""" - # start repository looping tasks - self.socket = ctx.socket(zmq.REP) - self.stream = zmq.eventloop.zmqstream.ZMQStream(self.socket, io_loop=self.loop) - self.stream.bind(self.address) - self.info('ZMQ server bound on: %s', self.address) - - self.stream.on_recv(self.process_cmds) - - try: - self.loop.start() - except zmq.ZMQError: - self.warning('ZMQ event loop killed') - self.quit() - - def trigger_events(self): - """trigger ready events""" - for event in self.events[:]: - if event.is_ready(): - self.info('starting event %s', event) - event.fire(self) - try: - event.update() - except Finished: - self.events.remove(event) - - def process_cmd(self, cmd): - """Delegate the given command to the repository. - - ``cmd`` is a list of (method_name, args, kwargs) - where ``args`` is a list of positional arguments - and ``kwargs`` is a dictionnary of named arguments. - - >>> rset = delegate_to_repo(["execute", [sessionid], {'rql': rql}]) - - :note1: ``kwargs`` may be ommited - - >>> rset = delegate_to_repo(["execute", [sessionid, rql]]) - - :note2: both ``args`` and ``kwargs`` may be omitted - - >>> schema = delegate_to_repo(["get_schema"]) - >>> schema = delegate_to_repo("get_schema") # also allowed - - """ - cmd = cPickle.loads(cmd) - if not cmd: - raise AttributeError('function name required') - if isinstance(cmd, basestring): - cmd = [cmd] - if len(cmd) < 2: - cmd.append(()) - if len(cmd) < 3: - cmd.append({}) - cmd = list(cmd) + [(), {}] - funcname, args, kwargs = cmd[:3] - result = getattr(self.repo, funcname)(*args, **kwargs) - return result - - def process_cmds(self, cmds): - """Callback intended to be used with ``on_recv``. - - Call ``delegate_to_repo`` on each command and send a pickled of - each result recursively. - - Any exception are catched, pickled and sent. 
- """ - try: - for cmd in cmds: - result = self.process_cmd(cmd) - self.send_data(result) - except Exception as exc: - traceback.print_exc() - self.send_data(exc) - - def send_data(self, data): - self.socket.send_pyobj(data) - - def quit(self, shutdown_repo=False): - """stop the server""" - self.info('Quitting ZMQ server') - try: - self.loop.add_callback(self.loop.stop) - self.stream.on_recv(None) - self.stream.close() - except Exception as e: - print e - pass - if shutdown_repo and not self.repo.shutting_down: - event = QuitEvent() - event.fire(self) - - # server utilitities ###################################################### - - def install_sig_handlers(self): - """install signal handlers""" - import signal - self.info('installing signal handlers') - signal.signal(signal.SIGINT, lambda x, y, s=self: s.quit(shutdown_repo=True)) - signal.signal(signal.SIGTERM, lambda x, y, s=self: s.quit(shutdown_repo=True)) - - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - @classmethod - def info(cls, msg, *a, **kw): - pass - - set_log_methods(Publisher, getLogger('cubicweb.zmq.pub')) set_log_methods(Subscriber, getLogger('cubicweb.zmq.sub')) -set_log_methods(ZMQRepositoryServer, getLogger('cubicweb.zmq.repo')) diff -r e52efb73f9ee -r 729f36a1bcfa server/migractions.py --- a/server/migractions.py Thu Apr 02 13:54:00 2015 +0200 +++ b/server/migractions.py Wed Apr 22 10:08:14 2015 +0200 @@ -44,7 +44,6 @@ from logilab.common.decorators import cached, clear_cache from yams.constraints import SizeConstraint -from yams.schema2sql import eschema2sql, rschema2sql, unique_index_name from yams.schema import RelationDefinitionSchema from cubicweb import CW_SOFTWARE_ROOT, AuthenticationError, ExecutionError @@ -56,6 +55,7 @@ from cubicweb import repoapi from cubicweb.migration import MigrationHelper, yes from cubicweb.server import hook, schemaserial as ss +from cubicweb.server.schema2sql import eschema2sql, rschema2sql, unique_index_name from cubicweb.server.utils import manager_userpasswd from cubicweb.server.sqlutils import sqlexec, SQL_PREFIX diff -r e52efb73f9ee -r 729f36a1bcfa server/querier.py --- a/server/querier.py Thu Apr 02 13:54:00 2015 +0200 +++ b/server/querier.py Wed Apr 22 10:08:14 2015 +0200 @@ -570,7 +570,7 @@ except UnknownEid: # we want queries such as "Any X WHERE X eid 9999" return an # empty result instead of raising UnknownEid - return empty_rset(rql, args, rqlst) + return empty_rset(rql, args) if args and rql not in self._rql_ck_cache: self._rql_ck_cache[rql] = eidkeys if eidkeys: diff -r e52efb73f9ee -r 729f36a1bcfa server/repository.py --- a/server/repository.py Thu Apr 02 13:54:00 2015 +0200 +++ b/server/repository.py Wed Apr 22 10:08:14 2015 +0200 @@ -24,7 +24,6 @@ * brings these classes all together to provide a single access point to a cubicweb instance. 
* handles session management -* provides method for pyro registration, to call if pyro is enabled """ __docformat__ = "restructuredtext en" @@ -151,8 +150,6 @@ class Repository(object): """a repository provides access to a set of persistent storages for entities and relations - - XXX protect pyro access """ def __init__(self, config, tasks_manager=None, vreg=None): @@ -162,17 +159,11 @@ self.vreg = vreg self._tasks_manager = tasks_manager - self.pyro_registered = False - self.pyro_uri = None - # every pyro client is handled in its own thread; map these threads to - # the session we opened for them so we can clean up when they go away - self._pyro_sessions = {} self.app_instances_bus = NullEventBus() self.info('starting repository from %s', self.config.apphome) # dictionary of opened sessions self._sessions = {} - # list of functions to be called at regular interval # list of running threads self._running_threads = [] @@ -435,10 +426,6 @@ except Exception: self.exception('error while closing %s' % cnxset) continue - if self.pyro_registered: - if self._use_pyrons(): - pyro_unregister(self.config) - self.pyro_uri = None hits, misses = self.querier.cache_hit, self.querier.cache_miss try: self.info('rql st cache hit/miss: %s/%s (%s%% hits)', hits, misses, @@ -662,12 +649,6 @@ # try to get a user object user = self.authenticate_user(cnx, login, **kwargs) session = Session(user, self, cnxprops) - if threading.currentThread() in self._pyro_sessions: - # assume no pyro client does one get_repository followed by - # multiple repo.connect - assert self._pyro_sessions[threading.currentThread()] == None - self.debug('record session %s', session) - self._pyro_sessions[threading.currentThread()] = session user._cw = user.cw_rset.req = session user.cw_clear_relation_cache() self._sessions[session.sessionid] = session @@ -697,10 +678,6 @@ try: rset = self.querier.execute(session, rqlstring, args, build_descr) - # NOTE: the web front will (re)build it when needed - # e.g in facets - # Zeroed to avoid useless overhead with pyro - rset._rqlst = None return rset except (ValidationError, Unauthorized, RQLSyntaxError): raise @@ -810,8 +787,6 @@ # done during `session_close` hooks cnx.commit() session.close() - if threading.currentThread() in self._pyro_sessions: - self._pyro_sessions[threading.currentThread()] = None del self._sessions[sessionid] self.info('closed session %s for user %s', sessionid, session.user.login) @@ -1074,50 +1049,9 @@ hook.CleanupNewEidsCacheOp.get_instance(session).add_data(entity.eid) self.system_source.add_info(session, entity, source, extid) - def delete_info(self, session, entity, sourceuri): - """called by external source when some entity known by the system source - has been deleted in the external source - """ - # mark eid as being deleted in session info and setup cache update - # operation - hook.CleanupDeletedEidsCacheOp.get_instance(session).add_data(entity.eid) - self._delete_info(session, entity, sourceuri) - - def _delete_info(self, session, entity, sourceuri): - """delete system information on deletion of an entity: - - * delete all remaining relations from/to this entity - * call delete info on the system source - """ - pendingrtypes = session.transaction_data.get('pendingrtypes', ()) - # delete remaining relations: if user can delete the entity, he can - # delete all its relations without security checking - with session.security_enabled(read=False, write=False): - eid = entity.eid - for rschema, _, role in entity.e_schema.relation_definitions(): - if rschema.rule: - 
continue # computed relation - rtype = rschema.type - if rtype in schema.VIRTUAL_RTYPES or rtype in pendingrtypes: - continue - if role == 'subject': - # don't skip inlined relation so they are regularly - # deleted and so hooks are correctly called - rql = 'DELETE X %s Y WHERE X eid %%(x)s' % rtype - else: - rql = 'DELETE Y %s X WHERE X eid %%(x)s' % rtype - try: - session.execute(rql, {'x': eid}, build_descr=False) - except Exception: - if self.config.mode == 'test': - raise - self.exception('error while cascading delete for entity %s ' - 'from %s. RQL: %s', entity, sourceuri, rql) - self.system_source.delete_info_multi(session, [entity]) - - def _delete_info_multi(self, session, entities): - """same as _delete_info but accepts a list of entities with - the same etype and belinging to the same source. + def _delete_cascade_multi(self, session, entities): + """same as _delete_cascade but accepts a list of entities with + the same etype and belonging to the same source. """ pendingrtypes = session.transaction_data.get('pendingrtypes', ()) # delete remaining relations: if user can delete the entity, he can @@ -1150,7 +1084,6 @@ raise self.exception('error while cascading delete for entity %s. RQL: %s', entities, rql) - self.system_source.delete_info_multi(session, entities) def init_entity_caches(self, cnx, entity, source): """add entity to connection entities cache and repo's extid cache. @@ -1188,13 +1121,13 @@ edited.set_defaults() if cnx.is_hook_category_activated('integrity'): edited.check(creation=True) + self.add_info(cnx, entity, source, extid) try: source.add_entity(cnx, entity) except UniqueTogetherError as exc: userhdlr = cnx.vreg['adapters'].select( 'IUserFriendlyError', cnx, entity=entity, exc=exc) userhdlr.raise_user_exception() - self.add_info(cnx, entity, source, extid) edited.saved = entity._cw_is_saved = True # trigger after_add_entity after after_add_relation self.hm.call_hooks('after_add_entity', cnx, entity=entity) @@ -1309,8 +1242,9 @@ if server.DEBUG & server.DBG_REPO: print 'DELETE entities', etype, [entity.eid for entity in entities] self.hm.call_hooks('before_delete_entity', cnx, entities=entities) - self._delete_info_multi(cnx, entities) + self._delete_cascade_multi(cnx, entities) source.delete_entities(cnx, entities) + source.delete_info_multi(cnx, entities) self.hm.call_hooks('after_delete_entity', cnx, entities=entities) # don't clear cache here, it is done in a hook on commit @@ -1392,79 +1326,12 @@ eidfrom=subject, rtype=rtype, eidto=object) - # pyro handling ########################################################### - - @property - @cached - def pyro_appid(self): - from logilab.common import pyro_ext as pyro - config = self.config - appid = '%s.%s' % pyro.ns_group_and_id( - config['pyro-instance-id'] or config.appid, - config['pyro-ns-group']) - # ensure config['pyro-instance-id'] is a full qualified pyro name - config['pyro-instance-id'] = appid - return appid - - def _use_pyrons(self): - """return True if the pyro-ns-host is set to something else - than NO_PYRONS, meaning we want to go through a pyro - nameserver""" - return self.config['pyro-ns-host'] != 'NO_PYRONS' - - def pyro_register(self, host=''): - """register the repository as a pyro object""" - from logilab.common import pyro_ext as pyro - daemon = pyro.register_object(self, self.pyro_appid, - daemonhost=self.config['pyro-host'], - nshost=self.config['pyro-ns-host'], - use_pyrons=self._use_pyrons()) - self.info('repository registered as a pyro object %s', self.pyro_appid) - self.pyro_uri = 
pyro.get_object_uri(self.pyro_appid) - self.info('pyro uri is: %s', self.pyro_uri) - self.pyro_registered = True - # register a looping task to regularly ensure we're still registered - # into the pyro name server - if self._use_pyrons(): - self.looping_task(60*10, self._ensure_pyro_ns) - pyro_sessions = self._pyro_sessions - # install hacky function to free cnxset - def handleConnection(conn, tcpserver, sessions=pyro_sessions): - sessions[threading.currentThread()] = None - return tcpserver.getAdapter().__class__.handleConnection(tcpserver.getAdapter(), conn, tcpserver) - daemon.getAdapter().handleConnection = handleConnection - def removeConnection(conn, sessions=pyro_sessions): - daemon.__class__.removeConnection(daemon, conn) - session = sessions.pop(threading.currentThread(), None) - if session is None: - # client was not yet connected to the repo - return - if not session.closed: - self.close(session.sessionid) - daemon.removeConnection = removeConnection - return daemon - - def _ensure_pyro_ns(self): - if not self._use_pyrons(): - return - from logilab.common import pyro_ext as pyro - pyro.ns_reregister(self.pyro_appid, nshost=self.config['pyro-ns-host']) - self.info('repository re-registered as a pyro object %s', - self.pyro_appid) # these are overridden by set_log_methods below # only defining here to prevent pylint from complaining info = warning = error = critical = exception = debug = lambda msg, *a, **kw: None - -def pyro_unregister(config): - """unregister the repository from the pyro name server""" - from logilab.common.pyro_ext import ns_unregister - appid = config['pyro-instance-id'] or config.appid - ns_unregister(appid, config['pyro-ns-group'], config['pyro-ns-host']) - - from logging import getLogger from cubicweb import set_log_methods set_log_methods(Repository, getLogger('cubicweb.repository')) diff -r e52efb73f9ee -r 729f36a1bcfa server/schema2sql.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/schema2sql.py Wed Apr 22 10:08:14 2015 +0200 @@ -0,0 +1,245 @@ +# copyright 2004-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of yams. +# +# yams is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# yams is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with yams. If not, see . +"""write a schema as sql""" + +__docformat__ = "restructuredtext en" + +from hashlib import md5 + +from six.moves import range + +from yams.constraints import SizeConstraint, UniqueConstraint + +# default are usually not handled at the sql level. 
If you want them, set +# SET_DEFAULT to True +SET_DEFAULT = False + + +def schema2sql(dbhelper, schema, skip_entities=(), skip_relations=(), prefix=''): + """write to the output stream a SQL schema to store the objects + corresponding to the given schema + """ + output = [] + w = output.append + for etype in sorted(schema.entities()): + eschema = schema.eschema(etype) + if eschema.final or eschema.type in skip_entities: + continue + w(eschema2sql(dbhelper, eschema, skip_relations, prefix=prefix)) + for rtype in sorted(schema.relations()): + rschema = schema.rschema(rtype) + if rschema.final or rschema.inlined or rschema.rule: + continue + w(rschema2sql(rschema)) + return '\n'.join(output) + + +def dropschema2sql(dbhelper, schema, skip_entities=(), skip_relations=(), prefix=''): + """write to the output stream a SQL schema to store the objects + corresponding to the given schema + """ + output = [] + w = output.append + for etype in sorted(schema.entities()): + eschema = schema.eschema(etype) + if eschema.final or eschema.type in skip_entities: + continue + stmts = dropeschema2sql(dbhelper, eschema, skip_relations, prefix=prefix) + for stmt in stmts: + w(stmt) + for rtype in sorted(schema.relations()): + rschema = schema.rschema(rtype) + if rschema.final or rschema.inlined: + continue + w(droprschema2sql(rschema)) + return '\n'.join(output) + + +def eschema_attrs(eschema, skip_relations): + attrs = [attrdef for attrdef in eschema.attribute_definitions() + if not attrdef[0].type in skip_relations] + attrs += [(rschema, None) + for rschema in eschema.subject_relations() + if not rschema.final and rschema.inlined] + return attrs + +def unique_index_name(eschema, columns): + return u'unique_%s' % md5((eschema.type + + ',' + + ','.join(sorted(columns))).encode('ascii')).hexdigest() + +def iter_unique_index_names(eschema): + for columns in eschema._unique_together or (): + yield columns, unique_index_name(eschema, columns) + +def dropeschema2sql(dbhelper, eschema, skip_relations=(), prefix=''): + """return sql to drop an entity type's table""" + # not necessary to drop indexes, that's implictly done when + # dropping the table, but we need to drop SQLServer views used to + # create multicol unique indices + statements = [] + tablename = prefix + eschema.type + if eschema._unique_together is not None: + for columns, index_name in iter_unique_index_names(eschema): + cols = ['%s%s' % (prefix, col) for col in columns] + sqls = dbhelper.sqls_drop_multicol_unique_index(tablename, cols, index_name) + statements += sqls + statements += ['DROP TABLE %s;' % (tablename)] + return statements + + +def eschema2sql(dbhelper, eschema, skip_relations=(), prefix=''): + """write an entity schema as SQL statements to stdout""" + output = [] + w = output.append + table = prefix + eschema.type + w('CREATE TABLE %s(' % (table)) + attrs = eschema_attrs(eschema, skip_relations) + # XXX handle objectinline physical mode + for i in range(len(attrs)): + rschema, attrschema = attrs[i] + if attrschema is not None: + sqltype = aschema2sql(dbhelper, eschema, rschema, attrschema, + indent=' ') + else: # inline relation + sqltype = 'integer REFERENCES entities (eid)' + if i == len(attrs) - 1: + w(' %s%s %s' % (prefix, rschema.type, sqltype)) + else: + w(' %s%s %s,' % (prefix, rschema.type, sqltype)) + w(');') + # create indexes + for i in range(len(attrs)): + rschema, attrschema = attrs[i] + if attrschema is None or eschema.rdef(rschema).indexed: + w(dbhelper.sql_create_index(table, prefix + rschema.type)) + for columns, 
index_name in iter_unique_index_names(eschema): + cols = ['%s%s' % (prefix, col) for col in columns] + sqls = dbhelper.sqls_create_multicol_unique_index(table, cols, index_name) + for sql in sqls: + w(sql) + w('') + return '\n'.join(output) + + +def aschema2sql(dbhelper, eschema, rschema, aschema, creating=True, indent=''): + """write an attribute schema as SQL statements to stdout""" + attr = rschema.type + rdef = rschema.rdef(eschema.type, aschema.type) + sqltype = type_from_constraints(dbhelper, aschema.type, rdef.constraints, + creating) + if SET_DEFAULT: + default = eschema.default(attr) + if default is not None: + if aschema.type == 'Boolean': + sqltype += ' DEFAULT %s' % dbhelper.boolean_value(default) + elif aschema.type == 'String': + sqltype += ' DEFAULT %r' % str(default) + elif aschema.type in ('Int', 'BigInt', 'Float'): + sqltype += ' DEFAULT %s' % default + # XXX ignore default for other type + # this is expected for NOW / TODAY + if creating: + if rdef.uid: + sqltype += ' PRIMARY KEY REFERENCES entities (eid)' + elif rdef.cardinality[0] == '1': + # don't set NOT NULL if backend isn't able to change it later + if dbhelper.alter_column_support: + sqltype += ' NOT NULL' + # else we're getting sql type to alter a column, we don't want key / indexes + # / null modifiers + return sqltype + + +def type_from_constraints(dbhelper, etype, constraints, creating=True): + """return a sql type string corresponding to the constraints""" + constraints = list(constraints) + unique, sqltype = False, None + size_constrained_string = dbhelper.TYPE_MAPPING.get('SizeConstrainedString', 'varchar(%s)') + if etype == 'String': + for constraint in constraints: + if isinstance(constraint, SizeConstraint): + if constraint.max is not None: + sqltype = size_constrained_string % constraint.max + elif isinstance(constraint, UniqueConstraint): + unique = True + if sqltype is None: + sqltype = dbhelper.TYPE_MAPPING[etype] + if creating and unique: + sqltype += ' UNIQUE' + return sqltype + + +_SQL_SCHEMA = """ +CREATE TABLE %(table)s ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT %(table)s_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX %(table)s_from_idx ON %(table)s(eid_from); +CREATE INDEX %(table)s_to_idx ON %(table)s(eid_to);""" + + +def rschema2sql(rschema): + assert not rschema.rule + return _SQL_SCHEMA % {'table': '%s_relation' % rschema.type} + + +def droprschema2sql(rschema): + """return sql to drop a relation type's table""" + # not necessary to drop indexes, that's implictly done when dropping + # the table + return 'DROP TABLE %s_relation;' % rschema.type + + +def grant_schema(schema, user, set_owner=True, skip_entities=(), prefix=''): + """write to the output stream a SQL schema to store the objects + corresponding to the given schema + """ + output = [] + w = output.append + for etype in sorted(schema.entities()): + eschema = schema.eschema(etype) + if eschema.final or etype in skip_entities: + continue + w(grant_eschema(eschema, user, set_owner, prefix=prefix)) + for rtype in sorted(schema.relations()): + rschema = schema.rschema(rtype) + if rschema.final or rschema.inlined: + continue + w(grant_rschema(rschema, user, set_owner)) + return '\n'.join(output) + + +def grant_eschema(eschema, user, set_owner=True, prefix=''): + output = [] + w = output.append + etype = eschema.type + if set_owner: + w('ALTER TABLE %s%s OWNER TO %s;' % (prefix, etype, user)) + w('GRANT ALL ON %s%s TO %s;' % (prefix, etype, user)) + 
return '\n'.join(output) + + +def grant_rschema(rschema, user, set_owner=True): + output = [] + if set_owner: + output.append('ALTER TABLE %s_relation OWNER TO %s;' % (rschema.type, user)) + output.append('GRANT ALL ON %s_relation TO %s;' % (rschema.type, user)) + return '\n'.join(output) diff -r e52efb73f9ee -r 729f36a1bcfa server/schemaserial.py --- a/server/schemaserial.py Thu Apr 02 13:54:00 2015 +0200 +++ b/server/schemaserial.py Wed Apr 22 10:08:14 2015 +0200 @@ -24,13 +24,12 @@ from logilab.common.shellutils import ProgressBar -from yams import (BadSchemaDefinition, schema as schemamod, buildobjs as ybo, - schema2sql as y2sql) +from yams import BadSchemaDefinition, schema as schemamod, buildobjs as ybo from cubicweb import Binary from cubicweb.schema import (KNOWN_RPROPERTIES, CONSTRAINTS, ETYPE_NAME_MAP, VIRTUAL_RTYPES) -from cubicweb.server import sqlutils +from cubicweb.server import sqlutils, schema2sql as y2sql def group_mapping(cnx, interactive=True): diff -r e52efb73f9ee -r 729f36a1bcfa server/server.py --- a/server/server.py Thu Apr 02 13:54:00 2015 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,140 +0,0 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. -# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr -# -# This file is part of CubicWeb. -# -# CubicWeb is free software: you can redistribute it and/or modify it under the -# terms of the GNU Lesser General Public License as published by the Free -# Software Foundation, either version 2.1 of the License, or (at your option) -# any later version. -# -# CubicWeb is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more -# details. -# -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . 
-"""Pyro RQL server""" - -__docformat__ = "restructuredtext en" - -import select -from time import localtime, mktime - -from cubicweb.server.utils import TasksManager -from cubicweb.server.repository import Repository - -class Finished(Exception): - """raise to remove an event from the event loop""" - -class TimeEvent: - """base event""" - # timefunc = staticmethod(localtime) - timefunc = localtime - - def __init__(self, absolute=None, period=None): - # local time tuple - if absolute is None: - absolute = self.timefunc() - self.absolute = absolute - # optional period in seconds - self.period = period - - def is_ready(self): - """return true if the event is ready to be fired""" - now = self.timefunc() - if self.absolute <= now: - return True - return False - - def fire(self, server): - """fire the event - must be overridden by concrete events - """ - raise NotImplementedError() - - def update(self): - """update the absolute date for the event or raise a finished exception - """ - if self.period is None: - raise Finished - self.absolute = localtime(mktime(self.absolute) + self.period) - - -class QuitEvent(TimeEvent): - """stop the server""" - def fire(self, server): - server.repo.shutdown() - server.quiting = True - - -class RepositoryServer(object): - - def __init__(self, config): - """make the repository available as a PyRO object""" - self.config = config - self.repo = Repository(config, TasksManager()) - self.ns = None - self.quiting = None - # event queue - self.events = [] - - def add_event(self, event): - """add an event to the loop""" - self.info('adding event %s', event) - self.events.append(event) - - def trigger_events(self): - """trigger ready events""" - for event in self.events[:]: - if event.is_ready(): - self.info('starting event %s', event) - event.fire(self) - try: - event.update() - except Finished: - self.events.remove(event) - - def run(self, req_timeout=5.0): - """enter the service loop""" - # start repository looping tasks - self.repo.start_looping_tasks() - while self.quiting is None: - try: - self.daemon.handleRequests(req_timeout) - except select.error: - continue - finally: - self.trigger_events() - - def quit(self): - """stop the server""" - self.add_event(QuitEvent()) - - def connect(self, host='', port=0): - """the connect method on the repository only register to pyro if - necessary - """ - self.daemon = self.repo.pyro_register(host) - - # server utilitities ###################################################### - - def install_sig_handlers(self): - """install signal handlers""" - import signal - self.info('installing signal handlers') - signal.signal(signal.SIGINT, lambda x, y, s=self: s.quit()) - signal.signal(signal.SIGTERM, lambda x, y, s=self: s.quit()) - - - # these are overridden by set_log_methods below - # only defining here to prevent pylint from complaining - @classmethod - def info(cls, msg, *a, **kw): - pass - -from logging import getLogger -from cubicweb import set_log_methods -LOGGER = getLogger('cubicweb.reposerver') -set_log_methods(RepositoryServer, LOGGER) diff -r e52efb73f9ee -r 729f36a1bcfa server/serverconfig.py --- a/server/serverconfig.py Thu Apr 02 13:54:00 2015 +0200 +++ b/server/serverconfig.py Wed Apr 22 10:08:14 2015 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. 
@@ -197,44 +197,6 @@ notified of every changes.', 'group': 'email', 'level': 2, }), - # pyro services config - ('pyro-host', - {'type' : 'string', - 'default': None, - 'help': 'Pyro server host, if not detectable correctly through \ -gethostname(). It may contains port information using : notation, \ -and if not set, it will be choosen randomly', - 'group': 'pyro', 'level': 3, - }), - ('pyro-instance-id', - {'type' : 'string', - 'default': lgconfig.Method('default_instance_id'), - 'help': 'identifier of the CubicWeb instance in the Pyro name server', - 'group': 'pyro', 'level': 1, - }), - ('pyro-ns-host', - {'type' : 'string', - 'default': '', - 'help': 'Pyro name server\'s host. If not set, will be detected by a \ -broadcast query. It may contains port information using : notation. \ -Use "NO_PYRONS" to create a Pyro server but not register to a pyro nameserver', - 'group': 'pyro', 'level': 1, - }), - ('pyro-ns-group', - {'type' : 'string', - 'default': 'cubicweb', - 'help': 'Pyro name server\'s group where the repository will be \ -registered.', - 'group': 'pyro', 'level': 1, - }), - # zmq services config - ('zmq-repository-address', - {'type' : 'string', - 'default': None, - 'help': ('ZMQ URI on which the repository will be bound ' - 'to (of the form `zmqpickle-tcp://:`).'), - 'group': 'zmq', 'level': 3, - }), ('zmq-address-sub', {'type' : 'csv', 'default' : (), @@ -350,10 +312,6 @@ stream.write('[%s]\n%s\n' % (section, generate_source_config(sconfig))) restrict_perms_to_user(sourcesfile) - def pyro_enabled(self): - """pyro is always enabled in standalone repository configuration""" - return True - def load_schema(self, expand_cubes=False, **kwargs): from cubicweb.schema import CubicWebSchemaLoader if expand_cubes: @@ -387,6 +345,3 @@ return ServerMigrationHelper(self, schema, interactive=interactive, cnx=cnx, repo=repo, connect=connect, verbosity=verbosity) - - -CONFIGURATIONS.append(ServerConfiguration) diff -r e52efb73f9ee -r 729f36a1bcfa server/serverctl.py --- a/server/serverctl.py Thu Apr 02 13:54:00 2015 +0200 +++ b/server/serverctl.py Wed Apr 22 10:08:14 2015 +0200 @@ -38,7 +38,6 @@ from cubicweb.toolsutils import Command, CommandHandler, underline_title from cubicweb.cwctl import CWCTL, check_options_consistency, ConfigureInstanceCommand from cubicweb.server import SOURCE_TYPES -from cubicweb.server.repository import Repository from cubicweb.server.serverconfig import ( USER_OPTIONS, ServerConfiguration, SourceConfiguration, ask_source_config, generate_source_config) @@ -167,11 +166,6 @@ if not automatic: print underline_title('Configuring the repository') config.input_config('email', inputlevel) - # ask for pyro configuration if pyro is activated and we're not - # using a all-in-one config, in which case this is done by the web - # side command handler - if config.pyro_enabled() and config.name != 'all-in-one': - config.input_config('pyro', inputlevel) print '\n'+underline_title('Configuring the sources') sourcesfile = config.sources_file() # hack to make Method('default_instance_id') usable in db option defs @@ -301,33 +295,6 @@ raise ExecutionError(str(exc)) -class RepositoryStartHandler(CommandHandler): - cmdname = 'start' - cfgname = 'repository' - - def start_server(self, config): - command = ['cubicweb-ctl', 'start-repository'] - if config.debugmode: - command.append('--debug') - command.append('--loglevel') - command.append(config['log-threshold'].lower()) - command.append(config.appid) - subprocess.call(command) - return 1 - - -class 
RepositoryStopHandler(CommandHandler): - cmdname = 'stop' - cfgname = 'repository' - - def poststop(self): - """if pyro is enabled, ensure the repository is correctly unregistered - """ - if self.config.pyro_enabled(): - from cubicweb.server.repository import pyro_unregister - pyro_unregister(self.config) - - # repository specific commands ################################################ def createdb(helper, source, dbcnx, cursor, **kwargs): @@ -686,77 +653,6 @@ cnx.close() -class StartRepositoryCommand(Command): - """Start a CubicWeb RQL server for a given instance. - - The server will be remotely accessible through pyro or ZMQ - - - the identifier of the instance to initialize. - """ - name = 'start-repository' - arguments = '' - min_args = max_args = 1 - options = ( - ('debug', - {'short': 'D', 'action' : 'store_true', - 'help': 'start server in debug mode.'}), - ('loglevel', - {'short': 'l', 'type' : 'choice', 'metavar': '', - 'default': None, 'choices': ('debug', 'info', 'warning', 'error'), - 'help': 'debug if -D is set, error otherwise', - }), - ('address', - {'short': 'a', 'type': 'string', 'metavar': '://:', - 'default': '', - 'help': ('specify a ZMQ URI on which to bind, or use "pyro://"' - 'to create a pyro-based repository'), - }), - ) - - def create_repo(self, config): - address = self['address'] - if not address: - address = config.get('zmq-repository-address') or 'pyro://' - if address.startswith('pyro://'): - from cubicweb.server.server import RepositoryServer - return RepositoryServer(config), config['host'] - else: - from cubicweb.server.utils import TasksManager - from cubicweb.server.cwzmq import ZMQRepositoryServer - repo = Repository(config, TasksManager()) - return ZMQRepositoryServer(repo), address - - def run(self, args): - from logilab.common.daemon import daemonize, setugid - from cubicweb.cwctl import init_cmdline_log_threshold - print 'WARNING: Standalone repository with pyro or zmq access is deprecated' - appid = args[0] - debug = self['debug'] - if sys.platform == 'win32' and not debug: - logger = logging.getLogger('cubicweb.ctl') - logger.info('Forcing debug mode on win32 platform') - debug = True - config = ServerConfiguration.config_for(appid, debugmode=debug) - init_cmdline_log_threshold(config, self['loglevel']) - # create the server - server, address = self.create_repo(config) - # ensure the directory where the pid-file should be set exists (for - # instance /var/run/cubicweb may be deleted on computer restart) - pidfile = config['pid-file'] - piddir = os.path.dirname(pidfile) - # go ! 
(don't daemonize in debug mode) - if not os.path.exists(piddir): - os.makedirs(piddir) - if not debug and daemonize(pidfile, umask=config['umask']): - return - uid = config['uid'] - if uid is not None: - setugid(uid) - server.install_sig_handlers() - server.connect(address) - server.run() - def _remote_dump(host, appid, output, sudo=False): # XXX generate unique/portable file name @@ -1140,7 +1036,6 @@ for cmdclass in (CreateInstanceDBCommand, InitInstanceCommand, GrantUserOnInstanceCommand, ResetAdminPasswordCommand, - StartRepositoryCommand, DBDumpCommand, DBRestoreCommand, DBCopyCommand, AddSourceCommand, CheckRepositoryCommand, RebuildFTICommand, SynchronizeSourceCommand, SchemaDiffCommand, diff -r e52efb73f9ee -r 729f36a1bcfa server/session.py --- a/server/session.py Thu Apr 02 13:54:00 2015 +0200 +++ b/server/session.py Wed Apr 22 10:08:14 2015 +0200 @@ -971,7 +971,7 @@ def transaction_uuid(self, set=True): uuid = self.transaction_data.get('tx_uuid') if set and uuid is None: - self.transaction_data['tx_uuid'] = uuid = uuid4().hex + self.transaction_data['tx_uuid'] = uuid = unicode(uuid4().hex) self.repo.system_source.start_undoable_transaction(self, uuid) return uuid diff -r e52efb73f9ee -r 729f36a1bcfa server/sources/native.py --- a/server/sources/native.py Thu Apr 02 13:54:00 2015 +0200 +++ b/server/sources/native.py Wed Apr 22 10:08:14 2015 +0200 @@ -43,7 +43,6 @@ from logilab.common.shellutils import getlogin from logilab.database import get_db_helper, sqlgen -from yams import schema2sql as y2sql from yams.schema import role_name from cubicweb import (UnknownEid, AuthenticationError, ValidationError, Binary, @@ -53,6 +52,7 @@ from cubicweb.schema import VIRTUAL_RTYPES from cubicweb.cwconfig import CubicWebNoAppConfiguration from cubicweb.server import hook +from cubicweb.server import schema2sql as y2sql from cubicweb.server.utils import crypt_password, eschema_eid, verify_and_update from cubicweb.server.sqlutils import SQL_PREFIX, SQLAdapterMixIn from cubicweb.server.rqlannotation import set_qdata @@ -272,7 +272,7 @@ {'type' : 'string', 'default': 'postgres', # XXX use choice type - 'help': 'database driver (postgres, mysql, sqlite, sqlserver2005)', + 'help': 'database driver (postgres, sqlite, sqlserver2005)', 'group': 'native-source', 'level': 0, }), ('db-host', @@ -614,8 +614,8 @@ sql = self.sqlgen.insert(SQL_PREFIX + entity.cw_etype, attrs) self.doexec(cnx, sql, attrs) if cnx.ertype_supports_undo(entity.cw_etype): - self._record_tx_action(cnx, 'tx_entity_actions', 'C', - etype=entity.cw_etype, eid=entity.eid) + self._record_tx_action(cnx, 'tx_entity_actions', u'C', + etype=unicode(entity.cw_etype), eid=entity.eid) def update_entity(self, cnx, entity): """replace an entity in the source""" @@ -623,8 +623,8 @@ attrs = self.preprocess_entity(entity) if cnx.ertype_supports_undo(entity.cw_etype): changes = self._save_attrs(cnx, entity, attrs) - self._record_tx_action(cnx, 'tx_entity_actions', 'U', - etype=entity.cw_etype, eid=entity.eid, + self._record_tx_action(cnx, 'tx_entity_actions', u'U', + etype=unicode(entity.cw_etype), eid=entity.eid, changes=self._binary(dumps(changes))) sql = self.sqlgen.update(SQL_PREFIX + entity.cw_etype, attrs, ['cw_eid']) @@ -638,8 +638,8 @@ for r in entity.e_schema.subject_relations() if (r.final or r.inlined) and not r in VIRTUAL_RTYPES] changes = self._save_attrs(cnx, entity, attrs) - self._record_tx_action(cnx, 'tx_entity_actions', 'D', - etype=entity.cw_etype, eid=entity.eid, + self._record_tx_action(cnx, 'tx_entity_actions', u'D', + 
etype=unicode(entity.cw_etype), eid=entity.eid, changes=self._binary(dumps(changes))) attrs = {'cw_eid': entity.eid} sql = self.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs) @@ -649,16 +649,16 @@ """add a relation to the source""" self._add_relations(cnx, rtype, [(subject, object)], inlined) if cnx.ertype_supports_undo(rtype): - self._record_tx_action(cnx, 'tx_relation_actions', 'A', - eid_from=subject, rtype=rtype, eid_to=object) + self._record_tx_action(cnx, 'tx_relation_actions', u'A', + eid_from=subject, rtype=unicode(rtype), eid_to=object) def add_relations(self, cnx, rtype, subj_obj_list, inlined=False): """add a relations to the source""" self._add_relations(cnx, rtype, subj_obj_list, inlined) if cnx.ertype_supports_undo(rtype): for subject, object in subj_obj_list: - self._record_tx_action(cnx, 'tx_relation_actions', 'A', - eid_from=subject, rtype=rtype, eid_to=object) + self._record_tx_action(cnx, 'tx_relation_actions', u'A', + eid_from=subject, rtype=unicode(rtype), eid_to=object) def _add_relations(self, cnx, rtype, subj_obj_list, inlined=False): """add a relation to the source""" @@ -689,8 +689,8 @@ rschema = self.schema.rschema(rtype) self._delete_relation(cnx, subject, rtype, object, rschema.inlined) if cnx.ertype_supports_undo(rtype): - self._record_tx_action(cnx, 'tx_relation_actions', 'R', - eid_from=subject, rtype=rtype, eid_to=object) + self._record_tx_action(cnx, 'tx_relation_actions', u'R', + eid_from=subject, rtype=unicode(rtype), eid_to=object) def _delete_relation(self, cnx, subject, rtype, object, inlined=False): """delete a relation from the source""" @@ -757,7 +757,7 @@ it's a function just so that it shows up in profiling """ if server.DEBUG & server.DBG_SQL: - print 'execmany', query, 'with', len(args), 'arguments' + print 'execmany', query, 'with', len(args), 'arguments', cnx.cnxset.cnx cursor = cnx.cnxset.cu try: # str(query) to avoid error if it's a unicode string @@ -868,9 +868,10 @@ def extid2eid(self, cnx, extid): """get eid from an external id. 
Return None if no record found.""" assert isinstance(extid, str) + args = {'x': b64encode(extid)} cursor = self.doexec(cnx, 'SELECT eid FROM entities WHERE extid=%(x)s', - {'x': b64encode(extid)}) + args) # XXX testing rowcount cause strange bug with sqlite, results are there # but rowcount is 0 #if cursor.rowcount > 0: @@ -880,6 +881,17 @@ return result[0] except Exception: pass + cursor = self.doexec(cnx, + 'SELECT eid FROM moved_entities WHERE extid=%(x)s', + args) + try: + result = cursor.fetchone() + if result: + # entity was moved to the system source, return negative + # number to tell the external source to ignore it + return -result[0] + except Exception: + pass return None def _handle_is_relation_sql(self, cnx, sql, attrs): @@ -965,7 +977,7 @@ # only, and with no eid specified assert actionfilters.get('action', 'C') in 'CUD' assert not 'eid' in actionfilters - tearestr['etype'] = val + tearestr['etype'] = unicode(val) elif key == 'eid': # eid filter may apply to 'eid' of tx_entity_actions or to # 'eid_from' OR 'eid_to' of tx_relation_actions @@ -976,10 +988,10 @@ trarestr['eid_to'] = val elif key == 'action': if val in 'CUD': - tearestr['txa_action'] = val + tearestr['txa_action'] = unicode(val) else: assert val in 'AR' - trarestr['txa_action'] = val + trarestr['txa_action'] = unicode(val) else: raise AssertionError('unknow filter %s' % key) assert trarestr or tearestr, "can't only filter on 'public'" @@ -1014,10 +1026,11 @@ def tx_info(self, cnx, txuuid): """See :class:`cubicweb.repoapi.ClientConnection.transaction_info`""" - return tx.Transaction(txuuid, *self._tx_info(cnx, txuuid)) + return tx.Transaction(txuuid, *self._tx_info(cnx, unicode(txuuid))) def tx_actions(self, cnx, txuuid, public): """See :class:`cubicweb.repoapi.ClientConnection.transaction_actions`""" + txuuid = unicode(txuuid) self._tx_info(cnx, txuuid) restr = {'tx_uuid': txuuid} if public: @@ -1151,6 +1164,7 @@ err(cnx._("can't restore entity %(eid)s of type %(eschema)s, " "target of %(rtype)s (eid %(value)s) does not exist any longer") % locals()) + changes[column] = None elif eschema.destination(rtype) in ('Bytes', 'Password'): changes[column] = self._binary(value) edited[rtype] = Binary(value) @@ -1182,10 +1196,10 @@ self.repo.hm.call_hooks('before_add_entity', cnx, entity=entity) # restore the entity action.changes['cw_eid'] = eid + # restore record in entities (will update fti if needed) + self.add_info(cnx, entity, self, None) sql = self.sqlgen.insert(SQL_PREFIX + etype, action.changes) self.doexec(cnx, sql, action.changes) - # restore record in entities (will update fti if needed) - self.add_info(cnx, entity, self, None) self.repo.hm.call_hooks('after_add_entity', cnx, entity=entity) return errors @@ -1386,10 +1400,13 @@ eid INTEGER PRIMARY KEY NOT NULL, type VARCHAR(64) NOT NULL, asource VARCHAR(128) NOT NULL, - extid VARCHAR(256) + extid VARCHAR(256) UNIQUE );; CREATE INDEX entities_type_idx ON entities(type);; -CREATE INDEX entities_extid_idx ON entities(extid);; +CREATE TABLE moved_entities ( + eid INTEGER PRIMARY KEY NOT NULL, + extid VARCHAR(256) UNIQUE +);; CREATE TABLE transactions ( tx_uuid CHAR(32) PRIMARY KEY NOT NULL, diff -r e52efb73f9ee -r 729f36a1bcfa server/sqlutils.py --- a/server/sqlutils.py Thu Apr 02 13:54:00 2015 +0200 +++ b/server/sqlutils.py Wed Apr 22 10:08:14 2015 +0200 @@ -103,7 +103,7 @@ """return sql to give all access privileges to the given user on the system schema """ - from yams.schema2sql import grant_schema + from cubicweb.server.schema2sql import grant_schema from 
cubicweb.server.sources import native output = [] w = output.append @@ -121,7 +121,7 @@ user=None, set_owner=False, skip_relations=PURE_VIRTUAL_RTYPES, skip_entities=()): """return the system sql schema, according to the given parameters""" - from yams.schema2sql import schema2sql + from cubicweb.server.schema2sql import schema2sql from cubicweb.server.sources import native if set_owner: assert user, 'user is argument required when set_owner is true' @@ -146,7 +146,7 @@ def sqldropschema(schema, driver, text_index=True, skip_relations=PURE_VIRTUAL_RTYPES, skip_entities=()): """return the sql to drop the schema, according to the given parameters""" - from yams.schema2sql import dropschema2sql + from cubicweb.server.schema2sql import dropschema2sql from cubicweb.server.sources import native output = [] w = output.append @@ -500,6 +500,8 @@ return (dt.weekday() + 1) % 7 cnx.create_function("WEEKDAY", 1, weekday) + cnx.cursor().execute("pragma foreign_keys = on") + import yams.constraints yams.constraints.patch_sqlite_decimal() diff -r e52efb73f9ee -r 729f36a1bcfa server/test/data-schema2sql/__init__.py diff -r e52efb73f9ee -r 729f36a1bcfa server/test/data-schema2sql/schema/Company.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/data-schema2sql/schema/Company.py Wed Apr 22 10:08:14 2015 +0200 @@ -0,0 +1,67 @@ +# copyright 2004-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of yams. +# +# yams is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# yams is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with yams. If not, see . +from yams.buildobjs import EntityType, RelationType, RelationDefinition, \ + SubjectRelation, String + +class Company(EntityType): + name = String() + +class Subcompany(Company): + __specializes_schema__ = True + subcompany_of = SubjectRelation('Company') + +class Division(Company): + __specializes_schema__ = True + division_of = SubjectRelation('Company') + +class Subdivision(Division): + __specializes_schema__ = True + subdivision_of = SubjectRelation('Company') + +class Employee(EntityType): + works_for = SubjectRelation('Company') + +class require_permission(RelationType): + """link a permission to the entity. This permission should be used in the + security definition of the entity's type to be useful. 
+ """ + fulltext_container = 'subject' + __permissions__ = { + 'read': ('managers', 'users', 'guests'), + 'add': ('managers',), + 'delete': ('managers',), + } + + +class missing_require_permission(RelationDefinition): + name = 'require_permission' + subject = 'Company' + object = 'EPermission' + +class EPermission(EntityType): + """entity type that may be used to construct some advanced security configuration + """ + __permissions__ = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers',), + 'delete': ('managers',), + 'update': ('managers', 'owners',), + } + name = String(required=True, indexed=True, internationalizable=True, + fulltextindexed=True, maxsize=100, + description=_('name or identifier of the permission')) diff -r e52efb73f9ee -r 729f36a1bcfa server/test/data-schema2sql/schema/Dates.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/data-schema2sql/schema/Dates.py Wed Apr 22 10:08:14 2015 +0200 @@ -0,0 +1,28 @@ +# copyright 2004-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of yams. +# +# yams is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# yams is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with yams. If not, see . +from datetime import time, date +from yams.buildobjs import EntityType, Datetime, Date, Time + +class Datetest(EntityType): + dt1 = Datetime(default=u'now') + dt2 = Datetime(default=u'today') + d1 = Date(default=u'today') + d2 = Date(default=date(2007, 12, 11)) + t1 = Time(default=time(8, 40)) + t2 = Time(default=time(9, 45)) + diff -r e52efb73f9ee -r 729f36a1bcfa server/test/data-schema2sql/schema/State.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/data-schema2sql/schema/State.py Wed Apr 22 10:08:14 2015 +0200 @@ -0,0 +1,81 @@ +# copyright 2004-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of yams. +# +# yams is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# yams is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with yams. If not, see . 
+from yams.buildobjs import (EntityType, RelationType, RelationDefinition, + SubjectRelation, Int, String, Boolean) +from yams.constraints import SizeConstraint, UniqueConstraint + +from __init__ import RESTRICTED_RTYPE_PERMS + +class State(EntityType): + """used to associate simple states to an entity + type and/or to define workflows + """ + __permissions__ = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers', 'users',), + 'delete': ('managers', 'owners',), + 'update': ('managers', 'owners',), + } + + # attributes + eid = Int(required=True, uid=True) + name = String(required=True, + indexed=True, internationalizable=True, + constraints=[SizeConstraint(256)]) + description = String(fulltextindexed=True) + # relations + state_of = SubjectRelation('Eetype', cardinality='+*') + next_state = SubjectRelation('State', cardinality='**') + + +class state_of(RelationType): + """link a state to one or more entity type""" + __permissions__ = RESTRICTED_RTYPE_PERMS + +class next_state(RelationType): + """define a workflow by associating a state to possible following states + """ + __permissions__ = RESTRICTED_RTYPE_PERMS + +class initial_state(RelationType): + """indicate which state should be used by default when an entity using states + is created + """ + __permissions__ = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers', 'users',), + 'delete': ('managers', 'users',), + } + subject = 'Eetype' + object = 'State' + cardinality = '?*' + inlined = True + +class Eetype(EntityType): + """define an entity type, used to build the application schema""" + __permissions__ = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers',), + 'delete': ('managers',), + 'update': ('managers', 'owners',), + } + name = String(required=True, indexed=True, internationalizable=True, + constraints=[UniqueConstraint(), SizeConstraint(64)]) + description = String(fulltextindexed=True) + meta = Boolean() + final = Boolean() diff -r e52efb73f9ee -r 729f36a1bcfa server/test/data-schema2sql/schema/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/data-schema2sql/schema/__init__.py Wed Apr 22 10:08:14 2015 +0200 @@ -0,0 +1,23 @@ +# copyright 2004-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of yams. +# +# yams is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# yams is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with yams. If not, see . +"""test schema""" +RESTRICTED_RTYPE_PERMS = { + 'read': ('managers', 'users', 'guests',), + 'add': ('managers',), + 'delete': ('managers',), + } diff -r e52efb73f9ee -r 729f36a1bcfa server/test/data-schema2sql/schema/schema.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/data-schema2sql/schema/schema.py Wed Apr 22 10:08:14 2015 +0200 @@ -0,0 +1,112 @@ +# copyright 2004-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of yams. 
+# +# yams is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# yams is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with yams. If not, see . +from yams.buildobjs import (EntityType, RelationDefinition, RelationType, + SubjectRelation, String, Int, Float, Date, Boolean) + +class Affaire(EntityType): + sujet = String(maxsize=128) + ref = String(maxsize=12) + + concerne = SubjectRelation('Societe') + obj_wildcard = SubjectRelation('*') + sym_rel = SubjectRelation('Person', symmetric=True) + inline_rel = SubjectRelation('Person', inlined=True, cardinality='?*') + +class subj_wildcard(RelationDefinition): + subject = '*' + object = 'Affaire' + + +class Person(EntityType): + __unique_together__ = [('nom', 'prenom')] + nom = String(maxsize=64, fulltextindexed=True, required=True) + prenom = String(maxsize=64, fulltextindexed=True) + sexe = String(maxsize=1, default='M') + promo = String(vocabulary=('bon','pasbon')) + titre = String(maxsize=128, fulltextindexed=True) + adel = String(maxsize=128) + ass = String(maxsize=128) + web = String(maxsize=128) + tel = Int(__permissions__={'read': (), + 'add': ('managers',), + 'update': ('managers',)}) + fax = Int() + datenaiss = Date() + test = Boolean() + salary = Float() + travaille = SubjectRelation('Societe', + __permissions__={'read': (), + 'add': (), + 'delete': ('managers',), + }) + + evaluee = SubjectRelation('Note') + +class Salaried(Person): + __specializes_schema__ = True + +class Societe(EntityType): + nom = String(maxsize=64, fulltextindexed=True) + web = String(maxsize=128) + tel = Int() + fax = Int() + rncs = String(maxsize=32) + ad1 = String(maxsize=128) + ad2 = String(maxsize=128) + ad3 = String(maxsize=128) + cp = String(maxsize=12) + ville = String(maxsize=32) + + evaluee = SubjectRelation('Note') + + +class Note(EntityType): + date = String(maxsize=10) + type = String(maxsize=1) + para = String(maxsize=512) + + +class pkginfo(EntityType): + modname = String(maxsize=30, required=True) + version = String(maxsize=10, required=True, default='0.1') + copyright = String(required=True) + license = String(vocabulary=('GPL', 'ZPL')) + short_desc = String(maxsize=80, required=True) + long_desc = String(required=True, fulltextindexed=True) + author = String(maxsize=100, required=True) + author_email = String(maxsize=100, required=True) + mailinglist = String(maxsize=100) + debian_handler = String(vocabulary=('machin', 'bidule')) + + +class evaluee(RelationType): + __permissions__ = { + 'read': ('managers',), + 'add': ('managers',), + 'delete': ('managers',), + } + +class concerne(RelationDefinition): + subject = 'Person' + object = 'Affaire' + __permissions__ = { + 'read': ('managers',), + 'add': ('managers',), + 'delete': ('managers',), + } + diff -r e52efb73f9ee -r 729f36a1bcfa server/test/data-schema2sql/schema/toignore --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/data-schema2sql/schema/toignore Wed Apr 22 10:08:14 2015 +0200 @@ -0,0 +1,1 @@ +coucou diff -r e52efb73f9ee -r 729f36a1bcfa server/test/unittest_migractions.py --- a/server/test/unittest_migractions.py 
Thu Apr 02 13:54:00 2015 +0200 +++ b/server/test/unittest_migractions.py Wed Apr 22 10:08:14 2015 +0200 @@ -259,8 +259,8 @@ 'filed_under2', 'has_text', 'identity', 'in_basket', 'is', 'is_instance_of', 'modification_date', 'name', 'owned_by']) - self.assertEqual([str(rs) for rs in self.schema['Folder2'].object_relations()], - ['filed_under2', 'identity']) + self.assertCountEqual([str(rs) for rs in self.schema['Folder2'].object_relations()], + ['filed_under2', 'identity']) # Old will be missing as it has been renamed into 'New' in the migrated # schema while New hasn't been added here. self.assertEqual(sorted(str(e) for e in self.schema['filed_under2'].subjects()), diff -r e52efb73f9ee -r 729f36a1bcfa server/test/unittest_repository.py --- a/server/test/unittest_repository.py Thu Apr 02 13:54:00 2015 +0200 +++ b/server/test/unittest_repository.py Wed Apr 22 10:08:14 2015 +0200 @@ -31,10 +31,9 @@ UnknownEid, AuthenticationError, Unauthorized, QueryError) from cubicweb.predicates import is_instance from cubicweb.schema import RQLConstraint -from cubicweb.dbapi import connect, multiple_connections_unfix from cubicweb.devtools.testlib import CubicWebTC from cubicweb.devtools.repotest import tuplify -from cubicweb.server import repository, hook +from cubicweb.server import hook from cubicweb.server.sqlutils import SQL_PREFIX from cubicweb.server.hook import Hook from cubicweb.server.sources import native @@ -312,111 +311,6 @@ ownedby = schema.rschema('owned_by') self.assertEqual(ownedby.objects('CWEType'), ('CWUser',)) - def test_pyro(self): - import Pyro - Pyro.config.PYRO_MULTITHREADED = 0 - done = [] - self.repo.config.global_set_option('pyro-ns-host', 'NO_PYRONS') - daemon = self.repo.pyro_register() - try: - uri = self.repo.pyro_uri.replace('PYRO', 'pyroloc') - # the client part has to be in the thread due to sqlite limitations - t = threading.Thread(target=self._pyro_client, args=(uri, done)) - t.start() - while not done: - daemon.handleRequests(1.0) - t.join(1) - if t.isAlive(): - self.fail('something went wrong, thread still alive') - finally: - repository.pyro_unregister(self.repo.config) - from logilab.common import pyro_ext - pyro_ext._DAEMONS.clear() - - - def _pyro_client(self, uri, done): - cnx = connect(uri, - u'admin', password='gingkow', - initlog=False) # don't reset logging configuration - try: - cnx.load_appobjects(subpath=('entities',)) - # check we can get the schema - schema = cnx.get_schema() - self.assertTrue(cnx.vreg) - self.assertTrue('etypes'in cnx.vreg) - cu = cnx.cursor() - rset = cu.execute('Any U,G WHERE U in_group G') - user = iter(rset.entities()).next() - self.assertTrue(user._cw) - self.assertTrue(user._cw.vreg) - from cubicweb.entities import authobjs - self.assertIsInstance(user._cw.user, authobjs.CWUser) - # make sure the tcp connection is closed properly; yes, it's disgusting. 
- adapter = cnx._repo.adapter - cnx.close() - adapter.release() - done.append(True) - finally: - # connect monkey patch some method by default, remove them - multiple_connections_unfix() - - - def test_zmq(self): - try: - import zmq - except ImportError: - self.skipTest("zmq in not available") - done = [] - from cubicweb.devtools import TestServerConfiguration as ServerConfiguration - from cubicweb.server.cwzmq import ZMQRepositoryServer - # the client part has to be in a thread due to sqlite limitations - t = threading.Thread(target=self._zmq_client, args=(done,)) - t.start() - - zmq_server = ZMQRepositoryServer(self.repo) - zmq_server.connect('zmqpickle-tcp://127.0.0.1:41415') - - t2 = threading.Thread(target=self._zmq_quit, args=(done, zmq_server,)) - t2.start() - - zmq_server.run() - - t2.join(1) - t.join(1) - - if t.isAlive(): - self.fail('something went wrong, thread still alive') - - def _zmq_quit(self, done, srv): - while not done: - time.sleep(0.1) - srv.quit() - - def _zmq_client(self, done): - try: - cnx = connect('zmqpickle-tcp://127.0.0.1:41415', u'admin', password=u'gingkow', - initlog=False) # don't reset logging configuration - try: - cnx.load_appobjects(subpath=('entities',)) - # check we can get the schema - schema = cnx.get_schema() - self.assertTrue(cnx.vreg) - self.assertTrue('etypes'in cnx.vreg) - cu = cnx.cursor() - rset = cu.execute('Any U,G WHERE U in_group G') - user = iter(rset.entities()).next() - self.assertTrue(user._cw) - self.assertTrue(user._cw.vreg) - from cubicweb.entities import authobjs - self.assertIsInstance(user._cw.user, authobjs.CWUser) - cnx.close() - done.append(True) - finally: - # connect monkey patch some method by default, remove them - multiple_connections_unfix() - finally: - done.append(False) - def test_internal_api(self): repo = self.repo cnxid = repo.connect(self.admlogin, password=self.admpassword) @@ -699,7 +593,8 @@ cu = cnx.system_sql('SELECT * FROM entities WHERE eid = -1') data = cu.fetchall() self.assertEqual(tuplify(data), [(-1, 'Personne', 'system', None)]) - self.repo.delete_info(cnx, entity, 'system') + self.repo._delete_cascade_multi(cnx, [entity]) + self.repo.system_source.delete_info_multi(cnx, [entity]) #self.repo.commit() cu = cnx.system_sql('SELECT * FROM entities WHERE eid = -1') data = cu.fetchall() diff -r e52efb73f9ee -r 729f36a1bcfa server/test/unittest_schema2sql.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/server/test/unittest_schema2sql.py Wed Apr 22 10:08:14 2015 +0200 @@ -0,0 +1,288 @@ +# copyright 2004-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr +# +# This file is part of CubicWeb. +# +# CubicWeb is free software: you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free +# Software Foundation, either version 2.1 of the License, or (at your option) +# any later version. +# +# CubicWeb is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with CubicWeb. If not, see . 
+"""unit tests for module cubicweb.server.schema2sql +""" + +import os.path as osp + +from logilab.common.testlib import TestCase, unittest_main +from logilab.database import get_db_helper + +from yams.reader import SchemaLoader +from cubicweb.server import schema2sql + +schema2sql.SET_DEFAULT = True + +DATADIR = osp.abspath(osp.join(osp.dirname(__file__), 'data-schema2sql')) + +schema = SchemaLoader().load([DATADIR]) + + +EXPECTED_DATA_NO_DROP = """ +CREATE TABLE Affaire( + sujet varchar(128), + ref varchar(12), + inline_rel integer REFERENCES entities (eid) +); +CREATE INDEX affaire_inline_rel_idx ON Affaire(inline_rel); + +CREATE TABLE Company( + name text +); + +CREATE TABLE Datetest( + dt1 timestamp, + dt2 timestamp, + d1 date, + d2 date, + t1 time, + t2 time +); + +CREATE TABLE Division( + name text +); + +CREATE TABLE EPermission( + name varchar(100) NOT NULL +); +CREATE INDEX epermission_name_idx ON EPermission(name); + +CREATE TABLE Eetype( + name varchar(64) UNIQUE NOT NULL, + description text, + meta boolean, + final boolean, + initial_state integer REFERENCES entities (eid) +); +CREATE INDEX eetype_name_idx ON Eetype(name); +CREATE INDEX eetype_initial_state_idx ON Eetype(initial_state); + +CREATE TABLE Employee( +); + +CREATE TABLE Note( + date varchar(10), + type varchar(1), + para varchar(512) +); + +CREATE TABLE Person( + nom varchar(64) NOT NULL, + prenom varchar(64), + sexe varchar(1) DEFAULT 'M', + promo varchar(6), + titre varchar(128), + adel varchar(128), + ass varchar(128), + web varchar(128), + tel integer, + fax integer, + datenaiss date, + test boolean, + salary float +); +CREATE UNIQUE INDEX unique_e6c2d219772dbf1715597f7d9a6b3892 ON Person(nom,prenom); + +CREATE TABLE Salaried( + nom varchar(64) NOT NULL, + prenom varchar(64), + sexe varchar(1) DEFAULT 'M', + promo varchar(6), + titre varchar(128), + adel varchar(128), + ass varchar(128), + web varchar(128), + tel integer, + fax integer, + datenaiss date, + test boolean, + salary float +); +CREATE UNIQUE INDEX unique_98da0f9de8588baa8966f0b1a6f850a3 ON Salaried(nom,prenom); + +CREATE TABLE Societe( + nom varchar(64), + web varchar(128), + tel integer, + fax integer, + rncs varchar(32), + ad1 varchar(128), + ad2 varchar(128), + ad3 varchar(128), + cp varchar(12), + ville varchar(32) +); + +CREATE TABLE State( + eid integer PRIMARY KEY REFERENCES entities (eid), + name varchar(256) NOT NULL, + description text +); +CREATE INDEX state_name_idx ON State(name); + +CREATE TABLE Subcompany( + name text +); + +CREATE TABLE Subdivision( + name text +); + +CREATE TABLE pkginfo( + modname varchar(30) NOT NULL, + version varchar(10) DEFAULT '0.1' NOT NULL, + copyright text NOT NULL, + license varchar(3), + short_desc varchar(80) NOT NULL, + long_desc text NOT NULL, + author varchar(100) NOT NULL, + author_email varchar(100) NOT NULL, + mailinglist varchar(100), + debian_handler varchar(6) +); + + +CREATE TABLE concerne_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT concerne_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX concerne_relation_from_idx ON concerne_relation(eid_from); +CREATE INDEX concerne_relation_to_idx ON concerne_relation(eid_to); + +CREATE TABLE division_of_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT division_of_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX division_of_relation_from_idx ON 
division_of_relation(eid_from); +CREATE INDEX division_of_relation_to_idx ON division_of_relation(eid_to); + +CREATE TABLE evaluee_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT evaluee_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX evaluee_relation_from_idx ON evaluee_relation(eid_from); +CREATE INDEX evaluee_relation_to_idx ON evaluee_relation(eid_to); + +CREATE TABLE next_state_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT next_state_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX next_state_relation_from_idx ON next_state_relation(eid_from); +CREATE INDEX next_state_relation_to_idx ON next_state_relation(eid_to); + +CREATE TABLE obj_wildcard_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT obj_wildcard_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX obj_wildcard_relation_from_idx ON obj_wildcard_relation(eid_from); +CREATE INDEX obj_wildcard_relation_to_idx ON obj_wildcard_relation(eid_to); + +CREATE TABLE require_permission_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT require_permission_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX require_permission_relation_from_idx ON require_permission_relation(eid_from); +CREATE INDEX require_permission_relation_to_idx ON require_permission_relation(eid_to); + +CREATE TABLE state_of_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT state_of_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX state_of_relation_from_idx ON state_of_relation(eid_from); +CREATE INDEX state_of_relation_to_idx ON state_of_relation(eid_to); + +CREATE TABLE subcompany_of_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT subcompany_of_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX subcompany_of_relation_from_idx ON subcompany_of_relation(eid_from); +CREATE INDEX subcompany_of_relation_to_idx ON subcompany_of_relation(eid_to); + +CREATE TABLE subdivision_of_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT subdivision_of_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX subdivision_of_relation_from_idx ON subdivision_of_relation(eid_from); +CREATE INDEX subdivision_of_relation_to_idx ON subdivision_of_relation(eid_to); + +CREATE TABLE subj_wildcard_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT subj_wildcard_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX subj_wildcard_relation_from_idx ON subj_wildcard_relation(eid_from); +CREATE INDEX subj_wildcard_relation_to_idx ON subj_wildcard_relation(eid_to); + +CREATE TABLE sym_rel_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT sym_rel_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX sym_rel_relation_from_idx ON sym_rel_relation(eid_from); +CREATE INDEX sym_rel_relation_to_idx ON sym_rel_relation(eid_to); + +CREATE TABLE travaille_relation ( 
+ eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT travaille_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX travaille_relation_from_idx ON travaille_relation(eid_from); +CREATE INDEX travaille_relation_to_idx ON travaille_relation(eid_to); + +CREATE TABLE works_for_relation ( + eid_from INTEGER NOT NULL REFERENCES entities (eid), + eid_to INTEGER NOT NULL REFERENCES entities (eid), + CONSTRAINT works_for_relation_p_key PRIMARY KEY(eid_from, eid_to) +); + +CREATE INDEX works_for_relation_from_idx ON works_for_relation(eid_from); +CREATE INDEX works_for_relation_to_idx ON works_for_relation(eid_to); +""" + +class SQLSchemaTC(TestCase): + + def test_known_values(self): + dbhelper = get_db_helper('postgres') + output = schema2sql.schema2sql(dbhelper, schema) + self.assertMultiLineEqual(EXPECTED_DATA_NO_DROP.strip(), output.strip()) + + +if __name__ == '__main__': + unittest_main() diff -r e52efb73f9ee -r 729f36a1bcfa server/test/unittest_schemaserial.py --- a/server/test/unittest_schemaserial.py Thu Apr 02 13:54:00 2015 +0200 +++ b/server/test/unittest_schemaserial.py Wed Apr 22 10:08:14 2015 +0200 @@ -17,9 +17,6 @@ # with CubicWeb. If not, see . """unit tests for schema rql (de)serialization""" -import sys -from cStringIO import StringIO - from logilab.common.testlib import TestCase, unittest_main from cubicweb import Binary diff -r e52efb73f9ee -r 729f36a1bcfa server/test/unittest_security.py --- a/server/test/unittest_security.py Thu Apr 02 13:54:00 2015 +0200 +++ b/server/test/unittest_security.py Wed Apr 22 10:08:14 2015 +0200 @@ -31,9 +31,9 @@ def setup_database(self): super(BaseSecurityTC, self).setup_database() with self.admin_access.client_cnx() as cnx: - self.create_user(cnx, 'iaminusersgrouponly') + self.create_user(cnx, u'iaminusersgrouponly') hash = _CRYPTO_CTX.encrypt('oldpassword', scheme='des_crypt') - self.create_user(cnx, 'oldpassword', password=Binary(hash)) + self.create_user(cnx, u'oldpassword', password=Binary(hash)) class LowLevelSecurityFunctionTC(BaseSecurityTC): @@ -45,7 +45,7 @@ with self.admin_access.repo_cnx() as cnx: self.repo.vreg.solutions(cnx, rqlst, None) check_relations_read_access(cnx, rqlst, {}) - with self.new_access('anon').repo_cnx() as cnx: + with self.new_access(u'anon').repo_cnx() as cnx: self.assertRaises(Unauthorized, check_relations_read_access, cnx, rqlst, {}) @@ -60,7 +60,7 @@ solution = rqlst.solutions[0] localchecks = get_local_checks(cnx, rqlst, solution) self.assertEqual({}, localchecks) - with self.new_access('anon').repo_cnx() as cnx: + with self.new_access(u'anon').repo_cnx() as cnx: self.assertRaises(Unauthorized, get_local_checks, cnx, rqlst, solution) @@ -70,7 +70,7 @@ with self.admin_access.repo_cnx() as cnx: self.assertRaises(Unauthorized, cnx.execute, 'Any X,P WHERE X is CWUser, X upassword P') - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: self.assertRaises(Unauthorized, cnx.execute, 'Any X,P WHERE X is CWUser, X upassword P') @@ -104,7 +104,7 @@ super(SecurityRewritingTC, self).tearDown() def test_not_relation_read_security(self): - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: self.hijack_source_execute() cnx.execute('Any U WHERE NOT A todo_by U, A is Affaire') self.assertEqual(self.query[0][1].as_string(), @@ -126,13 +126,13 @@ cnx.commit() def test_insert_security(self): - with 
self.new_access('anon').repo_cnx() as cnx: + with self.new_access(u'anon').repo_cnx() as cnx: cnx.execute("INSERT Personne X: X nom 'bidule'") self.assertRaises(Unauthorized, cnx.commit) self.assertEqual(cnx.execute('Personne X').rowcount, 1) def test_insert_security_2(self): - with self.new_access('anon').repo_cnx() as cnx: + with self.new_access(u'anon').repo_cnx() as cnx: cnx.execute("INSERT Affaire X") self.assertRaises(Unauthorized, cnx.commit) # anon has no read permission on Affaire entities, so @@ -141,20 +141,20 @@ def test_insert_rql_permission(self): # test user can only add une affaire related to a societe he owns - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: cnx.execute("INSERT Affaire X: X sujet 'cool'") self.assertRaises(Unauthorized, cnx.commit) # test nothing has actually been inserted with self.admin_access.repo_cnx() as cnx: self.assertEqual(cnx.execute('Affaire X').rowcount, 1) - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: cnx.execute("INSERT Affaire X: X sujet 'cool'") cnx.execute("INSERT Societe X: X nom 'chouette'") cnx.execute("SET A concerne S WHERE A sujet 'cool', S nom 'chouette'") cnx.commit() def test_update_security_1(self): - with self.new_access('anon').repo_cnx() as cnx: + with self.new_access(u'anon').repo_cnx() as cnx: # local security check cnx.execute( "SET X nom 'bidulechouette' WHERE X is Personne") self.assertRaises(Unauthorized, cnx.commit) @@ -164,7 +164,7 @@ def test_update_security_2(self): with self.temporary_permissions(Personne={'read': ('users', 'managers'), 'add': ('guests', 'users', 'managers')}): - with self.new_access('anon').repo_cnx() as cnx: + with self.new_access(u'anon').repo_cnx() as cnx: self.assertRaises(Unauthorized, cnx.execute, "SET X nom 'bidulechouette' WHERE X is Personne") # test nothing has actually been inserted @@ -172,7 +172,7 @@ self.assertEqual(cnx.execute('Personne X WHERE X nom "bidulechouette"').rowcount, 0) def test_update_security_3(self): - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: cnx.execute("INSERT Personne X: X nom 'biduuule'") cnx.execute("INSERT Societe X: X nom 'looogilab'") cnx.execute("SET X travaille S WHERE X nom 'biduuule', S nom 'looogilab'") @@ -191,7 +191,7 @@ cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") cnx.commit() # test user can only update une affaire related to a societe he owns - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: cnx.execute("SET X sujet 'pascool' WHERE X is Affaire") # this won't actually do anything since the selection query won't return anything cnx.commit() @@ -212,7 +212,7 @@ #self.assertRaises(Unauthorized, # self.o.execute, user, "DELETE CWUser X WHERE X login 'bidule'") # check local security - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: self.assertRaises(Unauthorized, cnx.execute, "DELETE CWGroup Y WHERE Y name 'staff'") def test_delete_rql_permission(self): @@ -220,7 +220,7 @@ cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") cnx.commit() # test user can only dele une affaire related to a societe he owns - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with 
self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: # this won't actually do anything since the selection query won't return anything cnx.execute("DELETE Affaire X") cnx.commit() @@ -239,7 +239,7 @@ cnx.commit() def test_insert_relation_rql_permission(self): - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") # should raise Unauthorized since user don't own S though this won't # actually do anything since the selection query won't return @@ -266,7 +266,7 @@ with self.admin_access.repo_cnx() as cnx: cnx.execute("SET A concerne S WHERE A is Affaire, S is Societe") cnx.commit() - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: # this won't actually do anything since the selection query won't return anything cnx.execute("DELETE A concerne S") cnx.commit() @@ -277,7 +277,7 @@ {'x': eid}) cnx.execute("SET A concerne S WHERE A sujet 'pascool', S is Societe") cnx.commit() - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: self.assertRaises(Unauthorized, cnx.execute, "DELETE A concerne S") self.assertRaises(QueryError, cnx.commit) # can't commit anymore cnx.rollback() @@ -290,8 +290,8 @@ def test_user_can_change_its_upassword(self): with self.admin_access.repo_cnx() as cnx: - ueid = self.create_user(cnx, 'user').eid - with self.new_access('user').repo_cnx() as cnx: + ueid = self.create_user(cnx, u'user').eid + with self.new_access(u'user').repo_cnx() as cnx: cnx.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', {'x': ueid, 'passwd': 'newpwd'}) cnx.commit() @@ -299,8 +299,8 @@ def test_user_cant_change_other_upassword(self): with self.admin_access.repo_cnx() as cnx: - ueid = self.create_user(cnx, 'otheruser').eid - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + ueid = self.create_user(cnx, u'otheruser').eid + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: cnx.execute('SET X upassword %(passwd)s WHERE X eid %(x)s', {'x': ueid, 'passwd': 'newpwd'}) self.assertRaises(Unauthorized, cnx.commit) @@ -309,7 +309,7 @@ def test_read_base(self): with self.temporary_permissions(Personne={'read': ('users', 'managers')}): - with self.new_access('anon').repo_cnx() as cnx: + with self.new_access(u'anon').repo_cnx() as cnx: self.assertRaises(Unauthorized, cnx.execute, 'Personne U where U nom "managers"') @@ -317,7 +317,7 @@ with self.admin_access.repo_cnx() as cnx: eid = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] cnx.commit() - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: rset = cnx.execute('Affaire X') self.assertEqual(rset.rows, []) self.assertRaises(Unauthorized, cnx.execute, 'Any X WHERE X eid %(x)s', {'x': eid}) @@ -342,7 +342,7 @@ def test_entity_created_in_transaction(self): affschema = self.schema['Affaire'] with self.temporary_permissions(Affaire={'read': affschema.permissions['add']}): - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] # entity created in transaction are readable *by eid* self.assertTrue(cnx.execute('Any X WHERE X eid %(x)s', {'x':aff2})) @@ -358,7 +358,7 @@ cnx.execute('SET X owned_by U WHERE X eid %(x)s, U 
login "iaminusersgrouponly"', {'x': card1}) cnx.commit() - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] soc1 = cnx.execute("INSERT Societe X: X nom 'chouette'")[0][0] cnx.execute("SET A concerne S WHERE A eid %(a)s, S eid %(s)s", {'a': aff2, 's': soc1}) @@ -376,7 +376,7 @@ cnx.execute("INSERT Societe X: X nom 'bidule'") cnx.commit() with self.temporary_permissions(Personne={'read': ('managers',)}): - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: rset = cnx.execute('Any N WHERE N has_text "bidule"') self.assertEqual(len(rset.rows), 1, rset.rows) rset = cnx.execute('Any N WITH N BEING (Any N WHERE N has_text "bidule")') @@ -388,7 +388,7 @@ cnx.execute("INSERT Societe X: X nom 'bidule'") cnx.commit() with self.temporary_permissions(Personne={'read': ('managers',)}): - with self.new_access('anon').repo_cnx() as cnx: + with self.new_access(u'anon').repo_cnx() as cnx: rset = cnx.execute('Any N,U WHERE N has_text "bidule", N owned_by U?') self.assertEqual(len(rset.rows), 1, rset.rows) @@ -396,7 +396,7 @@ with self.admin_access.repo_cnx() as cnx: cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] cnx.commit() - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: rset = cnx.execute('Any COUNT(X) WHERE X is Affaire') self.assertEqual(rset.rows, [[0]]) aff2 = cnx.execute("INSERT Affaire X: X sujet 'cool'")[0][0] @@ -424,7 +424,7 @@ "X web 'http://www.debian.org', X test TRUE")[0][0] cnx.execute('SET X test FALSE WHERE X eid %(x)s', {'x': eid}) cnx.commit() - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: cnx.execute("INSERT Personne X: X nom 'bidule', " "X web 'http://www.debian.org', X test TRUE") self.assertRaises(Unauthorized, cnx.commit) @@ -440,7 +440,7 @@ self.assertRaises(Unauthorized, cnx.commit) cnx.execute('SET X web "http://www.logilab.org" WHERE X eid %(x)s', {'x': eid}) cnx.commit() - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: cnx.execute('INSERT Frozable F: F name "Foo"') cnx.commit() cnx.execute('SET F name "Bar" WHERE F is Frozable') @@ -464,7 +464,7 @@ note.cw_adapt_to('IWorkflowable').fire_transition('markasdone') cnx.execute('SET X para "truc" WHERE X eid %(x)s', {'x': note.eid}) cnx.commit() - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: cnx.execute("SET X para 'chouette' WHERE X eid %(x)s", {'x': note.eid}) self.assertRaises(Unauthorized, cnx.commit) note2 = cnx.execute("INSERT Note X: X para 'bidule'").get_entity(0, 0) @@ -496,7 +496,7 @@ login_rdef = self.repo.schema['CWUser'].rdef('login') with self.temporary_permissions((login_rdef, {'read': ('users', 'managers')}), CWUser={'read': ('guests', 'users', 'managers')}): - with self.new_access('anon').repo_cnx() as cnx: + with self.new_access(u'anon').repo_cnx() as cnx: rset = cnx.execute('CWUser X') self.assertTrue(rset) x = rset.get_entity(0, 0) @@ -510,7 +510,7 @@ def test_yams_inheritance_and_security_bug(self): with self.temporary_permissions(Division={'read': ('managers', ERQLExpression('X owned_by U'))}): - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + 
with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: querier = cnx.repo.querier rqlst = querier.parse('Any X WHERE X is_instance_of Societe') querier.solutions(cnx, rqlst, {}) @@ -519,7 +519,7 @@ plan.preprocess(rqlst) self.assertEqual( rqlst.as_string(), - '(Any X WHERE X is IN(SubDivision, Societe)) UNION ' + '(Any X WHERE X is IN(Societe, SubDivision)) UNION ' '(Any X WHERE X is Division, EXISTS(X owned_by %(B)s))') @@ -528,7 +528,7 @@ def test_user_can_delete_object_he_created(self): # even if some other user have changed object'state - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: # due to security test, affaire has to concerne a societe the user owns cnx.execute('INSERT Societe X: X nom "ARCTIA"') cnx.execute('INSERT Affaire X: X ref "ARCT01", X concerne S WHERE S nom "ARCTIA"') @@ -542,7 +542,7 @@ self.assertEqual(len(cnx.execute('TrInfo X WHERE X wf_info_for A, A ref "ARCT01",' 'X owned_by U, U login "admin"')), 1) # TrInfo at the above state change - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: cnx.execute('DELETE Affaire X WHERE X ref "ARCT01"') cnx.commit() self.assertFalse(cnx.execute('Affaire X')) @@ -550,7 +550,7 @@ def test_users_and_groups_non_readable_by_guests(self): with self.repo.internal_cnx() as cnx: admineid = cnx.execute('CWUser U WHERE U login "admin"').rows[0][0] - with self.new_access('anon').repo_cnx() as cnx: + with self.new_access(u'anon').repo_cnx() as cnx: anon = cnx.user # anonymous user can only read itself rset = cnx.execute('Any L WHERE X owned_by U, U login L') @@ -569,7 +569,7 @@ self.assertRaises(Unauthorized, cnx.commit) def test_in_group_relation(self): - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: rql = u"DELETE U in_group G WHERE U login 'admin'" self.assertRaises(Unauthorized, cnx.execute, rql) rql = u"SET U in_group G WHERE U login 'admin', G name 'users'" @@ -579,7 +579,7 @@ with self.admin_access.repo_cnx() as cnx: cnx.execute("INSERT Personne X: X nom 'bidule'") cnx.commit() - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: rql = u"SET X owned_by U WHERE U login 'iaminusersgrouponly', X is Personne" self.assertRaises(Unauthorized, cnx.execute, rql) @@ -589,7 +589,7 @@ beid2 = cnx.execute('INSERT Bookmark B: B path "?vid=index", B title "index", ' 'B bookmarked_by U WHERE U login "anon"')[0][0] cnx.commit() - with self.new_access('anon').repo_cnx() as cnx: + with self.new_access(u'anon').repo_cnx() as cnx: anoneid = cnx.user.eid self.assertEqual(cnx.execute('Any T,P ORDERBY lower(T) WHERE B is Bookmark,B title T,B path P,' 'B bookmarked_by U, U eid %s' % anoneid).rows, @@ -606,7 +606,7 @@ {'x': anoneid, 'b': beid1}) def test_ambigous_ordered(self): - with self.new_access('anon').repo_cnx() as cnx: + with self.new_access(u'anon').repo_cnx() as cnx: names = [t for t, in cnx.execute('Any N ORDERBY lower(N) WHERE X name N')] self.assertEqual(names, sorted(names, key=lambda x: x.lower())) @@ -617,7 +617,7 @@ with self.admin_access.repo_cnx() as cnx: eid = cnx.execute('INSERT Affaire X: X ref "ARCT01"')[0][0] cnx.commit() - with self.new_access('iaminusersgrouponly').repo_cnx() as cnx: + with self.new_access(u'iaminusersgrouponly').repo_cnx() as cnx: # needed to remove rql expr granting update perm to the user 
affschema = self.schema['Affaire'] with self.temporary_permissions(Affaire={'update': affschema.get_groups('update'), @@ -675,7 +675,7 @@ 'U use_email X WHERE U login "anon"').get_entity(0, 0) cnx.commit() self.assertEqual(len(cnx.execute('Any X WHERE X is EmailAddress')), 2) - with self.new_access('anon').repo_cnx() as cnx: + with self.new_access(u'anon').repo_cnx() as cnx: self.assertEqual(len(cnx.execute('Any X WHERE X is EmailAddress')), 1) if __name__ == '__main__': diff -r e52efb73f9ee -r 729f36a1bcfa server/test/unittest_tools.py --- a/server/test/unittest_tools.py Thu Apr 02 13:54:00 2015 +0200 +++ b/server/test/unittest_tools.py Wed Apr 22 10:08:14 2015 +0200 @@ -23,7 +23,6 @@ class ImportTC(TestCase): def test(self): # the minimal test: module is importable... - import cubicweb.server.server import cubicweb.server.checkintegrity import cubicweb.server.serverctl diff -r e52efb73f9ee -r 729f36a1bcfa server/test/unittest_undo.py --- a/server/test/unittest_undo.py Thu Apr 02 13:54:00 2015 +0200 +++ b/server/test/unittest_undo.py Wed Apr 22 10:08:14 2015 +0200 @@ -48,7 +48,6 @@ def tearDown(self): cubicweb.server.session.Connection = OldConnection - self.restore_connection() super(UndoableTransactionTC, self).tearDown() def check_transaction_deleted(self, cnx, txuuid): @@ -210,10 +209,10 @@ ['CWUser']) # undoing shouldn't be visble in undoable transaction, and the undone # transaction should be removed - txs = self.cnx.undoable_transactions() + txs = cnx.undoable_transactions() self.assertEqual(len(txs), 2) self.assertRaises(NoSuchTransaction, - self.cnx.transaction_info, txuuid) + cnx.transaction_info, txuuid) with self.admin_access.repo_cnx() as cnx: with cnx.ensure_cnx_set: self.check_transaction_deleted(cnx, txuuid) @@ -238,6 +237,8 @@ cnx.commit() p.cw_clear_all_caches() self.assertEqual(p.fiche[0].eid, c2.eid) + # we restored the card + self.assertTrue(cnx.entity_from_eid(c.eid)) def test_undo_deletion_integrity_2(self): with self.admin_access.client_cnx() as cnx: @@ -375,6 +376,17 @@ p.cw_clear_all_caches() self.assertFalse(p.fiche) + def test_undo_inline_rel_delete_ko(self): + with self.admin_access.client_cnx() as cnx: + c = cnx.create_entity('Card', title=u'hop', content=u'hop') + txuuid = cnx.commit() + p = cnx.create_entity('Personne', nom=u'louis', fiche=c) + cnx.commit() + integrityerror = self.repo.sources_by_uri['system'].dbhelper.dbapi_module.IntegrityError + with self.assertRaises(integrityerror): + cnx.undo_transaction(txuuid) + + def test_undo_inline_rel_add_ko(self): """Undo add relation Personne (?) fiche (?) Card diff -r e52efb73f9ee -r 729f36a1bcfa skeleton/__pkginfo__.py.tmpl --- a/skeleton/__pkginfo__.py.tmpl Thu Apr 02 13:54:00 2015 +0200 +++ b/skeleton/__pkginfo__.py.tmpl Wed Apr 22 10:08:14 2015 +0200 @@ -13,7 +13,7 @@ description = '%(shortdesc)s' web = 'http://www.cubicweb.org/project/%%s' %% distname -__depends__ = %(dependencies)s +__depends__ = %(dependencies)s __recommends__ = {} classifiers = [ @@ -29,6 +29,7 @@ THIS_CUBE_DIR = join('share', 'cubicweb', 'cubes', modname) + def listdir(dirpath): return [join(dirpath, fname) for fname in _listdir(dirpath) if fname[0] != '.' 
and not fname.endswith('.pyc') @@ -40,9 +41,9 @@ [THIS_CUBE_DIR, [fname for fname in glob('*.py') if fname != 'setup.py']], ] # check for possible extended cube layout -for dname in ('entities', 'views', 'sobjects', 'hooks', 'schema', 'data', 'wdoc', 'i18n', 'migration'): +for dname in ('entities', 'views', 'sobjects', 'hooks', 'schema', 'data', + 'wdoc', 'i18n', 'migration'): if isdir(dname): data_files.append([join(THIS_CUBE_DIR, dname), listdir(dname)]) # Note: here, you'll need to add subdirectories if you want # them to be included in the debian package - diff -r e52efb73f9ee -r 729f36a1bcfa skeleton/migration/postcreate.py.tmpl --- a/skeleton/migration/postcreate.py.tmpl Thu Apr 02 13:54:00 2015 +0200 +++ b/skeleton/migration/postcreate.py.tmpl Wed Apr 22 10:08:14 2015 +0200 @@ -11,4 +11,3 @@ # Example of site property change #set_property('ui.site-title', "") - diff -r e52efb73f9ee -r 729f36a1bcfa skeleton/setup.py --- a/skeleton/setup.py Thu Apr 02 13:54:00 2015 +0200 +++ b/skeleton/setup.py Wed Apr 22 10:08:14 2015 +0200 @@ -16,8 +16,8 @@ # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . +# You should have received a copy of the GNU Lesser General Public License +# along with CubicWeb. If not, see . """Generic Setup script, takes package info from __pkginfo__.py file """ __docformat__ = "restructuredtext en" @@ -25,11 +25,11 @@ import os import sys import shutil -from os.path import isdir, exists, join, walk +from os.path import exists, join, walk try: if os.environ.get('NO_SETUPTOOLS'): - raise ImportError() # do as there is no setuptools + raise ImportError() # do as there is no setuptools from setuptools import setup from setuptools.command import install_lib USE_SETUPTOOLS = True @@ -41,7 +41,7 @@ # import required features from __pkginfo__ import modname, version, license, description, web, \ - author, author_email, classifiers + author, author_email, classifiers if exists('README'): long_description = file('README').read() @@ -52,10 +52,10 @@ import __pkginfo__ if USE_SETUPTOOLS: requires = {} - for entry in ("__depends__",): # "__recommends__"): + for entry in ("__depends__",): # "__recommends__"): requires.update(getattr(__pkginfo__, entry, {})) install_requires = [("%s %s" % (d, v and v or "")).strip() - for d, v in requires.iteritems()] + for d, v in requires.iteritems()] else: install_requires = [] @@ -82,6 +82,7 @@ scripts_ = linux_scripts return scripts_ + def export(from_dir, to_dir, blacklist=BASE_BLACKLIST, ignore_ext=IGNORED_EXTENSIONS, @@ -150,13 +151,15 @@ old_install_data.run(self) self.install_dir = _old_install_dir try: - import setuptools.command.easy_install # only if easy_install available + # only if easy_install available + import setuptools.command.easy_install # noqa # monkey patch: Crack SandboxViolation verification from setuptools.sandbox import DirectorySandbox as DS old_ok = DS._ok + def _ok(self, path): """Return True if ``path`` can be written during installation.""" - out = old_ok(self, path) # here for side effect from setuptools + out = old_ok(self, path) # here for side effect from setuptools realpath = os.path.normcase(os.path.realpath(path)) allowed_path = os.path.normcase(sys.prefix) if realpath.startswith(allowed_path): @@ -166,6 +169,7 @@ except ImportError: pass + def install(**kwargs): """setup entry point""" if USE_SETUPTOOLS: @@ -181,21 +185,22 @@ kwargs['zip_safe'] = False 
cmdclass['install_data'] = MyInstallData - return setup(name = distname, - version = version, - license = license, - description = description, - long_description = long_description, - author = author, - author_email = author_email, - url = web, - scripts = ensure_scripts(scripts), - data_files = data_files, - ext_modules = ext_modules, - cmdclass = cmdclass, - classifiers = classifiers, + return setup(name=distname, + version=version, + license=license, + description=description, + long_description=long_description, + author=author, + author_email=author_email, + url=web, + scripts=ensure_scripts(scripts), + data_files=data_files, + ext_modules=ext_modules, + cmdclass=cmdclass, + classifiers=classifiers, **kwargs ) -if __name__ == '__main__' : + +if __name__ == '__main__': install() diff -r e52efb73f9ee -r 729f36a1bcfa skeleton/test/pytestconf.py --- a/skeleton/test/pytestconf.py Thu Apr 02 13:54:00 2015 +0200 +++ b/skeleton/test/pytestconf.py Wed Apr 22 10:08:14 2015 +0200 @@ -13,8 +13,8 @@ # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . +# You should have received a copy of the GNU Lesser General Public License +# along with CubicWeb. If not, see . """ """ @@ -23,6 +23,7 @@ from logilab.common.pytest import PyTester + def getlogin(): """avoid usinng os.getlogin() because of strange tty / stdin problems (man 3 getlogin) diff -r e52efb73f9ee -r 729f36a1bcfa skeleton/test/realdb_test_CUBENAME.py --- a/skeleton/test/realdb_test_CUBENAME.py Thu Apr 02 13:54:00 2015 +0200 +++ b/skeleton/test/realdb_test_CUBENAME.py Wed Apr 22 10:08:14 2015 +0200 @@ -13,14 +13,15 @@ # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # -# You should have received a copy of the GNU Lesser General Public License along -# with CubicWeb. If not, see . +# You should have received a copy of the GNU Lesser General Public License +# along with CubicWeb. If not, see . 
""" """ from cubicweb.devtools.testlib import CubicWebTC from cubicweb.devtools.realdbtest import buildconfig, loadconfig + def setUpModule(options): if options.source: configcls = loadconfig(options.source) @@ -28,13 +29,13 @@ raise Exception('either or options are required') else: configcls = buildconfig(options.dbuser, options.dbpassword, - options.dbname, options.euser, - options.epassword) + options.dbname, + options.euser, options.epassword) RealDatabaseTC.configcls = configcls class RealDatabaseTC(CubicWebTC): - configcls = None # set by setUpModule() + configcls = None # set by setUpModule() def test_all_primaries(self): for rset in self.iter_individual_rsets(limit=50): diff -r e52efb73f9ee -r 729f36a1bcfa skeleton/test/test_CUBENAME.py.tmpl --- a/skeleton/test/test_CUBENAME.py.tmpl Thu Apr 02 13:54:00 2015 +0200 +++ b/skeleton/test/test_CUBENAME.py.tmpl Wed Apr 22 10:08:14 2015 +0200 @@ -27,6 +27,7 @@ from cubicweb.devtools import testlib + class DefaultTC(testlib.CubicWebTC): def test_something(self): self.skipTest('this cube has no test') diff -r e52efb73f9ee -r 729f36a1bcfa sobjects/notification.py --- a/sobjects/notification.py Thu Apr 02 13:54:00 2015 +0200 +++ b/sobjects/notification.py Wed Apr 22 10:08:14 2015 +0200 @@ -270,7 +270,7 @@ """ __abstract__ = True __regid__ = 'notif_entity_updated' - msgid_timestamp = False + msgid_timestamp = True message = _('updated') no_detailed_change_attrs = () content = """ diff -r e52efb73f9ee -r 729f36a1bcfa sobjects/test/unittest_cwxmlparser.py --- a/sobjects/test/unittest_cwxmlparser.py Thu Apr 02 13:54:00 2015 +0200 +++ b/sobjects/test/unittest_cwxmlparser.py Wed Apr 22 10:08:14 2015 +0200 @@ -17,6 +17,7 @@ # with CubicWeb. If not, see . from datetime import datetime +from urlparse import urlsplit, parse_qsl from cubicweb.devtools.testlib import CubicWebTC from cubicweb.sobjects.cwxmlparser import CWEntityXMLParser @@ -133,6 +134,16 @@ """ test_db_id = 'xmlparser' + def assertURLEquiv(self, first, second): + # ignore ordering differences in query params + parsed_first = urlsplit(first) + parsed_second = urlsplit(second) + self.assertEqual(parsed_first.scheme, parsed_second.scheme) + self.assertEqual(parsed_first.netloc, parsed_second.netloc) + self.assertEqual(parsed_first.path, parsed_second.path) + self.assertEqual(parsed_first.fragment, parsed_second.fragment) + self.assertCountEqual(parse_qsl(parsed_first.query), parse_qsl(parsed_second.query)) + @classmethod def pre_setup_database(cls, cnx, config): myfeed = cnx.create_entity('CWSource', name=u'myfeed', type=u'datafeed', @@ -161,16 +172,16 @@ dfsource = self.repo.sources_by_uri['myfeed'] with self.admin_access.repo_cnx() as cnx: parser = dfsource._get_parser(cnx) - self.assertEqual(parser.complete_url('http://www.cubicweb.org/CWUser'), - 'http://www.cubicweb.org/CWUser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject') - self.assertEqual(parser.complete_url('http://www.cubicweb.org/cwuser'), - 'http://www.cubicweb.org/cwuser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject') - self.assertEqual(parser.complete_url('http://www.cubicweb.org/cwuser?vid=rdf&relation=hop'), - 'http://www.cubicweb.org/cwuser?relation=hop&relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject&vid=rdf') - self.assertEqual(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&vid=rdf&relation=hop'), - 
'http://www.cubicweb.org/?rql=cwuser&relation=hop&vid=rdf') - self.assertEqual(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&relation=hop'), - 'http://www.cubicweb.org/?rql=cwuser&relation=hop') + self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/CWUser'), + 'http://www.cubicweb.org/CWUser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject') + self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/cwuser'), + 'http://www.cubicweb.org/cwuser?relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject') + self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/cwuser?vid=rdf&relation=hop'), + 'http://www.cubicweb.org/cwuser?relation=hop&relation=tags-object&relation=in_group-subject&relation=in_state-subject&relation=use_email-subject&vid=rdf') + self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&vid=rdf&relation=hop'), + 'http://www.cubicweb.org/?rql=cwuser&relation=hop&vid=rdf') + self.assertURLEquiv(parser.complete_url('http://www.cubicweb.org/?rql=cwuser&relation=hop'), + 'http://www.cubicweb.org/?rql=cwuser&relation=hop') def test_actions(self): diff -r e52efb73f9ee -r 729f36a1bcfa test/unittest_cwconfig.py --- a/test/unittest_cwconfig.py Thu Apr 02 13:54:00 2015 +0200 +++ b/test/unittest_cwconfig.py Wed Apr 22 10:08:14 2015 +0200 @@ -104,11 +104,14 @@ def test_appobjects_path(self): self.config.__class__.CUBES_PATH = [CUSTOM_CUBES_DIR] self.config.adjust_sys_path() - self.assertEqual([unabsolutize(p) for p in self.config.appobjects_path()], - ['entities', 'web/views', 'sobjects', 'hooks', - 'file/entities', 'file/views.py', 'file/hooks', - 'email/entities.py', 'email/views', 'email/hooks.py', - 'test/data/entities.py', 'test/data/views.py']) + path = [unabsolutize(p) for p in self.config.appobjects_path()] + self.assertEqual(path[0], 'entities') + self.assertCountEqual(path[1:4], ['web/views', 'sobjects', 'hooks']) + self.assertEqual(path[4], 'file/entities') + self.assertCountEqual(path[5:7], ['file/views.py', 'file/hooks']) + self.assertEqual(path[7], 'email/entities.py') + self.assertCountEqual(path[8:10], ['email/views', 'email/hooks.py']) + self.assertEqual(path[10:], ['test/data/entities.py', 'test/data/views.py']) def test_cubes_path(self): # make sure we don't import the email cube, but the stdlib email package diff -r e52efb73f9ee -r 729f36a1bcfa test/unittest_schema.py --- a/test/unittest_schema.py Thu Apr 02 13:54:00 2015 +0200 +++ b/test/unittest_schema.py Wed Apr 22 10:08:14 2015 +0200 @@ -360,8 +360,8 @@ schema['produces_and_buys'].rdefs.keys()) self.assertEqual([('Person','Service')], schema['produces_and_buys2'].rdefs.keys()) - self.assertEqual([('Company', 'Service'), ('Person', 'Service')], - schema['reproduce'].rdefs.keys()) + self.assertCountEqual([('Company', 'Service'), ('Person', 'Service')], + schema['reproduce'].rdefs.keys()) # check relation definitions are marked infered rdef = schema['produces_and_buys'].rdefs[('Person','Service')] self.assertTrue(rdef.infered) diff -r e52efb73f9ee -r 729f36a1bcfa test/unittest_utils.py --- a/test/unittest_utils.py Thu Apr 02 13:54:00 2015 +0200 +++ b/test/unittest_utils.py Wed Apr 22 10:08:14 2015 +0200 @@ -58,12 +58,6 @@ parse_repo_uri('myapp')) self.assertEqual(('inmemory', None, 'myapp'), parse_repo_uri('inmemory://myapp')) - self.assertEqual(('pyro', 'pyro-ns-host:pyro-ns-port', '/myapp'), - parse_repo_uri('pyro://pyro-ns-host:pyro-ns-port/myapp')) - 
self.assertEqual(('pyroloc', 'host:port', '/appkey'), - parse_repo_uri('pyroloc://host:port/appkey')) - self.assertEqual(('zmqpickle-tcp', '127.0.0.1:666', ''), - parse_repo_uri('zmqpickle-tcp://127.0.0.1:666')) with self.assertRaises(NotImplementedError): parse_repo_uri('foo://bar') diff -r e52efb73f9ee -r 729f36a1bcfa transaction.py --- a/transaction.py Thu Apr 02 13:54:00 2015 +0200 +++ b/transaction.py Wed Apr 22 10:08:14 2015 +0200 @@ -1,4 +1,4 @@ -# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved. +# copyright 2003-2014 LOGILAB S.A. (Paris, FRANCE), all rights reserved. # contact http://www.logilab.fr/ -- mailto:contact@logilab.fr # # This file is part of CubicWeb. @@ -15,13 +15,7 @@ # # You should have received a copy of the GNU Lesser General Public License along # with CubicWeb. If not, see . -"""undoable transaction objects. - - -This module is in the cubicweb package and not in cubicweb.server because those -objects should be accessible to client through pyro, where the cubicweb.server -package may not be installed. -""" +""" undoable transaction objects. """ __docformat__ = "restructuredtext en" _ = unicode diff -r e52efb73f9ee -r 729f36a1bcfa utils.py --- a/utils.py Thu Apr 02 13:54:00 2015 +0200 +++ b/utils.py Wed Apr 22 10:08:14 2015 +0200 @@ -21,7 +21,6 @@ __docformat__ = "restructuredtext en" -import sys import decimal import datetime import random @@ -553,8 +552,12 @@ """ def _dict2js(d, predictable=False): + if predictable: + it = sorted(d.iteritems()) + else: + it = d.iteritems() res = [key + ': ' + js_dumps(val, predictable) - for key, val in d.iteritems()] + for key, val in it] return '{%s}' % ', '.join(res) def _list2js(l, predictable=False): @@ -578,7 +581,7 @@ return _list2js(something, predictable) if isinstance(something, JSString): return something - return json_dumps(something) + return json_dumps(something, sort_keys=predictable) PERCENT_IN_URLQUOTE_RE = re.compile(r'%(?=[0-9a-fA-F]{2})') def js_href(javascript_code): @@ -608,8 +611,6 @@ """ transform a command line uri into a (protocol, hostport, appid), e.g: -> 'inmemory', None, '' inmemory:// -> 'inmemory', None, '' - pyro://[host][:port] -> 'pyro', 'host:port', None - zmqpickle://[host][:port] -> 'zmqpickle', 'host:port', None """ parseduri = urlparse(uri) scheme = parseduri.scheme @@ -617,8 +618,6 @@ return ('inmemory', None, parseduri.path) if scheme == 'inmemory': return (scheme, None, parseduri.netloc) - if scheme in ('pyro', 'pyroloc') or scheme.startswith('zmqpickle-'): - return (scheme, parseduri.netloc, parseduri.path) raise NotImplementedError('URI protocol not implemented for `%s`' % uri) diff -r e52efb73f9ee -r 729f36a1bcfa view.py --- a/view.py Thu Apr 02 13:54:00 2015 +0200 +++ b/view.py Wed Apr 22 10:08:14 2015 +0200 @@ -20,7 +20,7 @@ __docformat__ = "restructuredtext en" _ = unicode -from cStringIO import StringIO +from io import BytesIO from warnings import warn from functools import partial @@ -101,7 +101,7 @@ return if w is None: if self.binary: - self._stream = stream = StringIO() + self._stream = stream = BytesIO() else: self._stream = stream = UStringIO() w = stream.write @@ -471,7 +471,7 @@ return if w is None: if self.binary: - self._stream = stream = StringIO() + self._stream = stream = BytesIO() else: self._stream = stream = HTMLStream(self._cw) w = stream.write diff -r e52efb73f9ee -r 729f36a1bcfa web/data/cubicweb.ajax.js --- a/web/data/cubicweb.ajax.js Thu Apr 02 13:54:00 2015 +0200 +++ b/web/data/cubicweb.ajax.js Wed Apr 22 10:08:14 2015 +0200 @@ -518,59 
+518,6 @@ }); } -userCallback = cw.utils.deprecatedFunction( - '[3.19] use a plain ajaxfunc instead of user callbacks', - function userCallback(cbname) { - setProgressCursor(); - var d = loadRemote(AJAX_BASE_URL, ajaxFuncArgs('user_callback', null, cbname)); - d.addCallback(resetCursor); - d.addErrback(resetCursor); - d.addErrback(remoteCallFailed); - return d; -}); - -userCallbackThenUpdateUI = cw.utils.deprecatedFunction( - '[3.19] use a plain ajaxfunc instead of user callbacks', - function userCallbackThenUpdateUI(cbname, compid, rql, msg, registry, nodeid) { - var d = userCallback(cbname); - d.addCallback(function() { - $('#' + nodeid).loadxhtml(AJAX_BASE_URL, ajaxFuncArgs('render', {'rql': rql}, - registry, compid), null, 'swap'); - if (msg) { - updateMessage(msg); - } - }); -}); - -userCallbackThenReloadPage = cw.utils.deprecatedFunction( - '[3.19] use a plain ajaxfunc instead of user callbacks', - function userCallbackThenReloadPage(cbname, msg) { - var d = userCallback(cbname); - d.addCallback(function() { - window.location.reload(); - if (msg) { - updateMessage(msg); - } - }); -}); - -/** - * .. function:: unregisterUserCallback(cbname) - * - * unregisters the python function registered on the server's side - * while the page was generated. - */ -unregisterUserCallback = cw.utils.deprecatedFunction( - '[3.19] use a plain ajaxfunc instead of user callbacks', - function unregisterUserCallback(cbname) { - setProgressCursor(); - var d = loadRemote(AJAX_BASE_URL, ajaxFuncArgs('unregister_user_callback', - null, cbname)); - d.addCallback(resetCursor); - d.addErrback(resetCursor); - d.addErrback(remoteCallFailed); -}); - //============= XXX move those functions? ====================================// function openHash() { diff -r e52efb73f9ee -r 729f36a1bcfa web/data/cubicweb.timeline-bundle.js --- a/web/data/cubicweb.timeline-bundle.js Thu Apr 02 13:54:00 2015 +0200 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,10129 +0,0 @@ -/** - * This file contains timeline utilities - * :organization: Logilab - */ - -var SimileAjax_urlPrefix = BASE_URL + 'data/'; -var Timeline_urlPrefix = BASE_URL + 'data/'; - -/* - * Simile Ajax API - * - * Include this file in your HTML file as follows:: - * - * - * - * - */ - -if (typeof SimileAjax == "undefined") { - var SimileAjax = { - loaded: false, - loadingScriptsCount: 0, - error: null, - params: { bundle:"true" } - }; - - SimileAjax.Platform = new Object(); - /* - HACK: We need these 2 things here because we cannot simply append - a "); - return; - } catch (e) { - // fall through - } - } - - var script = doc.createElement("script"); - if (onerror) { - try { script.innerHTML = onerror; } catch(e) {} - script.setAttribute("onerror", onerror); - } - if (charset) { - script.setAttribute("charset", charset); - } - script.type = "text/javascript"; - script.language = "JavaScript"; - script.src = url; - return getHead(doc).appendChild(script); - }; - SimileAjax.includeJavascriptFiles = function(doc, urlPrefix, filenames) { - for (var i = 0; i < filenames.length; i++) { - SimileAjax.includeJavascriptFile(doc, urlPrefix + filenames[i]); - } - SimileAjax.loadingScriptsCount += filenames.length; - // XXX adim SimileAjax.includeJavascriptFile(doc, SimileAjax.urlPrefix + "scripts/signal.js?" 
+ filenames.length); - }; - SimileAjax.includeCssFile = function(doc, url) { - if (doc.body == null) { - try { - doc.write(""); - return; - } catch (e) { - // fall through - } - } - - var link = doc.createElement("link"); - link.setAttribute("rel", "stylesheet"); - link.setAttribute("type", "text/css"); - link.setAttribute("href", url); - getHead(doc).appendChild(link); - }; - SimileAjax.includeCssFiles = function(doc, urlPrefix, filenames) { - for (var i = 0; i < filenames.length; i++) { - SimileAjax.includeCssFile(doc, urlPrefix + filenames[i]); - } - }; - - /** - * Append into urls each string in suffixes after prefixing it with urlPrefix. - * @param {Array} urls - * @param {String} urlPrefix - * @param {Array} suffixes - */ - SimileAjax.prefixURLs = function(urls, urlPrefix, suffixes) { - for (var i = 0; i < suffixes.length; i++) { - urls.push(urlPrefix + suffixes[i]); - } - }; - - /** - * Parse out the query parameters from a URL - * @param {String} url the url to parse, or location.href if undefined - * @param {Object} to optional object to extend with the parameters - * @param {Object} types optional object mapping keys to value types - * (String, Number, Boolean or Array, String by default) - * @return a key/value Object whose keys are the query parameter names - * @type Object - */ - SimileAjax.parseURLParameters = function(url, to, types) { - to = to || {}; - types = types || {}; - - if (typeof url == "undefined") { - url = location.href; - } - var q = url.indexOf("?"); - if (q < 0) { - return to; - } - url = (url+"#").slice(q+1, url.indexOf("#")); // toss the URL fragment - - var params = url.split("&"), param, parsed = {}; - var decode = window.decodeURIComponent || unescape; - for (var i = 0; param = params[i]; i++) { - var eq = param.indexOf("="); - var name = decode(param.slice(0,eq)); - var old = parsed[name]; - if (typeof old == "undefined") { - old = []; - } else if (!(old instanceof Array)) { - old = [old]; - } - parsed[name] = old.concat(decode(param.slice(eq+1))); - } - for (var i in parsed) { - if (!parsed.hasOwnProperty(i)) continue; - var type = types[i] || String; - var data = parsed[i]; - if (!(data instanceof Array)) { - data = [data]; - } - if (type === Boolean && data[0] == "false") { - to[i] = false; // because Boolean("false") === true - } else { - to[i] = type.apply(this, data); - } - } - return to; - }; - - (function() { - var javascriptFiles = [ - "jquery-1.2.6.js", - "platform.js", - "debug.js", - "xmlhttp.js", - "json.js", - "dom.js", - "graphics.js", - "date-time.js", - "string.js", - "html.js", - "data-structure.js", - "units.js", - - "ajax.js", - "history.js", - "window-manager.js" - ]; - var cssFiles = [ - "graphics.css" - ]; - - if (typeof SimileAjax_urlPrefix == "string") { - SimileAjax.urlPrefix = SimileAjax_urlPrefix; - } else { - var url = SimileAjax.findScript(document, "simile-ajax-api.js"); - if (url == null) { - SimileAjax.error = new Error("Failed to derive URL prefix for Simile Ajax API code files"); - return; - } - - SimileAjax.urlPrefix = url.substr(0, url.indexOf("simile-ajax-api.js")); - } - - SimileAjax.parseURLParameters(url, SimileAjax.params, {bundle:Boolean}); -// if (SimileAjax.params.bundle) { -// SimileAjax.includeJavascriptFiles(document, SimileAjax.urlPrefix, [ "simile-ajax-bundle.js" ]); -// } else { -// SimileAjax.includeJavascriptFiles(document, SimileAjax.urlPrefix + "scripts/", javascriptFiles); -// } -// SimileAjax.includeCssFiles(document, SimileAjax.urlPrefix + "styles/", cssFiles); - - SimileAjax.loaded = true; - 
})(); -} -/* - * Platform Utility Functions and Constants - * - */ - -/* This must be called after our jQuery has been loaded - but before control returns to user-code. -*/ -SimileAjax.jQuery = jQuery; -// SimileAjax.jQuery = jQuery.noConflict(true); -if (typeof window["$"] == "undefined") { - window.$ = SimileAjax.jQuery; -} - -SimileAjax.Platform.os = { - isMac: false, - isWin: false, - isWin32: false, - isUnix: false -}; -SimileAjax.Platform.browser = { - isIE: false, - isNetscape: false, - isMozilla: false, - isFirefox: false, - isOpera: false, - isSafari: false, - - majorVersion: 0, - minorVersion: 0 -}; - -(function() { - var an = navigator.appName.toLowerCase(); - var ua = navigator.userAgent.toLowerCase(); - - /* - * Operating system - */ - SimileAjax.Platform.os.isMac = (ua.indexOf('mac') != -1); - SimileAjax.Platform.os.isWin = (ua.indexOf('win') != -1); - SimileAjax.Platform.os.isWin32 = SimileAjax.Platform.isWin && ( - ua.indexOf('95') != -1 || - ua.indexOf('98') != -1 || - ua.indexOf('nt') != -1 || - ua.indexOf('win32') != -1 || - ua.indexOf('32bit') != -1 - ); - SimileAjax.Platform.os.isUnix = (ua.indexOf('x11') != -1); - - /* - * Browser - */ - SimileAjax.Platform.browser.isIE = (an.indexOf("microsoft") != -1); - SimileAjax.Platform.browser.isNetscape = (an.indexOf("netscape") != -1); - SimileAjax.Platform.browser.isMozilla = (ua.indexOf("mozilla") != -1); - SimileAjax.Platform.browser.isFirefox = (ua.indexOf("firefox") != -1); - SimileAjax.Platform.browser.isOpera = (an.indexOf("opera") != -1); - SimileAjax.Platform.browser.isSafari = (an.indexOf("safari") != -1); - - var parseVersionString = function(s) { - var a = s.split("."); - SimileAjax.Platform.browser.majorVersion = parseInt(a[0]); - SimileAjax.Platform.browser.minorVersion = parseInt(a[1]); - }; - var indexOf = function(s, sub, start) { - var i = s.indexOf(sub, start); - return i >= 0 ? 
i : s.length; - }; - - if (SimileAjax.Platform.browser.isMozilla) { - var offset = ua.indexOf("mozilla/"); - if (offset >= 0) { - parseVersionString(ua.substring(offset + 8, indexOf(ua, " ", offset))); - } - } - if (SimileAjax.Platform.browser.isIE) { - var offset = ua.indexOf("msie "); - if (offset >= 0) { - parseVersionString(ua.substring(offset + 5, indexOf(ua, ";", offset))); - } - } - if (SimileAjax.Platform.browser.isNetscape) { - var offset = ua.indexOf("rv:"); - if (offset >= 0) { - parseVersionString(ua.substring(offset + 3, indexOf(ua, ")", offset))); - } - } - if (SimileAjax.Platform.browser.isFirefox) { - var offset = ua.indexOf("firefox/"); - if (offset >= 0) { - parseVersionString(ua.substring(offset + 8, indexOf(ua, " ", offset))); - } - } - - if (!("localeCompare" in String.prototype)) { - String.prototype.localeCompare = function (s) { - if (this < s) return -1; - else if (this > s) return 1; - else return 0; - }; - } -})(); - -SimileAjax.Platform.getDefaultLocale = function() { - return SimileAjax.Platform.clientLocale; -}; -/* - * Debug Utility Functions - * - */ - -SimileAjax.Debug = { - silent: false -}; - -SimileAjax.Debug.log = function(msg) { - var f; - if ("console" in window && "log" in window.console) { // FireBug installed - f = function(msg2) { - console.log(msg2); - } - } else { - f = function(msg2) { - if (!SimileAjax.Debug.silent) { - alert(msg2); - } - } - } - SimileAjax.Debug.log = f; - f(msg); -}; - -SimileAjax.Debug.warn = function(msg) { - var f; - if ("console" in window && "warn" in window.console) { // FireBug installed - f = function(msg2) { - console.warn(msg2); - } - } else { - f = function(msg2) { - if (!SimileAjax.Debug.silent) { - alert(msg2); - } - } - } - SimileAjax.Debug.warn = f; - f(msg); -}; - -SimileAjax.Debug.exception = function(e, msg) { - var f, params = SimileAjax.parseURLParameters(); - if (params.errors == "throw" || SimileAjax.params.errors == "throw") { - f = function(e2, msg2) { - throw(e2); // do not hide from browser's native debugging features - }; - } else if ("console" in window && "error" in window.console) { // FireBug installed - f = function(e2, msg2) { - if (msg2 != null) { - console.error(msg2 + " %o", e2); - } else { - console.error(e2); - } - throw(e2); // do not hide from browser's native debugging features - }; - } else { - f = function(e2, msg2) { - if (!SimileAjax.Debug.silent) { - alert("Caught exception: " + msg2 + "\n\nDetails: " + ("description" in e2 ? e2.description : e2)); - } - throw(e2); // do not hide from browser's native debugging features - }; - } - SimileAjax.Debug.exception = f; - f(e, msg); -}; - -SimileAjax.Debug.objectToString = function(o) { - return SimileAjax.Debug._objectToString(o, ""); -}; - -SimileAjax.Debug._objectToString = function(o, indent) { - var indent2 = indent + " "; - if (typeof o == "object") { - var s = "{"; - for (n in o) { - s += indent2 + n + ": " + SimileAjax.Debug._objectToString(o[n], indent2) + "\n"; - } - s += indent + "}"; - return s; - } else if (typeof o == "array") { - var s = "["; - for (var n = 0; n < o.length; n++) { - s += SimileAjax.Debug._objectToString(o[n], indent2) + "\n"; - } - s += indent + "]"; - return s; - } else { - return o; - } -}; -/** - * @fileOverview XmlHttp utility functions - * @name SimileAjax.XmlHttp - */ - -SimileAjax.XmlHttp = new Object(); - -/** - * Callback for XMLHttp onRequestStateChange. 
- */ -SimileAjax.XmlHttp._onReadyStateChange = function(xmlhttp, fError, fDone) { - switch (xmlhttp.readyState) { - // 1: Request not yet made - // 2: Contact established with server but nothing downloaded yet - // 3: Called multiple while downloading in progress - - // Download complete - case 4: - try { - if (xmlhttp.status == 0 // file:// urls, works on Firefox - || xmlhttp.status == 200 // http:// urls - ) { - if (fDone) { - fDone(xmlhttp); - } - } else { - if (fError) { - fError( - xmlhttp.statusText, - xmlhttp.status, - xmlhttp - ); - } - } - } catch (e) { - SimileAjax.Debug.exception("XmlHttp: Error handling onReadyStateChange", e); - } - break; - } -}; - -/** - * Creates an XMLHttpRequest object. On the first run, this - * function creates a platform-specific function for - * instantiating an XMLHttpRequest object and then replaces - * itself with that function. - */ -SimileAjax.XmlHttp._createRequest = function() { - if (SimileAjax.Platform.browser.isIE) { - var programIDs = [ - "Msxml2.XMLHTTP", - "Microsoft.XMLHTTP", - "Msxml2.XMLHTTP.4.0" - ]; - for (var i = 0; i < programIDs.length; i++) { - try { - var programID = programIDs[i]; - var f = function() { - return new ActiveXObject(programID); - }; - var o = f(); - - // We are replacing the SimileAjax._createXmlHttpRequest - // function with this inner function as we've - // found out that it works. This is so that we - // don't have to do all the testing over again - // on subsequent calls. - SimileAjax.XmlHttp._createRequest = f; - - return o; - } catch (e) { - // silent - } - } - // fall through to try new XMLHttpRequest(); - } - - try { - var f = function() { - return new XMLHttpRequest(); - }; - var o = f(); - - // We are replacing the SimileAjax._createXmlHttpRequest - // function with this inner function as we've - // found out that it works. This is so that we - // don't have to do all the testing over again - // on subsequent calls. - SimileAjax.XmlHttp._createRequest = f; - - return o; - } catch (e) { - throw new Error("Failed to create an XMLHttpRequest object"); - } -}; - -/** - * Performs an asynchronous HTTP GET. - * - * @param {Function} fError a function of the form - function(statusText, statusCode, xmlhttp) - * @param {Function} fDone a function of the form function(xmlhttp) - */ -SimileAjax.XmlHttp.get = function(url, fError, fDone) { - var xmlhttp = SimileAjax.XmlHttp._createRequest(); - - xmlhttp.open("GET", url, true); - xmlhttp.onreadystatechange = function() { - SimileAjax.XmlHttp._onReadyStateChange(xmlhttp, fError, fDone); - }; - xmlhttp.send(null); -}; - -/** - * Performs an asynchronous HTTP POST. - * - * @param {Function} fError a function of the form - function(statusText, statusCode, xmlhttp) - * @param {Function} fDone a function of the form function(xmlhttp) - */ -SimileAjax.XmlHttp.post = function(url, body, fError, fDone) { - var xmlhttp = SimileAjax.XmlHttp._createRequest(); - - xmlhttp.open("POST", url, true); - xmlhttp.onreadystatechange = function() { - SimileAjax.XmlHttp._onReadyStateChange(xmlhttp, fError, fDone); - }; - xmlhttp.send(body); -}; - -SimileAjax.XmlHttp._forceXML = function(xmlhttp) { - try { - xmlhttp.overrideMimeType("text/xml"); - } catch (e) { - xmlhttp.setrequestheader("Content-Type", "text/xml"); - } -};/* - * Copied directly from http://www.json.org/json.js. - */ - -/* - json.js - 2006-04-28 - - This file adds these methods to JavaScript: - - object.toJSONString() - - This method produces a JSON text from an object. The - object must not contain any cyclical references. 
- - array.toJSONString() - - This method produces a JSON text from an array. The - array must not contain any cyclical references. - - string.parseJSON() - - This method parses a JSON text to produce an object or - array. It will return false if there is an error. -*/ - -SimileAjax.JSON = new Object(); - -(function () { - var m = { - '\b': '\\b', - '\t': '\\t', - '\n': '\\n', - '\f': '\\f', - '\r': '\\r', - '"' : '\\"', - '\\': '\\\\' - }; - var s = { - array: function (x) { - var a = ['['], b, f, i, l = x.length, v; - for (i = 0; i < l; i += 1) { - v = x[i]; - f = s[typeof v]; - if (f) { - v = f(v); - if (typeof v == 'string') { - if (b) { - a[a.length] = ','; - } - a[a.length] = v; - b = true; - } - } - } - a[a.length] = ']'; - return a.join(''); - }, - 'boolean': function (x) { - return String(x); - }, - 'null': function (x) { - return "null"; - }, - number: function (x) { - return isFinite(x) ? String(x) : 'null'; - }, - object: function (x) { - if (x) { - if (x instanceof Array) { - return s.array(x); - } - var a = ['{'], b, f, i, v; - for (i in x) { - v = x[i]; - f = s[typeof v]; - if (f) { - v = f(v); - if (typeof v == 'string') { - if (b) { - a[a.length] = ','; - } - a.push(s.string(i), ':', v); - b = true; - } - } - } - a[a.length] = '}'; - return a.join(''); - } - return 'null'; - }, - string: function (x) { - if (/["\\\x00-\x1f]/.test(x)) { - x = x.replace(/([\x00-\x1f\\"])/g, function(a, b) { - var c = m[b]; - if (c) { - return c; - } - c = b.charCodeAt(); - return '\\u00' + - Math.floor(c / 16).toString(16) + - (c % 16).toString(16); - }); - } - return '"' + x + '"'; - } - }; - - SimileAjax.JSON.toJSONString = function(o) { - if (o instanceof Object) { - return s.object(o); - } else if (o instanceof Array) { - return s.array(o); - } else { - return o.toString(); - } - }; - - SimileAjax.JSON.parseJSON = function () { - try { - return !(/[^,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t]/.test( - this.replace(/"(\\.|[^"\\])*"/g, ''))) && - eval('(' + this + ')'); - } catch (e) { - return false; - } - }; -})(); -/* - * DOM Utility Functions - * - */ - -SimileAjax.DOM = new Object(); - -SimileAjax.DOM.registerEventWithObject = function(elmt, eventName, obj, handlerName) { - SimileAjax.DOM.registerEvent(elmt, eventName, function(elmt2, evt, target) { - return obj[handlerName].call(obj, elmt2, evt, target); - }); -}; - -SimileAjax.DOM.registerEvent = function(elmt, eventName, handler) { - var handler2 = function(evt) { - evt = (evt) ? evt : ((event) ? event : null); - if (evt) { - var target = (evt.target) ? - evt.target : ((evt.srcElement) ? evt.srcElement : null); - if (target) { - target = (target.nodeType == 1 || target.nodeType == 9) ? 
- target : target.parentNode; - } - - return handler(elmt, evt, target); - } - return true; - } - - if (SimileAjax.Platform.browser.isIE) { - elmt.attachEvent("on" + eventName, handler2); - } else { - elmt.addEventListener(eventName, handler2, false); - } -}; - -SimileAjax.DOM.getPageCoordinates = function(elmt) { - var left = 0; - var top = 0; - - if (elmt.nodeType != 1) { - elmt = elmt.parentNode; - } - - var elmt2 = elmt; - while (elmt2 != null) { - left += elmt2.offsetLeft; - top += elmt2.offsetTop; - elmt2 = elmt2.offsetParent; - } - - var body = document.body; - while (elmt != null && elmt != body) { - if ("scrollLeft" in elmt) { - left -= elmt.scrollLeft; - top -= elmt.scrollTop; - } - elmt = elmt.parentNode; - } - - return { left: left, top: top }; -}; - -SimileAjax.DOM.getSize = function(elmt) { - var w = this.getStyle(elmt,"width"); - var h = this.getStyle(elmt,"height"); - if (w.indexOf("px") > -1) w = w.replace("px",""); - if (h.indexOf("px") > -1) h = h.replace("px",""); - return { - w: w, - h: h - } -} - -SimileAjax.DOM.getStyle = function(elmt, styleProp) { - if (elmt.currentStyle) { // IE - var style = elmt.currentStyle[styleProp]; - } else if (window.getComputedStyle) { // standard DOM - var style = document.defaultView.getComputedStyle(elmt, null).getPropertyValue(styleProp); - } else { - var style = ""; - } - return style; -} - -SimileAjax.DOM.getEventRelativeCoordinates = function(evt, elmt) { - if (SimileAjax.Platform.browser.isIE) { - if (evt.type == "mousewheel") { - var coords = SimileAjax.DOM.getPageCoordinates(elmt); - return { - x: evt.clientX - coords.left, - y: evt.clientY - coords.top - }; - } else { - return { - x: evt.offsetX, - y: evt.offsetY - }; - } - } else { - var coords = SimileAjax.DOM.getPageCoordinates(elmt); - - if ((evt.type == "DOMMouseScroll") && - SimileAjax.Platform.browser.isFirefox && - (SimileAjax.Platform.browser.majorVersion == 2)) { - // Due to: https://bugzilla.mozilla.org/show_bug.cgi?id=352179 - - return { - x: evt.screenX - coords.left, - y: evt.screenY - coords.top - }; - } else { - return { - x: evt.pageX - coords.left, - y: evt.pageY - coords.top - }; - } - } -}; - -SimileAjax.DOM.getEventPageCoordinates = function(evt) { - if (SimileAjax.Platform.browser.isIE) { - return { - x: evt.clientX + document.body.scrollLeft, - y: evt.clientY + document.body.scrollTop - }; - } else { - return { - x: evt.pageX, - y: evt.pageY - }; - } -}; - -SimileAjax.DOM.hittest = function(x, y, except) { - return SimileAjax.DOM._hittest(document.body, x, y, except); -}; - -SimileAjax.DOM._hittest = function(elmt, x, y, except) { - var childNodes = elmt.childNodes; - outer: for (var i = 0; i < childNodes.length; i++) { - var childNode = childNodes[i]; - for (var j = 0; j < except.length; j++) { - if (childNode == except[j]) { - continue outer; - } - } - - if (childNode.offsetWidth == 0 && childNode.offsetHeight == 0) { - /* - * Sometimes SPAN elements have zero width and height but - * they have children like DIVs that cover non-zero areas. 
- */ - var hitNode = SimileAjax.DOM._hittest(childNode, x, y, except); - if (hitNode != childNode) { - return hitNode; - } - } else { - var top = 0; - var left = 0; - - var node = childNode; - while (node) { - top += node.offsetTop; - left += node.offsetLeft; - node = node.offsetParent; - } - - if (left <= x && top <= y && (x - left) < childNode.offsetWidth && (y - top) < childNode.offsetHeight) { - return SimileAjax.DOM._hittest(childNode, x, y, except); - } else if (childNode.nodeType == 1 && childNode.tagName == "TR") { - /* - * Table row might have cells that span several rows. - */ - var childNode2 = SimileAjax.DOM._hittest(childNode, x, y, except); - if (childNode2 != childNode) { - return childNode2; - } - } - } - } - return elmt; -}; - -SimileAjax.DOM.cancelEvent = function(evt) { - evt.returnValue = false; - evt.cancelBubble = true; - if ("preventDefault" in evt) { - evt.preventDefault(); - } -}; - -SimileAjax.DOM.appendClassName = function(elmt, className) { - var classes = elmt.className.split(" "); - for (var i = 0; i < classes.length; i++) { - if (classes[i] == className) { - return; - } - } - classes.push(className); - elmt.className = classes.join(" "); -}; - -SimileAjax.DOM.createInputElement = function(type) { - var div = document.createElement("div"); - div.innerHTML = ""; - - return div.firstChild; -}; - -SimileAjax.DOM.createDOMFromTemplate = function(template) { - var result = {}; - result.elmt = SimileAjax.DOM._createDOMFromTemplate(template, result, null); - - return result; -}; - -SimileAjax.DOM._createDOMFromTemplate = function(templateNode, result, parentElmt) { - if (templateNode == null) { - /* - var node = doc.createTextNode("--null--"); - if (parentElmt != null) { - parentElmt.appendChild(node); - } - return node; - */ - return null; - } else if (typeof templateNode != "object") { - var node = document.createTextNode(templateNode); - if (parentElmt != null) { - parentElmt.appendChild(node); - } - return node; - } else { - var elmt = null; - if ("tag" in templateNode) { - var tag = templateNode.tag; - if (parentElmt != null) { - if (tag == "tr") { - elmt = parentElmt.insertRow(parentElmt.rows.length); - } else if (tag == "td") { - elmt = parentElmt.insertCell(parentElmt.cells.length); - } - } - if (elmt == null) { - elmt = tag == "input" ? - SimileAjax.DOM.createInputElement(templateNode.type) : - document.createElement(tag); - - if (parentElmt != null) { - parentElmt.appendChild(elmt); - } - } - } else { - elmt = templateNode.elmt; - if (parentElmt != null) { - parentElmt.appendChild(elmt); - } - } - - for (var attribute in templateNode) { - var value = templateNode[attribute]; - - if (attribute == "field") { - result[value] = elmt; - - } else if (attribute == "className") { - elmt.className = value; - } else if (attribute == "id") { - elmt.id = value; - } else if (attribute == "title") { - elmt.title = value; - } else if (attribute == "type" && elmt.tagName == "input") { - // do nothing - } else if (attribute == "style") { - for (n in value) { - var v = value[n]; - if (n == "float") { - n = SimileAjax.Platform.browser.isIE ? 
"styleFloat" : "cssFloat"; - } - elmt.style[n] = v; - } - } else if (attribute == "children") { - for (var i = 0; i < value.length; i++) { - SimileAjax.DOM._createDOMFromTemplate(value[i], result, elmt); - } - } else if (attribute != "tag" && attribute != "elmt") { - elmt.setAttribute(attribute, value); - } - } - return elmt; - } -} - -SimileAjax.DOM._cachedParent = null; -SimileAjax.DOM.createElementFromString = function(s) { - if (SimileAjax.DOM._cachedParent == null) { - SimileAjax.DOM._cachedParent = document.createElement("div"); - } - SimileAjax.DOM._cachedParent.innerHTML = s; - return SimileAjax.DOM._cachedParent.firstChild; -}; - -SimileAjax.DOM.createDOMFromString = function(root, s, fieldElmts) { - var elmt = typeof root == "string" ? document.createElement(root) : root; - elmt.innerHTML = s; - - var dom = { elmt: elmt }; - SimileAjax.DOM._processDOMChildrenConstructedFromString(dom, elmt, fieldElmts != null ? fieldElmts : {} ); - - return dom; -}; - -SimileAjax.DOM._processDOMConstructedFromString = function(dom, elmt, fieldElmts) { - var id = elmt.id; - if (id != null && id.length > 0) { - elmt.removeAttribute("id"); - if (id in fieldElmts) { - var parentElmt = elmt.parentNode; - parentElmt.insertBefore(fieldElmts[id], elmt); - parentElmt.removeChild(elmt); - - dom[id] = fieldElmts[id]; - return; - } else { - dom[id] = elmt; - } - } - - if (elmt.hasChildNodes()) { - SimileAjax.DOM._processDOMChildrenConstructedFromString(dom, elmt, fieldElmts); - } -}; - -SimileAjax.DOM._processDOMChildrenConstructedFromString = function(dom, elmt, fieldElmts) { - var node = elmt.firstChild; - while (node != null) { - var node2 = node.nextSibling; - if (node.nodeType == 1) { - SimileAjax.DOM._processDOMConstructedFromString(dom, node, fieldElmts); - } - node = node2; - } -}; -/** - * @fileOverview Graphics utility functions and constants - * @name SimileAjax.Graphics - */ - -SimileAjax.Graphics = new Object(); - -/** - * A boolean value indicating whether PNG translucency is supported on the - * user's browser or not. - * - * @type Boolean - */ -SimileAjax.Graphics.pngIsTranslucent = (!SimileAjax.Platform.browser.isIE) || (SimileAjax.Platform.browser.majorVersion > 6); -if (!SimileAjax.Graphics.pngIsTranslucent) { - SimileAjax.includeCssFile(document, SimileAjax.urlPrefix + "styles/graphics-ie6.css"); -} - -/* - * Opacity, translucency - * - */ -SimileAjax.Graphics._createTranslucentImage1 = function(url, verticalAlign) { - var elmt = document.createElement("img"); - elmt.setAttribute("src", url); - if (verticalAlign != null) { - elmt.style.verticalAlign = verticalAlign; - } - return elmt; -}; -SimileAjax.Graphics._createTranslucentImage2 = function(url, verticalAlign) { - var elmt = document.createElement("img"); - elmt.style.width = "1px"; // just so that IE will calculate the size property - elmt.style.height = "1px"; - elmt.style.filter = "progid:DXImageTransform.Microsoft.AlphaImageLoader(src='" + url +"', sizingMethod='image')"; - elmt.style.verticalAlign = (verticalAlign != null) ? verticalAlign : "middle"; - return elmt; -}; - -/** - * Creates a DOM element for an img tag using the URL given. This - * is a convenience method that automatically includes the necessary CSS to - * allow for translucency, even on IE. 
- * - * @function - * @param {String} url the URL to the image - * @param {String} verticalAlign the CSS value for the image's vertical-align - * @return {Element} a DOM element containing the img tag - */ -SimileAjax.Graphics.createTranslucentImage = SimileAjax.Graphics.pngIsTranslucent ? - SimileAjax.Graphics._createTranslucentImage1 : - SimileAjax.Graphics._createTranslucentImage2; - -SimileAjax.Graphics._createTranslucentImageHTML1 = function(url, verticalAlign) { - return ""; -}; -SimileAjax.Graphics._createTranslucentImageHTML2 = function(url, verticalAlign) { - var style = - "width: 1px; height: 1px; " + - "filter:progid:DXImageTransform.Microsoft.AlphaImageLoader(src='" + url +"', sizingMethod='image');" + - (verticalAlign != null ? " vertical-align: " + verticalAlign + ";" : ""); - - return ""; -}; - -/** - * Creates an HTML string for an img tag using the URL given. - * This is a convenience method that automatically includes the necessary CSS - * to allow for translucency, even on IE. - * - * @function - * @param {String} url the URL to the image - * @param {String} verticalAlign the CSS value for the image's vertical-align - * @return {String} a string containing the img tag - */ -SimileAjax.Graphics.createTranslucentImageHTML = SimileAjax.Graphics.pngIsTranslucent ? - SimileAjax.Graphics._createTranslucentImageHTML1 : - SimileAjax.Graphics._createTranslucentImageHTML2; - -/** - * Sets the opacity on the given DOM element. - * - * @param {Element} elmt the DOM element to set the opacity on - * @param {Number} opacity an integer from 0 to 100 specifying the opacity - */ -SimileAjax.Graphics.setOpacity = function(elmt, opacity) { - if (SimileAjax.Platform.browser.isIE) { - elmt.style.filter = "progid:DXImageTransform.Microsoft.Alpha(Style=0,Opacity=" + opacity + ")"; - } else { - var o = (opacity / 100).toString(); - elmt.style.opacity = o; - elmt.style.MozOpacity = o; - } -}; - -/* - * Bubble - * - */ - -SimileAjax.Graphics.bubbleConfig = { - containerCSSClass: "simileAjax-bubble-container", - innerContainerCSSClass: "simileAjax-bubble-innerContainer", - contentContainerCSSClass: "simileAjax-bubble-contentContainer", - - borderGraphicSize: 50, - borderGraphicCSSClassPrefix: "simileAjax-bubble-border-", - - arrowGraphicTargetOffset: 33, // from tip of arrow to the side of the graphic that touches the content of the bubble - arrowGraphicLength: 100, // dimension of arrow graphic along the direction that the arrow points - arrowGraphicWidth: 49, // dimension of arrow graphic perpendicular to the direction that the arrow points - arrowGraphicCSSClassPrefix: "simileAjax-bubble-arrow-", - - closeGraphicCSSClass: "simileAjax-bubble-close", - - extraPadding: 20 -}; - -/** - * Creates a nice, rounded bubble popup with the given content in a div, - * page coordinates and a suggested width. The bubble will point to the - * location on the page as described by pageX and pageY. All measurements - * should be given in pixels. - * - * @param {Element} the content div - * @param {Number} pageX the x coordinate of the point to point to - * @param {Number} pageY the y coordinate of the point to point to - * @param {Number} contentWidth a suggested width of the content - * @param {String} orientation a string ("top", "bottom", "left", or "right") - * that describes the orientation of the arrow on the bubble - * @param {Number} maxHeight. Add a scrollbar div if bubble would be too tall. 
- * Default of 0 or null means no maximum - */ -SimileAjax.Graphics.createBubbleForContentAndPoint = function( - div, pageX, pageY, contentWidth, orientation, maxHeight) { - if (typeof contentWidth != "number") { - contentWidth = 300; - } - if (typeof maxHeight != "number") { - maxHeight = 0; - } - - div.style.position = "absolute"; - div.style.left = "-5000px"; - div.style.top = "0px"; - div.style.width = contentWidth + "px"; - document.body.appendChild(div); - - window.setTimeout(function() { - var width = div.scrollWidth + 10; - var height = div.scrollHeight + 10; - var scrollDivW = 0; // width of the possible inner container when we want vertical scrolling - if (maxHeight > 0 && height > maxHeight) { - height = maxHeight; - scrollDivW = width - 25; - } - - var bubble = SimileAjax.Graphics.createBubbleForPoint(pageX, pageY, width, height, orientation); - - document.body.removeChild(div); - div.style.position = "static"; - div.style.left = ""; - div.style.top = ""; - - // create a scroll div if needed - if (scrollDivW > 0) { - var scrollDiv = document.createElement("div"); - div.style.width = ""; - scrollDiv.style.width = scrollDivW + "px"; - scrollDiv.appendChild(div); - bubble.content.appendChild(scrollDiv); - } else { - div.style.width = width + "px"; - bubble.content.appendChild(div); - } - }, 200); -}; - -/** - * Creates a nice, rounded bubble popup with the given page coordinates and - * content dimensions. The bubble will point to the location on the page - * as described by pageX and pageY. All measurements should be given in - * pixels. - * - * @param {Number} pageX the x coordinate of the point to point to - * @param {Number} pageY the y coordinate of the point to point to - * @param {Number} contentWidth the width of the content box in the bubble - * @param {Number} contentHeight the height of the content box in the bubble - * @param {String} orientation a string ("top", "bottom", "left", or "right") - * that describes the orientation of the arrow on the bubble - * @return {Element} a DOM element for the newly created bubble - */ -SimileAjax.Graphics.createBubbleForPoint = function(pageX, pageY, contentWidth, contentHeight, orientation) { - contentWidth = parseInt(contentWidth, 10); // harden against bad input bugs - contentHeight = parseInt(contentHeight, 10); // getting numbers-as-strings - - var bubbleConfig = SimileAjax.Graphics.bubbleConfig; - var pngTransparencyClassSuffix = - SimileAjax.Graphics.pngIsTranslucent ? 
"pngTranslucent" : "pngNotTranslucent"; - - var bubbleWidth = contentWidth + 2 * bubbleConfig.borderGraphicSize; - var bubbleHeight = contentHeight + 2 * bubbleConfig.borderGraphicSize; - - var generatePngSensitiveClass = function(className) { - return className + " " + className + "-" + pngTransparencyClassSuffix; - }; - - /* - * Render container divs - */ - var div = document.createElement("div"); - div.className = generatePngSensitiveClass(bubbleConfig.containerCSSClass); - div.style.width = contentWidth + "px"; - div.style.height = contentHeight + "px"; - - var divInnerContainer = document.createElement("div"); - divInnerContainer.className = generatePngSensitiveClass(bubbleConfig.innerContainerCSSClass); - div.appendChild(divInnerContainer); - - /* - * Create layer for bubble - */ - var close = function() { - if (!bubble._closed) { - document.body.removeChild(bubble._div); - bubble._doc = null; - bubble._div = null; - bubble._content = null; - bubble._closed = true; - } - } - var bubble = { _closed: false }; - var layer = SimileAjax.WindowManager.pushLayer(close, true, div); - bubble._div = div; - bubble.close = function() { SimileAjax.WindowManager.popLayer(layer); } - - /* - * Render border graphics - */ - var createBorder = function(classNameSuffix) { - var divBorderGraphic = document.createElement("div"); - divBorderGraphic.className = generatePngSensitiveClass(bubbleConfig.borderGraphicCSSClassPrefix + classNameSuffix); - divInnerContainer.appendChild(divBorderGraphic); - }; - createBorder("top-left"); - createBorder("top-right"); - createBorder("bottom-left"); - createBorder("bottom-right"); - createBorder("left"); - createBorder("right"); - createBorder("top"); - createBorder("bottom"); - - /* - * Render content - */ - var divContentContainer = document.createElement("div"); - divContentContainer.className = generatePngSensitiveClass(bubbleConfig.contentContainerCSSClass); - divInnerContainer.appendChild(divContentContainer); - bubble.content = divContentContainer; - - /* - * Render close button - */ - var divClose = document.createElement("div"); - divClose.className = generatePngSensitiveClass(bubbleConfig.closeGraphicCSSClass); - divInnerContainer.appendChild(divClose); - SimileAjax.WindowManager.registerEventWithObject(divClose, "click", bubble, "close"); - - (function() { - var dims = SimileAjax.Graphics.getWindowDimensions(); - var docWidth = dims.w; - var docHeight = dims.h; - - var halfArrowGraphicWidth = Math.ceil(bubbleConfig.arrowGraphicWidth / 2); - - var createArrow = function(classNameSuffix) { - var divArrowGraphic = document.createElement("div"); - divArrowGraphic.className = generatePngSensitiveClass(bubbleConfig.arrowGraphicCSSClassPrefix + "point-" + classNameSuffix); - divInnerContainer.appendChild(divArrowGraphic); - return divArrowGraphic; - }; - - if (pageX - halfArrowGraphicWidth - bubbleConfig.borderGraphicSize - bubbleConfig.extraPadding > 0 && - pageX + halfArrowGraphicWidth + bubbleConfig.borderGraphicSize + bubbleConfig.extraPadding < docWidth) { - - /* - * Bubble can be positioned above or below the target point. - */ - - var left = pageX - Math.round(contentWidth / 2); - left = pageX < (docWidth / 2) ? 
- Math.max(left, bubbleConfig.extraPadding + bubbleConfig.borderGraphicSize) : - Math.min(left, docWidth - bubbleConfig.extraPadding - bubbleConfig.borderGraphicSize - contentWidth); - - if ((orientation && orientation == "top") || - (!orientation && - (pageY - - bubbleConfig.arrowGraphicTargetOffset - - contentHeight - - bubbleConfig.borderGraphicSize - - bubbleConfig.extraPadding > 0))) { - - /* - * Position bubble above the target point. - */ - - var divArrow = createArrow("down"); - divArrow.style.left = (pageX - halfArrowGraphicWidth - left) + "px"; - - div.style.left = left + "px"; - div.style.top = (pageY - bubbleConfig.arrowGraphicTargetOffset - contentHeight) + "px"; - - return; - } else if ((orientation && orientation == "bottom") || - (!orientation && - (pageY - + bubbleConfig.arrowGraphicTargetOffset - + contentHeight - + bubbleConfig.borderGraphicSize - + bubbleConfig.extraPadding < docHeight))) { - - /* - * Position bubble below the target point. - */ - - var divArrow = createArrow("up"); - divArrow.style.left = (pageX - halfArrowGraphicWidth - left) + "px"; - - div.style.left = left + "px"; - div.style.top = (pageY + bubbleConfig.arrowGraphicTargetOffset) + "px"; - - return; - } - } - - var top = pageY - Math.round(contentHeight / 2); - top = pageY < (docHeight / 2) ? - Math.max(top, bubbleConfig.extraPadding + bubbleConfig.borderGraphicSize) : - Math.min(top, docHeight - bubbleConfig.extraPadding - bubbleConfig.borderGraphicSize - contentHeight); - - if ((orientation && orientation == "left") || - (!orientation && - (pageX - - bubbleConfig.arrowGraphicTargetOffset - - contentWidth - - bubbleConfig.borderGraphicSize - - bubbleConfig.extraPadding > 0))) { - - /* - * Position bubble left of the target point. - */ - - var divArrow = createArrow("right"); - divArrow.style.top = (pageY - halfArrowGraphicWidth - top) + "px"; - - div.style.top = top + "px"; - div.style.left = (pageX - bubbleConfig.arrowGraphicTargetOffset - contentWidth) + "px"; - } else { - - /* - * Position bubble right of the target point, as the last resort. - */ - - var divArrow = createArrow("left"); - divArrow.style.top = (pageY - halfArrowGraphicWidth - top) + "px"; - - div.style.top = top + "px"; - div.style.left = (pageX + bubbleConfig.arrowGraphicTargetOffset) + "px"; - } - })(); - - document.body.appendChild(div); - - return bubble; -}; - -SimileAjax.Graphics.getWindowDimensions = function() { - if (typeof window.innerHeight == 'number') { - return { w:window.innerWidth, h:window.innerHeight }; // Non-IE - } else if (document.documentElement && document.documentElement.clientHeight) { - return { // IE6+, in "standards compliant mode" - w:document.documentElement.clientWidth, - h:document.documentElement.clientHeight - }; - } else if (document.body && document.body.clientHeight) { - return { // IE 4 compatible - w:document.body.clientWidth, - h:document.body.clientHeight - }; - } -}; - - -/** - * Creates a floating, rounded message bubble in the center of the window for - * displaying modal information, e.g. "Loading..." 
- * - * @param {Document} doc the root document for the page to render on - * @param {Object} an object with two properties, contentDiv and containerDiv, - * consisting of the newly created DOM elements - */ -SimileAjax.Graphics.createMessageBubble = function(doc) { - var containerDiv = doc.createElement("div"); - if (SimileAjax.Graphics.pngIsTranslucent) { - var topDiv = doc.createElement("div"); - topDiv.style.height = "33px"; - topDiv.style.background = "url(" + SimileAjax.urlPrefix + "images/message-top-left.png) top left no-repeat"; - topDiv.style.paddingLeft = "44px"; - containerDiv.appendChild(topDiv); - - var topRightDiv = doc.createElement("div"); - topRightDiv.style.height = "33px"; - topRightDiv.style.background = "url(" + SimileAjax.urlPrefix + "images/message-top-right.png) top right no-repeat"; - topDiv.appendChild(topRightDiv); - - var middleDiv = doc.createElement("div"); - middleDiv.style.background = "url(" + SimileAjax.urlPrefix + "images/message-left.png) top left repeat-y"; - middleDiv.style.paddingLeft = "44px"; - containerDiv.appendChild(middleDiv); - - var middleRightDiv = doc.createElement("div"); - middleRightDiv.style.background = "url(" + SimileAjax.urlPrefix + "images/message-right.png) top right repeat-y"; - middleRightDiv.style.paddingRight = "44px"; - middleDiv.appendChild(middleRightDiv); - - var contentDiv = doc.createElement("div"); - middleRightDiv.appendChild(contentDiv); - - var bottomDiv = doc.createElement("div"); - bottomDiv.style.height = "55px"; - bottomDiv.style.background = "url(" + SimileAjax.urlPrefix + "images/message-bottom-left.png) bottom left no-repeat"; - bottomDiv.style.paddingLeft = "44px"; - containerDiv.appendChild(bottomDiv); - - var bottomRightDiv = doc.createElement("div"); - bottomRightDiv.style.height = "55px"; - bottomRightDiv.style.background = "url(" + SimileAjax.urlPrefix + "images/message-bottom-right.png) bottom right no-repeat"; - bottomDiv.appendChild(bottomRightDiv); - } else { - containerDiv.style.border = "2px solid #7777AA"; - containerDiv.style.padding = "20px"; - containerDiv.style.background = "white"; - SimileAjax.Graphics.setOpacity(containerDiv, 90); - - var contentDiv = doc.createElement("div"); - containerDiv.appendChild(contentDiv); - } - - return { - containerDiv: containerDiv, - contentDiv: contentDiv - }; -}; - -/* - * Animation - * - */ - -/** - * Creates an animation for a function, and an interval of values. The word - * "animation" here is used in the sense of repeatedly calling a function with - * a current value from within an interval, and a delta value. - * - * @param {Function} f a function to be called every 50 milliseconds throughout - * the animation duration, of the form f(current, delta), where current is - * the current value within the range and delta is the current change. - * @param {Number} from a starting value - * @param {Number} to an ending value - * @param {Number} duration the duration of the animation in milliseconds - * @param {Function} [cont] an optional function that is called at the end of - * the animation, i.e. a continuation. - * @return {SimileAjax.Graphics._Animation} a new animation object - */ -SimileAjax.Graphics.createAnimation = function(f, from, to, duration, cont) { - return new SimileAjax.Graphics._Animation(f, from, to, duration, cont); -}; - -SimileAjax.Graphics._Animation = function(f, from, to, duration, cont) { - this.f = f; - this.cont = (typeof cont == "function") ? 
cont : function() {}; - - this.from = from; - this.to = to; - this.current = from; - - this.duration = duration; - this.start = new Date().getTime(); - this.timePassed = 0; -}; - -/** - * Runs this animation. - */ -SimileAjax.Graphics._Animation.prototype.run = function() { - var a = this; - window.setTimeout(function() { a.step(); }, 50); -}; - -/** - * Increments this animation by one step, and then continues the animation with - * run(). - */ -SimileAjax.Graphics._Animation.prototype.step = function() { - this.timePassed += 50; - - var timePassedFraction = this.timePassed / this.duration; - var parameterFraction = -Math.cos(timePassedFraction * Math.PI) / 2 + 0.5; - var current = parameterFraction * (this.to - this.from) + this.from; - - try { - this.f(current, current - this.current); - } catch (e) { - } - this.current = current; - - if (this.timePassed < this.duration) { - this.run(); - } else { - this.f(this.to, 0); - this["cont"](); - } -}; - -/* - * CopyPasteButton - * - * Adapted from http://spaces.live.com/editorial/rayozzie/demo/liveclip/liveclipsample/techPreview.html. - * - */ - -/** - * Creates a button and textarea for displaying structured data and copying it - * to the clipboard. The data is dynamically generated by the given - * createDataFunction parameter. - * - * @param {String} image an image URL to use as the background for the - * generated box - * @param {Number} width the width in pixels of the generated box - * @param {Number} height the height in pixels of the generated box - * @param {Function} createDataFunction a function that is called with no - * arguments to generate the structured data - * @return a new DOM element - */ -SimileAjax.Graphics.createStructuredDataCopyButton = function(image, width, height, createDataFunction) { - var div = document.createElement("div"); - div.style.position = "relative"; - div.style.display = "inline"; - div.style.width = width + "px"; - div.style.height = height + "px"; - div.style.overflow = "hidden"; - div.style.margin = "2px"; - - if (SimileAjax.Graphics.pngIsTranslucent) { - div.style.background = "url(" + image + ") no-repeat"; - } else { - div.style.filter = "progid:DXImageTransform.Microsoft.AlphaImageLoader(src='" + image +"', sizingMethod='image')"; - } - - var style; - if (SimileAjax.Platform.browser.isIE) { - style = "filter:alpha(opacity=0)"; - } else { - style = "opacity: 0"; - } - div.innerHTML = "