# HG changeset patch
# User Adrien Di Mascio
# Date 1225896770 -3600
# Node ID b97547f5f1fa96cb25eab1b10d877db8ae6dedbf
Showtime !
diff -r 000000000000 -r b97547f5f1fa COPYING
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/COPYING Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,165 @@
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc.
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+ This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+ 0. Additional Definitions.
+
+ As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+ "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+ An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+ A "Combined Work" is a work produced by combining or linking an
+Application with the Library. The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+ The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+ The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+ 1. Exception to Section 3 of the GNU GPL.
+
+ You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+ 2. Conveying Modified Versions.
+
+ If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+ a) under this License, provided that you make a good faith effort to
+ ensure that, in the event an Application does not supply the
+ function or data, the facility still operates, and performs
+ whatever part of its purpose remains meaningful, or
+
+ b) under the GNU GPL, with none of the additional permissions of
+ this License applicable to that copy.
+
+ 3. Object Code Incorporating Material from Library Header Files.
+
+ The object code form of an Application may incorporate material from
+a header file that is part of the Library. You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+ a) Give prominent notice with each copy of the object code that the
+ Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the object code with a copy of the GNU GPL and this license
+ document.
+
+ 4. Combined Works.
+
+ You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+ a) Give prominent notice with each copy of the Combined Work that
+ the Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the Combined Work with a copy of the GNU GPL and this license
+ document.
+
+ c) For a Combined Work that displays copyright notices during
+ execution, include the copyright notice for the Library among
+ these notices, as well as a reference directing the user to the
+ copies of the GNU GPL and this license document.
+
+ d) Do one of the following:
+
+ 0) Convey the Minimal Corresponding Source under the terms of this
+ License, and the Corresponding Application Code in a form
+ suitable for, and under terms that permit, the user to
+ recombine or relink the Application with a modified version of
+ the Linked Version to produce a modified Combined Work, in the
+ manner specified by section 6 of the GNU GPL for conveying
+ Corresponding Source.
+
+ 1) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (a) uses at run time
+ a copy of the Library already present on the user's computer
+ system, and (b) will operate properly with a modified version
+ of the Library that is interface-compatible with the Linked
+ Version.
+
+ e) Provide Installation Information, but only if you would otherwise
+ be required to provide such information under section 6 of the
+ GNU GPL, and only to the extent that such information is
+ necessary to install and execute a modified version of the
+ Combined Work produced by recombining or relinking the
+ Application with a modified version of the Linked Version. (If
+ you use option 4d0, the Installation Information must accompany
+ the Minimal Corresponding Source and Corresponding Application
+ Code. If you use option 4d1, you must provide the Installation
+ Information in the manner specified by section 6 of the GNU GPL
+ for conveying Corresponding Source.)
+
+ 5. Combined Libraries.
+
+ You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+ a) Accompany the combined library with a copy of the same work based
+ on the Library, uncombined with any other library facilities,
+ conveyed under the terms of this License.
+
+ b) Give prominent notice with the combined library that part of it
+ is a work based on the Library, and explaining where to find the
+ accompanying uncombined form of the same work.
+
+ 6. Revised Versions of the GNU Lesser General Public License.
+
+ The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+ If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
diff -r 000000000000 -r b97547f5f1fa MANIFEST
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/MANIFEST Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,645 @@
+README
+pylintrc
+setup.py
+./__init__.py
+./__pkginfo__.py
+./_exceptions.py
+./cwconfig.py
+./cwctl.py
+./cwvreg.py
+./dbapi.py
+./gettext.py
+./hercule.py
+./interfaces.py
+./md5crypt.py
+./rset.py
+./schema.py
+./schemaviewer.py
+./toolsutils.py
+./vregistry.py
+./common/__init__.py
+./common/appobject.py
+./common/entity.py
+./common/html4zope.py
+./common/i18n.py
+./common/mail.py
+./common/migration.py
+./common/mixins.py
+./common/mttransforms.py
+./common/registerers.py
+./common/rest.py
+./common/schema.py
+./common/selectors.py
+./common/tal.py
+./common/uilib.py
+./common/utils.py
+./common/view.py
+./common/test/unittest_entity.py
+./common/test/unittest_mail.py
+./common/test/unittest_migration.py
+./common/test/unittest_rest.py
+./common/test/unittest_uilib.py
+./common/test/unittest_utils.py
+./devtools/__init__.py
+./devtools/_apptest.py
+./devtools/apptest.py
+./devtools/cwtwill.py
+./devtools/devctl.py
+./devtools/fake.py
+./devtools/fill.py
+./devtools/htmlparser.py
+./devtools/livetest.py
+./devtools/migrtest.py
+./devtools/pkginfo.py
+./devtools/repotest.py
+./devtools/stresstester.py
+./devtools/testlib.py
+./devtools/test/runtests.py
+./devtools/test/unittest_dbfill.py
+./devtools/test/unittest_fill.py
+./devtools/test/unittest_testlib.py
+./entities/__init__.py
+./entities/authobjs.py
+./entities/lib.py
+./entities/schemaobjs.py
+./entities/wfobjs.py
+./entities/test/unittest_base.py
+./etwist/__init__.py
+./etwist/request.py
+./etwist/server.py
+./etwist/twconfig.py
+./etwist/twctl.py
+./goa/__init__.py
+./goa/db.py
+./goa/dbinit.py
+./goa/dbmyams.py
+./goa/gaesource.py
+./goa/goaconfig.py
+./goa/goactl.py
+./goa/goavreg.py
+./goa/rqlinterpreter.py
+./goa/testlib.py
+./goa/appobjects/__init__.py
+./goa/appobjects/components.py
+./goa/appobjects/dbmgmt.py
+./goa/appobjects/gauthservice.py
+./goa/appobjects/sessions.py
+./goa/overrides/__init__.py
+./goa/overrides/mttransforms.py
+./goa/overrides/rqlannotation.py
+./goa/overrides/server__init__.py
+./goa/overrides/server_utils.py
+./goa/overrides/toolsutils.py
+./goa/test/pytestconf.py
+./goa/test/unittest_db.py
+./goa/test/unittest_editcontroller.py
+./goa/test/unittest_metadata.py
+./goa/test/unittest_rql.py
+./goa/test/unittest_schema.py
+./goa/test/unittest_views.py
+./goa/test/data/__init__.py
+./goa/test/data/schema.py
+./goa/test/data/settings.py
+./goa/test/data/views.py
+./goa/tools/__init__.py
+./goa/tools/generate_schema_img.py
+./goa/tools/i18n.py
+./goa/tools/laxctl.py
+./server/__init__.py
+./server/checkintegrity.py
+./server/hookhelper.py
+./server/hooks.py
+./server/hooksmanager.py
+./server/migractions.py
+./server/msplanner.py
+./server/mssteps.py
+./server/pool.py
+./server/querier.py
+./server/repository.py
+./server/rqlannotation.py
+./server/rqlrewrite.py
+./server/schemahooks.py
+./server/schemaserial.py
+./server/securityhooks.py
+./server/server.py
+./server/serverconfig.py
+./server/serverctl.py
+./server/session.py
+./server/sqlutils.py
+./server/ssplanner.py
+./server/utils.py
+./server/sources/__init__.py
+./server/sources/extlite.py
+./server/sources/ldapuser.py
+./server/sources/native.py
+./server/sources/pyrorql.py
+./server/sources/rql2sql.py
+./server/test/runtests.py
+./server/test/unittest_checkintegrity.py
+./server/test/unittest_config.py
+./server/test/unittest_hookhelper.py
+./server/test/unittest_hooks.py
+./server/test/unittest_hooksmanager.py
+./server/test/unittest_migractions.py
+./server/test/unittest_querier.py
+./server/test/unittest_repository.py
+./server/test/unittest_rql2sql.py
+./server/test/unittest_rqlannotation.py
+./server/test/unittest_rqlrewrite.py
+./server/test/unittest_schemaserial.py
+./server/test/unittest_security.py
+./server/test/unittest_session.py
+./server/test/unittest_sqlutils.py
+./server/test/unittest_ssplanner.py
+./server/test/unittest_tools.py
+./sobjects/__init__.py
+./sobjects/email.py
+./sobjects/hooks.py
+./sobjects/notification.py
+./sobjects/supervising.py
+./sobjects/test/unittest_email.py
+./sobjects/test/unittest_hooks.py
+./sobjects/test/unittest_notification.py
+./sobjects/test/unittest_supervising.py
+./test/unittest_cwconfig.py
+./test/unittest_cwctl.py
+./test/unittest_dbapi.py
+./test/unittest_rset.py
+./test/unittest_schema.py
+./test/unittest_vregistry.py
+./web/__init__.py
+./web/_exceptions.py
+./web/action.py
+./web/application.py
+./web/box.py
+./web/component.py
+./web/controller.py
+./web/facet.py
+./web/form.py
+./web/htmlwidgets.py
+./web/httpcache.py
+./web/request.py
+./web/webconfig.py
+./web/webctl.py
+./web/widgets.py
+./web/test/runtests.py
+./web/test/test_views.py
+./web/test/unittest_application.py
+./web/test/unittest_controller.py
+./web/test/unittest_magicsearch.py
+./web/test/unittest_urlpublisher.py
+./web/test/unittest_urlrewrite.py
+./web/test/unittest_views_actions.py
+./web/test/unittest_views_apacherewrite.py
+./web/test/unittest_views_basecontrollers.py
+./web/test/unittest_views_baseforms.py
+./web/test/unittest_views_baseviews.py
+./web/test/unittest_views_embeding.py
+./web/test/unittest_views_navigation.py
+./web/test/unittest_views_searchrestriction.py
+./web/test/unittest_viewselector.py
+./web/test/unittest_webconfig.py
+./web/test/unittest_widgets.py
+./web/views/__init__.py
+./web/views/actions.py
+./web/views/ajaxedit.py
+./web/views/apacherewrite.py
+./web/views/authentication.py
+./web/views/basecomponents.py
+./web/views/basecontrollers.py
+./web/views/baseforms.py
+./web/views/basetemplates.py
+./web/views/baseviews.py
+./web/views/bookmark.py
+./web/views/boxes.py
+./web/views/calendar.py
+./web/views/card.py
+./web/views/debug.py
+./web/views/dynimages.py
+./web/views/editcontroller.py
+./web/views/emailaddress.py
+./web/views/embedding.py
+./web/views/eproperties.py
+./web/views/error.py
+./web/views/euser.py
+./web/views/facets.py
+./web/views/ibreadcrumbs.py
+./web/views/idownloadable.py
+./web/views/igeocodable.py
+./web/views/iprogress.py
+./web/views/magicsearch.py
+./web/views/management.py
+./web/views/massmailing.py
+./web/views/navigation.py
+./web/views/old_calendar.py
+./web/views/plots.py
+./web/views/schemaentities.py
+./web/views/searchrestriction.py
+./web/views/sessions.py
+./web/views/startup.py
+./web/views/tableview.py
+./web/views/timeline.py
+./web/views/timetable.py
+./web/views/treeview.py
+./web/views/urlpublishing.py
+./web/views/urlrewrite.py
+./web/views/vcard.py
+./web/views/wdoc.py
+./web/views/wfentities.py
+./web/views/xbel.py
+./wsgi/__init__.py
+./wsgi/handler.py
+./wsgi/request.py
+bin/cubicweb-ctl
+common/test/data/bootstrap_packages
+common/test/data/entities.py
+common/test/data/migration/0.0.3_Any.py
+common/test/data/migration/0.0.4_Any.py
+common/test/data/migration/0.1.0_Any.py
+common/test/data/migration/0.1.0_common.py
+common/test/data/migration/0.1.0_repository.py
+common/test/data/migration/0.1.0_web.py
+common/test/data/migration/0.1.2_Any.py
+common/test/data/migration/depends.map
+common/test/data/schema/Affaire.sql
+common/test/data/schema/Note.py
+common/test/data/schema/Note.sql
+common/test/data/schema/Personne.sql
+common/test/data/schema/Societe.sql
+common/test/data/schema/relations.rel
+common/test/data/server_migration/2.10.2_Any.sql
+common/test/data/server_migration/2.5.0_Any.sql
+common/test/data/server_migration/2.6.0_Any.sql
+common/test/data/server_migration/bootstrapmigration_repository.py
+devtools/test/data/bootstrap_packages
+devtools/test/data/dbfill.conf
+devtools/test/data/firstnames.txt
+devtools/test/data/schema/Bug.sql
+devtools/test/data/schema/Project.sql
+devtools/test/data/schema/Story.sql
+devtools/test/data/schema/Version.sql
+devtools/test/data/schema/custom.py
+devtools/test/data/schema/relations.rel
+devtools/test/data/views/__init__.py
+devtools/test/data/views/bug.py
+doc/cubicweb.zargo
+doc/index.txt
+doc/makefile
+doc/plan_formation_python_cubicweb.txt
+doc/querier.txt
+doc/securite.txt
+doc/.static/logilab.png
+doc/.templates/layout.html
+doc/devmanual_fr/advanced_notes.txt
+doc/devmanual_fr/archi_globale.png
+doc/devmanual_fr/chap_autres_composants_ui.txt
+doc/devmanual_fr/chap_bases_framework_cubicweb.txt
+doc/devmanual_fr/chap_configuration_instance.txt
+doc/devmanual_fr/chap_definition_schema.txt
+doc/devmanual_fr/chap_definition_workflows.txt
+doc/devmanual_fr/chap_fondements_cubicweb.txt
+doc/devmanual_fr/chap_i18n.txt
+doc/devmanual_fr/chap_manipulation_donnees.txt
+doc/devmanual_fr/chap_migration.txt
+doc/devmanual_fr/chap_mise_en_place_environnement.txt
+doc/devmanual_fr/chap_rql.txt
+doc/devmanual_fr/chap_serveur_crochets.txt
+doc/devmanual_fr/chap_serveur_notification.txt
+doc/devmanual_fr/chap_tests.txt
+doc/devmanual_fr/chap_ui_gestion_formulaire.txt
+doc/devmanual_fr/chap_ui_js_json.txt
+doc/devmanual_fr/chap_visualisation_donnees.txt
+doc/devmanual_fr/index.txt
+doc/devmanual_fr/main_template_layout.png
+doc/devmanual_fr/makefile
+doc/devmanual_fr/sect_cubicweb-ctl.txt
+doc/devmanual_fr/sect_definition_entites.txt
+doc/devmanual_fr/sect_definition_schema.txt
+doc/devmanual_fr/sect_installation.txt
+doc/devmanual_fr/sect_mercurial.txt
+doc/devmanual_fr/sect_stdlib_schemas.txt
+doc/devmanual_fr/sect_stdlib_vues.txt
+doc/html-build/genindex.html
+doc/html-build/index.html
+doc/html-build/modindex.html
+doc/html-build/plan_formation_python_cubicweb.html
+doc/html-build/querier.html
+doc/html-build/search.html
+doc/html-build/securite.html
+doc/html-build/_images/archi_globale.png
+doc/html-build/_images/main_template_layout.png
+doc/html-build/_sources/index.txt
+doc/html-build/_sources/plan_formation_python_cubicweb.txt
+doc/html-build/_sources/querier.txt
+doc/html-build/_sources/securite.txt
+doc/html-build/_sources/devmanual_fr/advanced_notes.txt
+doc/html-build/_sources/devmanual_fr/chap_autres_composants_ui.txt
+doc/html-build/_sources/devmanual_fr/chap_bases_framework_cubicweb.txt
+doc/html-build/_sources/devmanual_fr/chap_configuration_instance.txt
+doc/html-build/_sources/devmanual_fr/chap_definition_schema.txt
+doc/html-build/_sources/devmanual_fr/chap_definition_workflows.txt
+doc/html-build/_sources/devmanual_fr/chap_fondements_cubicweb.txt
+doc/html-build/_sources/devmanual_fr/chap_i18n.txt
+doc/html-build/_sources/devmanual_fr/chap_manipulation_donnees.txt
+doc/html-build/_sources/devmanual_fr/chap_migration.txt
+doc/html-build/_sources/devmanual_fr/chap_mise_en_place_environnement.txt
+doc/html-build/_sources/devmanual_fr/chap_rql.txt
+doc/html-build/_sources/devmanual_fr/chap_serveur_crochets.txt
+doc/html-build/_sources/devmanual_fr/chap_serveur_notification.txt
+doc/html-build/_sources/devmanual_fr/chap_tests.txt
+doc/html-build/_sources/devmanual_fr/chap_ui_gestion_formulaire.txt
+doc/html-build/_sources/devmanual_fr/chap_ui_js_json.txt
+doc/html-build/_sources/devmanual_fr/chap_visualisation_donnees.txt
+doc/html-build/_sources/devmanual_fr/index.txt
+doc/html-build/_sources/devmanual_fr/sect_cubicweb-ctl.txt
+doc/html-build/_sources/devmanual_fr/sect_definition_entites.txt
+doc/html-build/_sources/devmanual_fr/sect_definition_schema.txt
+doc/html-build/_sources/devmanual_fr/sect_installation.txt
+doc/html-build/_sources/devmanual_fr/sect_mercurial.txt
+doc/html-build/_sources/devmanual_fr/sect_stdlib_schemas.txt
+doc/html-build/_sources/devmanual_fr/sect_stdlib_vues.txt
+doc/html-build/_sources/source/index.txt
+doc/html-build/_static/contents.png
+doc/html-build/_static/file.png
+doc/html-build/_static/logilab.png
+doc/html-build/_static/minus.png
+doc/html-build/_static/navigation.png
+doc/html-build/_static/plus.png
+doc/html-build/devmanual_fr/advanced_notes.html
+doc/html-build/devmanual_fr/chap_autres_composants_ui.html
+doc/html-build/devmanual_fr/chap_bases_framework_cubicweb.html
+doc/html-build/devmanual_fr/chap_configuration_instance.html
+doc/html-build/devmanual_fr/chap_definition_schema.html
+doc/html-build/devmanual_fr/chap_definition_workflows.html
+doc/html-build/devmanual_fr/chap_fondements_cubicweb.html
+doc/html-build/devmanual_fr/chap_i18n.html
+doc/html-build/devmanual_fr/chap_manipulation_donnees.html
+doc/html-build/devmanual_fr/chap_migration.html
+doc/html-build/devmanual_fr/chap_mise_en_place_environnement.html
+doc/html-build/devmanual_fr/chap_rql.html
+doc/html-build/devmanual_fr/chap_serveur_crochets.html
+doc/html-build/devmanual_fr/chap_serveur_notification.html
+doc/html-build/devmanual_fr/chap_tests.html
+doc/html-build/devmanual_fr/chap_ui_gestion_formulaire.html
+doc/html-build/devmanual_fr/chap_ui_js_json.html
+doc/html-build/devmanual_fr/chap_visualisation_donnees.html
+doc/html-build/devmanual_fr/index.html
+doc/html-build/devmanual_fr/sect_cubicweb-ctl.html
+doc/html-build/devmanual_fr/sect_definition_entites.html
+doc/html-build/devmanual_fr/sect_definition_schema.html
+doc/html-build/devmanual_fr/sect_installation.html
+doc/html-build/devmanual_fr/sect_mercurial.html
+doc/html-build/devmanual_fr/sect_stdlib_schemas.html
+doc/html-build/devmanual_fr/sect_stdlib_vues.html
+doc/html-build/source/index.html
+entities/test/data/bootstrap_packages
+entities/test/data/schema.py
+i18n/en.po
+i18n/entities.pot
+i18n/fr.po
+man/cubicweb-ctl.1
+misc/cwdesklets/rql_query.display
+misc/cwdesklets/web_query.display
+misc/cwdesklets/gfx/bg.png
+misc/cwdesklets/gfx/border-left.png
+misc/cwdesklets/gfx/logo_cw.png
+misc/cwdesklets/gfx/rss.png
+misc/cwdesklets/rqlsensor/__init__.py
+misc/cwzope/cwzope.py
+misc/migration/2.37.1_Any.py
+misc/migration/2.39.0_Any.py
+misc/migration/2.42.0_Any.py
+misc/migration/2.42.1_Any.py
+misc/migration/2.43.0_Any.py
+misc/migration/2.44.0_Any.py
+misc/migration/2.45.0_Any.py
+misc/migration/2.46.0_Any.py
+misc/migration/2.47.0_Any.py
+misc/migration/2.48.8_Any.py
+misc/migration/2.49.3_Any.py
+misc/migration/2.50.0_Any.py
+misc/migration/3.0.0_Any.py
+misc/migration/bootstrapmigration_repository.py
+misc/migration/postcreate.py
+schemas/Bookmark.py
+schemas/Card.py
+schemas/_regproc.sql.mysql
+schemas/_regproc.sql.postgres
+schemas/base.py
+schemas/bootstrap.py
+server/test/data/bootstrap_packages
+server/test/data/hooks.py
+server/test/data/config1/application_hooks.py
+server/test/data/config1/bootstrap_packages
+server/test/data/config1/server-ctl.conf
+server/test/data/config1/sources
+server/test/data/config2/application_hooks.py
+server/test/data/config2/bootstrap_packages
+server/test/data/config2/server-ctl.conf
+server/test/data/config2/sources
+server/test/data/migration/postcreate.py
+server/test/data/migrschema/Affaire.py
+server/test/data/migrschema/Folder2.py
+server/test/data/migrschema/Note.py
+server/test/data/migrschema/Personne.sql
+server/test/data/migrschema/Societe.perms
+server/test/data/migrschema/Societe.sql
+server/test/data/migrschema/relations.rel
+server/test/data/schema/Affaire.py
+server/test/data/schema/Note.sql
+server/test/data/schema/Personne.sql
+server/test/data/schema/Societe.py
+server/test/data/schema/custom.py
+server/test/data/schema/note.py
+server/test/data/schema/relations.rel
+sobjects/test/data/bootstrap_packages
+sobjects/test/data/schema.py
+sobjects/test/data/sobjects/__init__.py
+web/data/IE_styles.css
+web/data/MochiKit.js
+web/data/acl.css
+web/data/ajax.js
+web/data/asc.gif
+web/data/banner.png
+web/data/bg.gif
+web/data/bg_trame_grise.png
+web/data/black-check.png
+web/data/bookmarks.js
+web/data/bullet.png
+web/data/bullet_orange.png
+web/data/button.png
+web/data/calendar.css
+web/data/calendar.gif
+web/data/calendar.js
+web/data/calendar_popup.css
+web/data/compat.js
+web/data/critical.png
+web/data/cubicweb.css
+web/data/desc.gif
+web/data/download.gif
+web/data/dublincore-button.png
+web/data/dublincore-icon.png
+web/data/edition.js
+web/data/error.png
+web/data/external_resources
+web/data/favicon.ico
+web/data/fckcwconfig.js
+web/data/feed-icon.png
+web/data/feed-icon16x16.png
+web/data/feed-icon32x32.png
+web/data/file.gif
+web/data/folder-closed.gif
+web/data/folder.gif
+web/data/form.css
+web/data/formfilter.js
+web/data/gmap.js
+web/data/gmap.utility.labeledmarker.js
+web/data/gmap_blue_marker.png
+web/data/go.png
+web/data/goa.js
+web/data/gradient-grey-up.png
+web/data/gradient-grey.gif
+web/data/help.png
+web/data/help_ie.png
+web/data/html_tree.css
+web/data/htmlhelpers.js
+web/data/icon_blank.png
+web/data/icon_bookmark.gif
+web/data/icon_emailaddress.gif
+web/data/icon_euser.gif
+web/data/icon_map.png
+web/data/icon_state.gif
+web/data/information.png
+web/data/iprogress.css
+web/data/jquery.autocomplete.css
+web/data/jquery.autocomplete.js
+web/data/jquery.js
+web/data/jquery.json.js
+web/data/jquery.tablesorter.js
+web/data/jquery.treeview.css
+web/data/jquery.treeview.js
+web/data/liveclipboard-icon.png
+web/data/loading.gif
+web/data/login.css
+web/data/logo.png
+web/data/logo.xcf
+web/data/mail.gif
+web/data/mailform.css
+web/data/microformats-button.png
+web/data/microformats-icon.png
+web/data/minus.gif
+web/data/no-check-no-border.png
+web/data/nomail.gif
+web/data/nomail.xcf
+web/data/plus.gif
+web/data/preferences.css
+web/data/print.css
+web/data/puce.png
+web/data/puce_down.png
+web/data/puce_down_black.png
+web/data/pygments.css
+web/data/python.js
+web/data/required.png
+web/data/rss-button.png
+web/data/rss.png
+web/data/schema.css
+web/data/search.png
+web/data/sendcancel.png
+web/data/sendok.png
+web/data/shadow.gif
+web/data/simile-ajax-api.js
+web/data/simile-ajax-bundle.js
+web/data/sortable.js
+web/data/suggest.css
+web/data/tablesorter.css
+web/data/timeline-big-bundle.js
+web/data/timeline-bundle.css
+web/data/timeline-stubs.js
+web/data/timeline.ext.js
+web/data/timeline.js
+web/data/timetable.css
+web/data/treeview-black-line.gif
+web/data/treeview-black.gif
+web/data/treeview-default-line.gif
+web/data/treeview-default.gif
+web/data/treeview-famfamfam-line.gif
+web/data/treeview-famfamfam.gif
+web/data/treeview-gray-line.gif
+web/data/treeview-gray.gif
+web/data/treeview-red-line.gif
+web/data/treeview-red.gif
+web/data/widgets.js
+web/data/timeline/blue-circle.png
+web/data/timeline/bubble-arrows.png
+web/data/timeline/bubble-body-and-arrows.png
+web/data/timeline/bubble-body.png
+web/data/timeline/bubble-bottom-arrow.png
+web/data/timeline/bubble-bottom-left.png
+web/data/timeline/bubble-bottom-right.png
+web/data/timeline/bubble-bottom.png
+web/data/timeline/bubble-left-arrow.png
+web/data/timeline/bubble-left.png
+web/data/timeline/bubble-right-arrow.png
+web/data/timeline/bubble-right.png
+web/data/timeline/bubble-top-arrow.png
+web/data/timeline/bubble-top-left.png
+web/data/timeline/bubble-top-right.png
+web/data/timeline/bubble-top.png
+web/data/timeline/close-button.png
+web/data/timeline/copyright-vertical.png
+web/data/timeline/copyright.png
+web/data/timeline/dark-blue-circle.png
+web/data/timeline/dark-green-circle.png
+web/data/timeline/dark-red-circle.png
+web/data/timeline/dull-blue-circle.png
+web/data/timeline/dull-green-circle.png
+web/data/timeline/dull-red-circle.png
+web/data/timeline/gray-circle.png
+web/data/timeline/green-circle.png
+web/data/timeline/message-bottom-left.png
+web/data/timeline/message-bottom-right.png
+web/data/timeline/message-left.png
+web/data/timeline/message-right.png
+web/data/timeline/message-top-left.png
+web/data/timeline/message-top-right.png
+web/data/timeline/message.png
+web/data/timeline/progress-running.gif
+web/data/timeline/red-circle.png
+web/data/timeline/sundial.png
+web/data/timeline/top-bubble.png
+web/views/edit_attributes.pt
+web/views/edit_multiple.pt
+web/views/edit_relations.pt
+web/wdoc/ChangeLog_en
+web/wdoc/ChangeLog_fr
+web/wdoc/about_en.rst
+web/wdoc/about_fr.rst
+web/wdoc/add_content_en.rst
+web/wdoc/add_content_fr.rst
+web/wdoc/advanced_usage_en.rst
+web/wdoc/advanced_usage_schema_en.rst
+web/wdoc/advanced_usage_schema_fr.rst
+web/wdoc/bookmarks_en.rst
+web/wdoc/bookmarks_fr.rst
+web/wdoc/custom_view_en.rst
+web/wdoc/custom_view_fr.rst
+web/wdoc/custom_view_last_update_en.rst
+web/wdoc/custom_view_last_update_fr.rst
+web/wdoc/custom_view_rss_en.rst
+web/wdoc/custom_view_rss_fr.rst
+web/wdoc/glossary_en.rst
+web/wdoc/glossary_fr.rst
+web/wdoc/main_en.rst
+web/wdoc/search_en.rst
+web/wdoc/search_fr.rst
+web/wdoc/search_sample_queries_en.rst
+web/wdoc/search_sample_queries_fr.rst
+web/wdoc/standard_usage_en.rst
+web/wdoc/standard_usage_fr.rst
+web/wdoc/toc.xml
+web/wdoc/tut_rql_en.rst
+web/wdoc/tut_rql_fr.rst
+web/wdoc/userprefs_en.rst
+web/wdoc/userprefs_fr.rst
+web/wdoc/images/userprefs_en.png
+web/wdoc/images/userprefs_fr.png
diff -r 000000000000 -r b97547f5f1fa MANIFEST.in
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/MANIFEST.in Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,28 @@
+include README
+include pylintrc
+include bin/cubicweb-*
+include man/cubicweb-ctl.1
+
+recursive-include doc *.txt *.zargo *.png *.html makefile
+
+recursive-include misc *
+
+recursive-include web/data *
+recursive-include web/wdoc *.rst *.png *.xml ChangeLog*
+
+include web/views/*.pt
+
+recursive-include etwist *.xml *.html
+
+recursive-include i18n *.pot *.po
+recursive-include schemas *.py *.rel *.sql.*
+
+recursive-include common/test/data *
+recursive-include entities/test/data *
+recursive-include sobjects/test/data *
+recursive-include server/test/data *
+recursive-include server/test sources*
+recursive-include web/test/data *.js *.css *.png *.gif *.jpg *.ico external_resources
+recursive-include devtools/test/data *
+
+prune misc/cwfs
diff -r 000000000000 -r b97547f5f1fa README
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/README Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,18 @@
+CubicWeb semantic web framework
+===============================
+
+Install
+-------
+From the source distribution, extract the tarball and run ::
+
+ python setup.py install
+
+For deb and rpm packages, use the tools recommended by your distribution.
+
+
+Documentation
+-------------
+Look in the doc/ subdirectory.
+
+
+
diff -r 000000000000 -r b97547f5f1fa __init__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/__init__.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,299 @@
+"""CubicWeb is a generic framework to quickly build applications which describes
+relations between entitites.
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+:license: General Public License version 2 - http://www.gnu.org/licenses
+"""
+__docformat__ = "restructuredtext en"
+from cubicweb.__pkginfo__ import version as __version__
+
+import __builtin__
+# '_' is available in builtins to mark internationalized strings but should
+# not be used to do the actual translation
+if not hasattr(__builtin__, '_'):
+ __builtin__._ = unicode
+
+CW_SOFTWARE_ROOT = __path__[0]
+
+import sys, os, logging
+from StringIO import StringIO
+from urllib import quote as urlquote, unquote as urlunquote
+
+from logilab.common.decorators import cached
+
+
+LLDEBUG = 5
+logging.addLevelName(LLDEBUG, 'LLDEBUG')
+
+class CubicWebLogger(logging.Logger):
+
+ def lldebug(self, msg, *args, **kwargs):
+ """
+ Log 'msg % args' with severity 'DEBUG'.
+
+ To pass exception information, use the keyword argument exc_info with
+ a true value, e.g.
+
+ logger.debug("Houston, we have a %s", "thorny problem", exc_info=1)
+ """
+ if self.manager.disable >= LLDEBUG:
+ return
+ if LLDEBUG >= self.getEffectiveLevel():
+ self._log(LLDEBUG, msg, args, **kwargs)
+
+logging.setLoggerClass(CubicWebLogger)
+
+def set_log_methods(cls, logger):
+ """bind standart logger's methods as static methods on the class
+ """
+ cls._logger = logger
+ for attr in ('lldebug', 'debug', 'info', 'warning', 'error', 'critical', 'exception'):
+ setattr(cls, attr, getattr(logger, attr))
+
+if os.environ.get('APYCOT_ROOT'):
+ logging.basicConfig(level=logging.CRITICAL)
+else:
+ logging.basicConfig()
+
+
+set_log_methods(sys.modules[__name__], logging.getLogger('cubicweb'))
+
+# make all exceptions accessible from the package
+from cubicweb._exceptions import *
+
+# convert eid to the right type, raise ValueError if it's not a valid eid
+typed_eid = int
+
+
+#def log_thread(f, w, a):
+# print f.f_code.co_filename, f.f_code.co_name
+#import threading
+#threading.settrace(log_thread)
+
+class Binary(StringIO):
+ """customize StringIO to make sure we don't use unicode"""
+ def __init__(self, buf= ''):
+ assert isinstance(buf, (str, buffer)), \
+ "Binary objects must use raw strings, not %s" % buf.__class__
+ StringIO.__init__(self, buf)
+
+ def write(self, data):
+ assert isinstance(data, (str, buffer)), \
+ "Binary objects must use raw strings, not %s" % data.__class__
+ StringIO.write(self, data)
+
+
+class RequestSessionMixIn(object):
+ """mixin class containing stuff shared by server session and web request
+ """
+ def __init__(self, vreg):
+ self.vreg = vreg
+ try:
+ encoding = vreg.property_value('ui.encoding')
+ except: # no vreg or property not registered
+ encoding = 'utf-8'
+ self.encoding = encoding
+ # cache result of execution for (rql expr / eids),
+ # should be emptied on commit/rollback of the server session / web
+ # connection
+ self.local_perm_cache = {}
+
+ def property_value(self, key):
+ if self.user:
+ return self.user.property_value(key)
+ return self.vreg.property_value(key)
+
+ def etype_rset(self, etype, size=1):
+ """return a fake result set for a particular entity type"""
+ from cubicweb.rset import ResultSet
+ rset = ResultSet([('A',)]*size, '%s X' % etype,
+ description=[(etype,)]*size)
+ def get_entity(row, col=0, etype=etype, vreg=self.vreg, rset=rset):
+ return self.vreg.etype_class(etype)(self, rset, row, col)
+ rset.get_entity = get_entity
+ return self.decorate_rset(rset)
+
+ def eid_rset(self, eid, etype=None):
+ """return a result set for the given eid without doing actual query
+ (we have the eid, we can suppose it exists and user has access to the
+ entity)
+ """
+ from cubicweb.rset import ResultSet
+ eid = typed_eid(eid)
+ if etype is None:
+ etype = self.describe(eid)[0]
+ rset = ResultSet([(eid,)], 'Any X WHERE X eid %(x)s', {'x': eid},
+ [(etype,)])
+ return self.decorate_rset(rset)
+
+ def entity_from_eid(self, eid, etype=None):
+ rset = self.eid_rset(eid, etype)
+ if rset:
+ return rset.get_entity(0, 0)
+ else:
+ return None
+
+ # url generation methods ##################################################
+
+ def build_url(self, method, base_url=None, **kwargs):
+ """return an absolute URL using params dictionary key/values as URL
+ parameters. Values are automatically URL quoted, and the
+ publishing method to use may be specified or will be guessed.
+ """
+ if base_url is None:
+ base_url = self.base_url()
+ if '_restpath' in kwargs:
+ assert method == 'view', method
+ path = kwargs.pop('_restpath')
+ else:
+ path = method
+ if not kwargs:
+ return u'%s%s' % (base_url, path)
+ return u'%s%s?%s' % (base_url, path, self.build_url_params(**kwargs))
+
+
+ def build_url_params(self, **kwargs):
+ """return encoded params to incorporate them in an URL"""
+ args = []
+ for param, values in kwargs.items():
+ if not isinstance(values, (list, tuple)):
+ values = (values,)
+ for value in values:
+ args.append(u'%s=%s' % (param, self.url_quote(value)))
+ return '&'.join(args)
+
+ def url_quote(self, value, safe=''):
+ """urllib.quote is not unicode safe, use this method to do the
+ necessary encoding / decoding. Also it's designed to quote each
+ part of a url path and so the '/' character will be encoded as well.
+ """
+ if isinstance(value, unicode):
+ quoted = urlquote(value.encode(self.encoding), safe=safe)
+ return unicode(quoted, self.encoding)
+ return urlquote(str(value), safe=safe)
+
+ def url_unquote(self, quoted):
+ """returns a unicode unquoted string
+
+ decoding is based on `self.encoding` which is the encoding
+ used in `url_quote`
+ """
+ if isinstance(quoted, unicode):
+ quoted = quoted.encode(self.encoding)
+ try:
+ return unicode(urlunquote(quoted), self.encoding)
+ except UnicodeDecodeError: # might occur on manually typed URLs
+ return unicode(urlunquote(quoted), 'iso-8859-1')
+
+
+ # session's user related methods #####################################
+
+ @cached
+ def user_data(self):
+ """returns a dictionnary with this user's information"""
+ userinfo = {}
+ if self.is_internal_session:
+ userinfo['login'] = "cubicweb"
+ userinfo['name'] = "cubicweb"
+ userinfo['email'] = ""
+ return userinfo
+ user = self.actual_session().user
+ rql = "Any F,S,A where U eid %(x)s, U firstname F, U surname S, U primary_email E, E address A"
+ try:
+ firstname, lastname, email = self.execute(rql, {'x': user.eid}, 'x')[0]
+ if firstname is None and lastname is None:
+ userinfo['name'] = ''
+ else:
+ userinfo['name'] = ("%s %s" % (firstname, lastname))
+ userinfo['email'] = email
+ except IndexError:
+ userinfo['name'] = None
+ userinfo['email'] = None
+ userinfo['login'] = user.login
+ return userinfo
+
+ def is_internal_session(self):
+ """overrided on the server-side"""
+ return False
+
+ # abstract methods to override according to the web front-end #############
+
+ def base_url(self):
+ """return the root url of the application"""
+ raise NotImplementedError
+
+ def decorate_rset(self, rset):
+ """add vreg/req (at least) attributes to the given result set """
+ raise NotImplementedError
+
+ def describe(self, eid):
+ """return a tuple (type, sourceuri, extid) for the entity with id """
+ raise NotImplementedError
+
+
+# XXX 2.45 allows nicer entity type names, use this map for bw compat
+ETYPE_NAME_MAP = {'Eetype': 'EEType',
+ 'Ertype': 'ERType',
+ 'Efrdef': 'EFRDef',
+ 'Enfrdef': 'ENFRDef',
+ 'Econstraint': 'EConstraint',
+ 'Econstrainttype': 'EConstraintType',
+ 'Epermission': 'EPermission',
+ 'Egroup': 'EGroup',
+ 'Euser': 'EUser',
+ 'Eproperty': 'EProperty',
+ 'Emailaddress': 'EmailAddress',
+ 'Rqlexpression': 'RQLExpression',
+ 'Trinfo': 'TrInfo',
+ }
+
+
+
+# XXX cubicweb cube migration map
+CW_MIGRATION_MAP = {'erudi': 'cubicweb',
+
+ 'eaddressbook': 'addressbook',
+ 'ebasket': 'basket',
+ 'eblog': 'blog',
+ 'ebook': 'book',
+ 'ecomment': 'comment',
+ 'ecompany': 'company',
+ 'econference': 'conference',
+ 'eemail': 'email',
+ 'eevent': 'event',
+ 'eexpense': 'expense',
+ 'efile': 'file',
+ 'einvoice': 'invoice',
+ 'elink': 'link',
+ 'emailinglist': 'mailinglist',
+ 'eperson': 'person',
+ 'eshopcart': 'shopcart',
+ 'eskillmat': 'skillmat',
+ 'etask': 'task',
+ 'eworkcase': 'workcase',
+ 'eworkorder': 'workorder',
+ 'ezone': 'zone',
+ 'i18ncontent': 'i18ncontent',
+ 'svnfile': 'vcsfile',
+
+ 'eclassschemes': 'keyword',
+ 'eclassfolders': 'folder',
+ 'eclasstags': 'tag',
+
+ 'jpl': 'jpl',
+ 'jplintra': 'jplintra',
+ 'jplextra': 'jplextra',
+ 'jplorg': 'jplorg',
+ 'jplrecia': 'jplrecia',
+ 'crm': 'crm',
+ 'agueol': 'agueol',
+ 'docaster': 'docaster',
+ 'asteretud': 'asteretud',
+
+ # XXX temp
+ 'keywords': 'keyword',
+ 'folders': 'folder',
+ 'tags': 'tag',
+ }
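The `url_quote`/`url_unquote` pair defined on `RequestSessionMixIn` above works around `urllib.quote` not being unicode-safe: values are encoded with the session encoding before quoting and decoded again after unquoting, with a latin-1 fallback for hand-typed URLs. A minimal standalone sketch of that round trip (Python 2; the fixed 'utf-8' constant is an assumption of this sketch, standing in for the `ui.encoding` property normally fetched from the vreg)::

    # illustrative sketch only -- mirrors RequestSessionMixIn.url_quote/url_unquote
    from urllib import quote as urlquote, unquote as urlunquote

    ENCODING = 'utf-8'  # assumed stand-in for vreg.property_value('ui.encoding')

    def url_quote(value, safe=''):
        """quote a unicode or str value; '/' is quoted too since safe defaults to ''"""
        if isinstance(value, unicode):
            quoted = urlquote(value.encode(ENCODING), safe=safe)
            return unicode(quoted, ENCODING)
        return urlquote(str(value), safe=safe)

    def url_unquote(quoted):
        """return a unicode unquoted string, decoding with ENCODING first"""
        if isinstance(quoted, unicode):
            quoted = quoted.encode(ENCODING)
        try:
            return unicode(urlunquote(quoted), ENCODING)
        except UnicodeDecodeError:  # may happen on manually typed URLs
            return unicode(urlunquote(quoted), 'iso-8859-1')

    assert url_unquote(url_quote(u'caf\xe9 & co')) == u'caf\xe9 & co'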
diff -r 000000000000 -r b97547f5f1fa __pkginfo__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/__pkginfo__.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,92 @@
+# pylint: disable-msg=W0622,C0103
+"""cubicweb global packaging information for the cubicweb knowledge management
+software
+"""
+
+distname = "cubicweb"
+modname = "cubicweb"
+
+numversion = (3, 0, 0)
+version = '.'.join(str(num) for num in numversion)
+
+license = 'LCL'
+copyright = '''Copyright (c) 2003-2008 LOGILAB S.A. (Paris, FRANCE).
+http://www.logilab.fr/ -- mailto:contact@logilab.fr'''
+
+author = "Logilab"
+author_email = "contact@logilab.fr"
+
+short_desc = "a repository of entities / relations for knowledge management"
+long_desc = """CubicWeb is a entities / relations based knowledge management system
+developped at Logilab.
+
+This package contains:
+* a repository server
+* a RQL command line client to the repository
+* an adaptive modpython interface to the server
+* a bunch of other management tools
+"""
+
+web = ''
+ftp = ''
+pyversions = ['2.4']
+
+
+from os import listdir, environ
+from os.path import join, isdir
+import glob
+
+scripts = [s for s in glob.glob(join('bin', 'cubicweb-*'))
+ if not s.endswith('.bat')]
+include_dirs = [join('common', 'test', 'data'),
+ join('server', 'test', 'data'),
+ join('web', 'test', 'data'),
+ join('devtools', 'test', 'data'),]
+
+
+entities_dir = 'entities'
+schema_dir = 'schemas'
+sobjects_dir = 'sobjects'
+server_migration_dir = join('misc', 'migration')
+data_dir = join('web', 'data')
+wdoc_dir = join('web', 'wdoc')
+wdocimages_dir = join(wdoc_dir, 'images')
+views_dir = join('web', 'views')
+i18n_dir = 'i18n'
+
+if environ.get('APYCOT_ROOT'):
+ # --home install
+ pydir = 'python'
+else:
+ pydir = join('python2.4', 'site-packages')
+try:
+ data_files = [
+ # common data
+ #[join('share', 'cubicweb', 'entities'),
+ # [join(entities_dir, filename) for filename in listdir(entities_dir)]],
+ # server data
+ [join('share', 'cubicweb', 'schemas'),
+ [join(schema_dir, filename) for filename in listdir(schema_dir)]],
+ #[join('share', 'cubicweb', 'sobjects'),
+ # [join(sobjects_dir, filename) for filename in listdir(sobjects_dir)]],
+ [join('share', 'cubicweb', 'migration'),
+ [join(server_migration_dir, filename)
+ for filename in listdir(server_migration_dir)]],
+ # web data
+ [join('share', 'cubicweb', 'cubes', 'shared', 'data'),
+ [join(data_dir, fname) for fname in listdir(data_dir) if not isdir(join(data_dir, fname))]],
+ [join('share', 'cubicweb', 'cubes', 'shared', 'data', 'timeline'),
+ [join(data_dir, 'timeline', fname) for fname in listdir(join(data_dir, 'timeline'))]],
+ [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc'),
+ [join(wdoc_dir, fname) for fname in listdir(wdoc_dir) if not isdir(join(wdoc_dir, fname))]],
+ [join('share', 'cubicweb', 'cubes', 'shared', 'wdoc', 'images'),
+ [join(wdocimages_dir, fname) for fname in listdir(wdocimages_dir)]],
+ # XXX: .pt install should be handled properly in a near future version
+ [join('lib', pydir, 'cubicweb', 'web', 'views'),
+ [join(views_dir, fname) for fname in listdir(views_dir) if fname.endswith('.pt')]],
+ [join('share', 'cubicweb', 'cubes', 'shared', 'i18n'),
+ [join(i18n_dir, fname) for fname in listdir(i18n_dir)]],
+ ]
+except OSError:
+ # we are in an installed directory, don't care about this
+ pass
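`__pkginfo__` assembles `data_files` as a list of `[target directory, [source files]]` pairs, which is the structure distutils expects for installing non-Python files such as schemas, web data and i18n catalogues. A hypothetical, minimal `setup.py` fragment showing how such a module would typically be consumed (the project's real setup.py is listed in the MANIFEST but not included in this changeset, so the keyword mapping below is an illustration, not the actual file)::

    # hypothetical setup.py sketch -- not the setup.py shipped with the package
    from distutils.core import setup

    from cubicweb import __pkginfo__ as pkginfo

    setup(name=pkginfo.distname,
          version=pkginfo.version,
          author=pkginfo.author,
          author_email=pkginfo.author_email,
          description=pkginfo.short_desc,
          long_description=pkginfo.long_desc,
          scripts=pkginfo.scripts,
          # each (target directory, [files]) pair is copied verbatim at install time
          data_files=getattr(pkginfo, 'data_files', None))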
diff -r 000000000000 -r b97547f5f1fa _exceptions.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/_exceptions.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,148 @@
+"""Exceptions shared by different cubicweb packages.
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from yams import ValidationError
+
+# abstract exceptions #########################################################
+
+class CubicWebException(Exception):
+ """base class for cubicweb server exception"""
+ msg = ""
+ def __str__(self):
+ if self.msg:
+ if self.args:
+ return self.msg % tuple(self.args)
+ return self.msg
+ return ' '.join(str(arg) for arg in self.args)
+
+
+class ConfigurationError(CubicWebException):
+ """a misconfiguration error"""
+
+class InternalError(CubicWebException):
+ """base class for exceptions which should not occurs"""
+
+class SecurityError(CubicWebException):
+ """base class for cubicweb server security exception"""
+
+class RepositoryError(CubicWebException):
+ """base class for repository exceptions"""
+
+class SourceException(CubicWebException):
+ """base class for source exceptions"""
+
+class CubicWebRuntimeError(CubicWebException):
+ """base class for runtime exceptions"""
+
+# repository exceptions #######################################################
+
+class ConnectionError(RepositoryError):
+ """raised when a bad connection id is given or when an attempt to establish
+ a connection failed"""
+
+class AuthenticationError(ConnectionError):
+ """raised when a bad connection id is given or when an attempt to establish
+ a connection failed"""
+
+class BadConnectionId(ConnectionError):
+ """raised when a bad connection id is given or when an attempt to establish
+ a connection failed"""
+
+BadSessionId = BadConnectionId # XXX bw compat for pyro connections
+
+class UnknownEid(RepositoryError):
+ """the eid is not defined in the system tables"""
+ msg = 'No entity with eid %s in the repository'
+
+class ETypeNotSupportedBySources(RepositoryError, InternalError):
+ """no source support an entity type"""
+ msg = 'No source supports %r entity\'s type'
+
+class RTypeNotSupportedBySources(RepositoryError, InternalError):
+ """no source support a relation type"""
+ msg = 'No source supports %r relation\'s type'
+
+
+# security exceptions #########################################################
+
+class Unauthorized(SecurityError):
+ """raised when a user tries to perform an action without sufficient
+ credentials
+ """
+ msg = 'You are not allowed to perform this operation'
+ msg1 = 'You are not allowed to perform %s operation on %s'
+ var = None
+ #def __init__(self, *args):
+ # self.args = args
+
+ def __str__(self):
+ try:
+ if self.args and len(self.args) == 2:
+ return self.msg1 % self.args
+ if self.args:
+ return ' '.join(self.args)
+ return self.msg
+ except Exception, ex:
+ return str(ex)
+
+# source exceptions ###########################################################
+
+class EidNotInSource(SourceException):
+ """trying to access an object with a particular eid from a particular
+ source has failed
+ """
+ msg = 'No entity with eid %s in %s'
+
+
+# registry exceptions #########################################################
+
+class RegistryException(CubicWebException):
+ """raised when an unregistered view is called"""
+
+class RegistryNotFound(RegistryException):
+ """raised when an unknown registry is requested
+
+ this is usually a programming/typo error...
+ """
+
+class ObjectNotFound(RegistryException):
+ """raised when an unregistered object is requested
+
+ this may be a programming/typo or a misconfiguration error
+ """
+
+# class ViewNotFound(ObjectNotFound):
+# """raised when an unregistered view is called"""
+
+class NoSelectableObject(RegistryException):
+ """some views with the given vid have been found but no
+ one is applyable to the result set
+ """
+
+class UnknownProperty(RegistryException):
+ """property found in database but unknown in registry"""
+
+# query exception #############################################################
+
+class QueryError(CubicWebRuntimeError):
+ """a query try to do something it shouldn't"""
+
+class NotAnEntity(CubicWebRuntimeError):
+ """raised when get_entity is called for a column which doesn't contain
+ a non final entity
+ """
+
+# tools exceptions ############################################################
+
+class ExecutionError(Exception):
+ """server execution control error (already started, not running...)"""
+
+# pylint: disable-msg=W0611
+from logilab.common.clcommands import BadCommandUsage
+
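`CubicWebException.__str__` above interpolates the class-level `msg` template with the exception's constructor arguments when both are present, and `Unauthorized` switches to `msg1` when exactly two arguments are given. A small hypothetical usage sketch of that behaviour (the printed strings follow from the templates defined in this file)::

    # illustrative usage of the message templating implemented above
    from cubicweb._exceptions import UnknownEid, Unauthorized

    print str(UnknownEid(42))
    # -> 'No entity with eid 42 in the repository'
    print str(Unauthorized('delete', 'EUser'))
    # -> 'You are not allowed to perform delete operation on EUser'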
diff -r 000000000000 -r b97547f5f1fa bin/cubicweb-ctl
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/bin/cubicweb-ctl Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,4 @@
+#!/usr/bin/env python
+from cubicweb.cwctl import run
+import sys
+run(sys.argv[1:])
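The launcher script stays deliberately thin: it only hands `sys.argv[1:]` to `cubicweb.cwctl.run`, which is expected to map the first argument to a command. `cwctl.py` itself is listed in the MANIFEST but not shown in this excerpt, so the following is only a hypothetical sketch of that dispatch pattern, with a made-up 'hello' command, not the actual implementation::

    # hypothetical dispatcher sketch -- cubicweb.cwctl.run is NOT reproduced here
    import sys

    def hello(args):
        """made-up command used only for illustration"""
        print 'hello %s' % (' '.join(args) or 'world')

    COMMANDS = {'hello': hello}

    def run(args):
        """look up the first argument and hand the rest to the matching command"""
        if not args or args[0] not in COMMANDS:
            print >> sys.stderr, 'usage: cubicweb-ctl <command> [arguments]'
            sys.exit(1)
        COMMANDS[args[0]](args[1:])

    if __name__ == '__main__':
        run(sys.argv[1:])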
diff -r 000000000000 -r b97547f5f1fa cleanappl.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cleanappl.sh Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+rm -f web/test/tmpdb*
+rm -f web/tali18n.py
+
+rm -f applications/*/test/tmpdb*
+rm -f applications/*/tali18n.py
+rm -f applications/*/i18n/*_full.po
+rm -f applications/*/data/Schema.dot
diff -r 000000000000 -r b97547f5f1fa common/__init__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/common/__init__.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,52 @@
+"""Common subpackage of cubicweb : defines library functions used both on the
+hg stserver side and on the client side
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+from logilab.common.adbh import FunctionDescr
+
+from cubicweb._exceptions import * # bw compat
+
+from rql.utils import register_function, iter_funcnode_variables
+
+class COMMA_JOIN(FunctionDescr):
+ supported_backends = ('postgres', 'sqlite',)
+ rtype = 'String'
+
+ @classmethod
+ def st_description(cls, funcnode):
+ return ', '.join(term.get_description()
+ for term in iter_funcnode_variables(funcnode))
+
+register_function(COMMA_JOIN) # XXX do not expose?
+
+
+class CONCAT_STRINGS(COMMA_JOIN):
+ aggregat = True
+
+register_function(CONCAT_STRINGS) # XXX bw compat
+
+class GROUP_CONCAT(CONCAT_STRINGS):
+ supported_backends = ('mysql', 'postgres', 'sqlite',)
+
+register_function(GROUP_CONCAT)
+
+
+class LIMIT_SIZE(FunctionDescr):
+ supported_backends = ('postgres', 'sqlite',)
+ rtype = 'String'
+
+ @classmethod
+ def st_description(cls, funcnode):
+ return funcnode.children[0].get_description()
+
+register_function(LIMIT_SIZE)
+
+
+class TEXT_LIMIT_SIZE(LIMIT_SIZE):
+ supported_backends = ('mysql', 'postgres', 'sqlite',)
+
+register_function(TEXT_LIMIT_SIZE)
diff -r 000000000000 -r b97547f5f1fa common/appobject.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/common/appobject.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,461 @@
+"""Base class for dynamically loaded objects manipulated in the web interface
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from warnings import warn
+
+from mx.DateTime import now, oneSecond
+from simplejson import dumps
+
+from logilab.common.deprecation import obsolete
+from rql.stmts import Union, Select
+
+from cubicweb import Unauthorized
+from cubicweb.vregistry import VObject
+from cubicweb.common.utils import UStringIO
+from cubicweb.common.uilib import html_escape, ustrftime
+from cubicweb.common.registerers import yes_registerer, priority_registerer
+from cubicweb.common.selectors import yes_selector
+
+_MARKER = object()
+
+
+class Cache(dict):
+ def __init__(self):
+ super(Cache, self).__init__()
+ self.cache_creation_date = None
+ self.latest_cache_lookup = now()
+
+CACHE_REGISTRY = {}
+
+class AppRsetObject(VObject):
+ """This is the base class for CubicWeb application objects
+ which are selected according to a request and result set.
+
+ Classes are kept in the vregistry and instantiation is done at selection
+ time.
+
+ At registration time, the following attributes are set on the class:
+ :vreg:
+ the application's registry
+ :schema:
+ the application's schema
+ :config:
+ the application's configuration
+
+ At instantiation time, the following attributes are set on the instance:
+ :req:
+ current request
+ :rset:
+ result set on which the object is applied
+ """
+
+ @classmethod
+ def registered(cls, vreg):
+ cls.vreg = vreg
+ cls.schema = vreg.schema
+ cls.config = vreg.config
+ cls.register_properties()
+ return cls
+
+ @classmethod
+ def selected(cls, req, rset, row=None, col=None, **kwargs):
+ """by default web app objects are usually instantiated on
+ selection according to a request, a result set, and optional
+ row and col
+ """
+ instance = cls(req, rset)
+ instance.row = row
+ instance.col = col
+ return instance
+
+ # Eproperties definition:
+ # key: id of the property (the actual EProperty key is built using
+ # ..
+ # value: tuple (property type, vocabfunc, default value, property description)
+ # possible types are those used by `logilab.common.configuration`
+ #
+ # notice that when multiple objects exist with the same id (adaptation,
+ # overriding) only the first encountered definition is considered, so those
+ # objects can't try to have different default values for instance.
+
+ property_defs = {}
+
+ @classmethod
+ def register_properties(cls):
+ for propid, pdef in cls.property_defs.items():
+ pdef = pdef.copy() # may be shared
+ pdef['default'] = getattr(cls, propid, pdef['default'])
+ pdef['sitewide'] = getattr(cls, 'site_wide', pdef.get('sitewide'))
+ cls.vreg.register_property(cls.propkey(propid), **pdef)
+
+ @classmethod
+ def propkey(cls, propid):
+ return '%s.%s.%s' % (cls.__registry__, cls.id, propid)
+
+
+ def __init__(self, req, rset):
+ super(AppRsetObject, self).__init__()
+ self.req = req
+ self.rset = rset
+
+ @property
+ def cursor(self): # XXX deprecate in favor of req.cursor?
+ msg = '.cursor is deprecated, use req.execute (or req.cursor if necessary)'
+ warn(msg, DeprecationWarning, stacklevel=2)
+ return self.req.cursor
+
+ def get_cache(self, cachename):
+ """
+ NOTE: cachename should be a dotted name, as in:
+ - cubicweb.mycache
+ - cubes.blog.mycache
+ - etc.
+ """
+ if cachename in CACHE_REGISTRY:
+ cache = CACHE_REGISTRY[cachename]
+ else:
+ cache = Cache()
+ CACHE_REGISTRY[cachename] = cache
+ _now = now()
+ if _now > cache.latest_cache_lookup + oneSecond:
+ ecache = self.req.execute('Any C,T WHERE C is ECache, C name %(name)s, C timestamp T',
+ {'name':cachename}).get_entity(0,0)
+ cache.latest_cache_lookup = _now
+ if not ecache.valid(cache.cache_creation_date):
+ cache.empty()
+ cache.cache_creation_date = _now
+ return cache
+
+ def propval(self, propid):
+ assert self.req
+ return self.req.property_value(self.propkey(propid))
+
+
+ def limited_rql(self):
+ """return a printable rql for the result set associated to the object,
+ with limit/offset correctly set according to maximum page size and
+ currently displayed page when necessary
+ """
+ # try to get page boundaries from the navigation component
+ # XXX we should probably not have a ref to this component here (eg in
+ # cubicweb.common)
+ nav = self.vreg.select_component('navigation', self.req, self.rset)
+ if nav:
+ start, stop = nav.page_boundaries()
+ rql = self._limit_offset_rql(stop - start, start)
+ # result set may have been limited manually, in which case navigation won't
+ # apply
+ elif self.rset.limited:
+ rql = self._limit_offset_rql(*self.rset.limited)
+ # navigation component doesn't apply and rset has not been limited, no
+ # need to limit query
+ else:
+ rql = self.rset.printable_rql()
+ return rql
+
+ def _limit_offset_rql(self, limit, offset):
+ rqlst = self.rset.syntax_tree()
+ if len(rqlst.children) == 1:
+ select = rqlst.children[0]
+ olimit, ooffset = select.limit, select.offset
+ select.limit, select.offset = limit, offset
+ rql = rqlst.as_string(kwargs=self.rset.args)
+ # restore original limit/offset
+ select.limit, select.offset = olimit, ooffset
+ else:
+ newselect = Select()
+ newselect.limit = limit
+ newselect.offset = offset
+ aliases = [VariableRef(newselect.get_variable(vref.name, i))
+ for i, vref in enumerate(rqlst.selection)]
+ newselect.set_with([SubQuery(aliases, rqlst)], check=False)
+ newunion = Union()
+ newunion.append(newselect)
+ rql = rqlst.as_string(kwargs=self.rset.args)
+ rqlst.parent = None
+ return rql
+
+ # url generation methods ##################################################
+
+ controller = 'view'
+
+ def build_url(self, method=None, **kwargs):
+ """return an absolute URL using params dictionary key/values as URL
+ parameters. Values are automatically URL quoted, and the
+ publishing method to use may be specified or will be guessed.
+ """
+ # XXX I (adim) think that if method is passed explicitly, we should
+ # not try to process it and directly call req.build_url()
+ if method is None:
+ method = self.controller
+ if method == 'view' and self.req.from_controller() == 'view' and \
+ not '_restpath' in kwargs:
+ method = self.req.relative_path(includeparams=False) or 'view'
+ return self.req.build_url(method, **kwargs)
+
+ # various resources accessors #############################################
+
+ def etype_rset(self, etype, size=1):
+ """return a fake result set for a particular entity type"""
+ msg = '.etype_rset is deprecated, use req.etype_rset'
+ warn(msg, DeprecationWarning, stacklevel=2)
+ return self.req.etype_rset(etype, size=size)
+
+ def eid_rset(self, eid, etype=None):
+ """return a result set for the given eid"""
+ msg = '.eid_rset is deprecated, use req.eid_rset'
+ warn(msg, DeprecationWarning, stacklevel=2)
+ return self.req.eid_rset(eid, etype)
+
+ def entity(self, row, col=0):
+ """short cut to get an entity instance for a particular row/column
+ (col defaults to 0)
+ """
+ return self.rset.get_entity(row, col)
+
+ def complete_entity(self, row, col=0, skip_bytes=True):
+ """short cut to get an completed entity instance for a particular
+ row (all instance's attributes have been fetched)
+ """
+ entity = self.entity(row, col)
+ entity.complete(skip_bytes=skip_bytes)
+ return entity
+
+ def user_rql_callback(self, args, msg=None):
+ """register a user callback to execute some rql query and return an url
+ to call it, ready to be inserted in html
+ """
+ def rqlexec(req, rql, args=None, key=None):
+ req.execute(rql, args, key)
+ return self.user_callback(rqlexec, args, msg)
+
+ def user_callback(self, cb, args, msg=None, nonify=False):
+        """register the given user callback and return a URL to call it, ready
+        to be inserted in HTML
+ """
+ self.req.add_js('cubicweb.ajax.js')
+ if nonify:
+ # XXX < 2.48.3 bw compat
+ warn('nonify argument is deprecated', DeprecationWarning, stacklevel=2)
+ _cb = cb
+ def cb(*args):
+ _cb(*args)
+ cbname = self.req.register_onetime_callback(cb, *args)
+ msg = dumps(msg or '')
+ return "javascript:userCallbackThenReloadPage('%s', %s)" % (
+ cbname, msg)
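+        # Usage sketch (hypothetical RQL and eid, not from the original patch):
+        #   url = self.user_rql_callback(
+        #       ('DELETE X in_basket B WHERE B eid %(b)s', {'b': 1234}, 'b'),
+        #       msg=self.req._('item removed'))
+        # the returned javascript: URL runs the query server side through ajax,
+        # then reloads the page and displays `msg`.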
+
+ # formating methods #######################################################
+
+ def tal_render(self, template, variables):
+ """render a precompiled page template with variables in the given
+ dictionary as context
+ """
+ from cubicweb.common.tal import CubicWebContext
+ context = CubicWebContext()
+ context.update({'self': self, 'rset': self.rset, '_' : self.req._,
+ 'req': self.req, 'user': self.req.user})
+ context.update(variables)
+ output = UStringIO()
+ template.expand(context, output)
+ return output.getvalue()
+
+ def format_date(self, date, date_format=None, time=False):
+        """return a string for an mx DateTime object according to the
+        application's configuration
+ """
+ if date:
+ if date_format is None:
+ if time:
+ date_format = self.req.property_value('ui.datetime-format')
+ else:
+ date_format = self.req.property_value('ui.date-format')
+ return ustrftime(date, date_format)
+ return u''
+
+ def format_time(self, time):
+        """return a string for an mx DateTime time according to the
+        application's configuration
+ """
+ if time:
+ return ustrftime(time, self.req.property_value('ui.time-format'))
+ return u''
+
+ def format_float(self, num):
+        """return a string for a floating point number according to the
+        application's configuration
+        """
+        if num is not None:
+ return self.req.property_value('ui.float-format') % num
+ return u''
+
+ # security related methods ################################################
+
+ def ensure_ro_rql(self, rql):
+ """raise an exception if the given rql is not a select query"""
+ first = rql.split(' ', 1)[0].lower()
+ if first in ('insert', 'set', 'delete'):
+ raise Unauthorized(self.req._('only select queries are authorized'))
+
+ # .accepts handling utilities #############################################
+
+ accepts = ('Any',)
+
+ @classmethod
+ def accept_rset(cls, req, rset, row, col):
+ """apply the following rules:
+ * if row is None, return the sum of values returned by the method
+ for each entity's type in the result set. If any score is 0,
+ return 0.
+ * if row is specified, return the value returned by the method with
+ the entity's type of this row
+ """
+ if row is None:
+ score = 0
+ for etype in rset.column_types(0):
+ accepted = cls.accept(req.user, etype)
+ if not accepted:
+ return 0
+ score += accepted
+ return score
+ return cls.accept(req.user, rset.description[row][col or 0])
+
+ @classmethod
+ def accept(cls, user, etype):
+ """score etype, returning better score on exact match"""
+ if 'Any' in cls.accepts:
+ return 1
+ eschema = cls.schema.eschema(etype)
+ matching_types = [e.type for e in eschema.ancestors()]
+ matching_types.append(etype)
+ for index, basetype in enumerate(matching_types):
+ if basetype in cls.accepts:
+ return 2 + index
+ return 0
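+        # Scoring note (illustrative): with accepts = ('Person',), an exact
+        # 'Person' match returns 2 + its position in ancestors + [etype], so it
+        # always outranks a class accepting only 'Any' (score 1); unknown types
+        # score 0 and make accept_rset() reject the whole result set.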
+
+ # .rtype handling utilities ##############################################
+
+ @classmethod
+ def relation_possible(cls, etype):
+        """tell if a relation with an entity of type `etype` is possible
+        according to the mixed-in class' .etype, .rtype and .target attributes
+
+ XXX should probably be moved out to a function
+ """
+ schema = cls.schema
+ rtype = cls.rtype
+ eschema = schema.eschema(etype)
+ if hasattr(cls, 'role'):
+ role = cls.role
+ elif cls.target == 'subject':
+ role = 'object'
+ else:
+ role = 'subject'
+ # check if this relation is possible according to the schema
+ try:
+ if role == 'object':
+ rschema = eschema.object_relation(rtype)
+ else:
+ rschema = eschema.subject_relation(rtype)
+ except KeyError:
+ return False
+ if hasattr(cls, 'etype'):
+ letype = cls.etype
+ try:
+ if role == 'object':
+ return etype in rschema.objects(letype)
+ else:
+ return etype in rschema.subjects(letype)
+ except KeyError, ex:
+ return False
+ return True
+
+
+ # XXX deprecated (since 2.43) ##########################
+
+ @obsolete('use req.datadir_url')
+ def datadir_url(self):
+ """return url of the application's data directory"""
+ return self.req.datadir_url
+
+ @obsolete('use req.external_resource()')
+ def external_resource(self, rid, default=_MARKER):
+ return self.req.external_resource(rid, default)
+
+
+class AppObject(AppRsetObject):
+ """base class for application objects which are not selected
+ according to a result set, only by their identifier.
+
+ Those objects may not have req, rset and cursor set.
+ """
+
+ @classmethod
+ def selected(cls, *args, **kwargs):
+ """by default web app objects are usually instantiated on
+ selection
+ """
+ return cls(*args, **kwargs)
+
+ def __init__(self, req=None, rset=None, **kwargs):
+ self.req = req
+ self.rset = rset
+ self.__dict__.update(kwargs)
+
+
+class ReloadableMixIn(object):
+ """simple mixin for reloadable parts of UI"""
+
+ def user_callback(self, cb, args, msg=None, nonify=False):
+        """register the given user callback and return a URL to call it, ready
+        to be inserted in HTML
+ """
+ self.req.add_js('cubicweb.ajax.js')
+ if nonify:
+ _cb = cb
+ def cb(*args):
+ _cb(*args)
+ cbname = self.req.register_onetime_callback(cb, *args)
+ return self.build_js(cbname, html_escape(msg or ''))
+
+ def build_update_js_call(self, cbname, msg):
+ rql = html_escape(self.rset.printable_rql())
+ return "javascript:userCallbackThenUpdateUI('%s', '%s', '%s', '%s', '%s', '%s')" % (
+ cbname, self.id, rql, msg, self.__registry__, self.div_id())
+
+ def build_reload_js_call(self, cbname, msg):
+ return "javascript:userCallbackThenReloadPage('%s', '%s')" % (cbname, msg)
+
+ build_js = build_update_js_call # expect updatable component by default
+
+ def div_id(self):
+ return ''
+
+
+class ComponentMixIn(ReloadableMixIn):
+ """simple mixin for component object"""
+ __registry__ = 'components'
+ __registerer__ = yes_registerer
+ __selectors__ = (yes_selector,)
+ __select__ = classmethod(*__selectors__)
+
+ def div_class(self):
+ return '%s %s' % (self.propval('htmlclass'), self.id)
+
+ def div_id(self):
+ return '%sComponent' % self.id
+
+
+class Component(ComponentMixIn, AppObject):
+ """base class for non displayable components
+ """
+
+class SingletonComponent(Component):
+ """base class for non displayable unique components
+ """
+ __registerer__ = priority_registerer
diff -r 000000000000 -r b97547f5f1fa common/entity.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/common/entity.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1094 @@
+"""Base class for entity objects manipulated in clients
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.common import interface
+from logilab.common.compat import all
+from logilab.common.decorators import cached
+from logilab.mtconverter import TransformData, TransformError
+from rql.utils import rqlvar_maker
+
+from cubicweb import Unauthorized
+from cubicweb.vregistry import autoselectors
+from cubicweb.rset import ResultSet
+from cubicweb.common.appobject import AppRsetObject
+from cubicweb.common.registerers import id_registerer
+from cubicweb.common.selectors import yes_selector
+from cubicweb.common.uilib import printable_value, html_escape, soup2xhtml
+from cubicweb.common.mixins import MI_REL_TRIGGERS
+from cubicweb.common.mttransforms import ENGINE
+from cubicweb.schema import RQLVocabularyConstraint, RQLConstraint, bw_normalize_etype
+
+_marker = object()
+
+def greater_card(rschema, subjtypes, objtypes, index):
+ for subjtype in subjtypes:
+ for objtype in objtypes:
+ card = rschema.rproperty(subjtype, objtype, 'cardinality')[index]
+ if card in '+*':
+ return card
+ return '1'
+
+
+class RelationTags(object):
+
+ MODE_TAGS = frozenset(('link', 'create'))
+ CATEGORY_TAGS = frozenset(('primary', 'secondary', 'generic', 'generated',
+ 'inlineview'))
+
+ def __init__(self, eclass, tagdefs):
+ self.eclass = eclass
+ self._tagdefs = {}
+ for relation, tags in tagdefs.iteritems():
+ # tags must become a set
+ if isinstance(tags, basestring):
+ tags = set((tags,))
+ elif not isinstance(tags, set):
+ tags = set(tags)
+ # relation must become a 3-uple (rtype, targettype, role)
+ if isinstance(relation, basestring):
+ self._tagdefs[(relation, '*', 'subject')] = tags
+ self._tagdefs[(relation, '*', 'object')] = tags
+ elif len(relation) == 1: # useful ?
+ self._tagdefs[(relation[0], '*', 'subject')] = tags
+ self._tagdefs[(relation[0], '*', 'object')] = tags
+ elif len(relation) == 2:
+ rtype, ttype = relation
+ ttype = bw_normalize_etype(ttype) # XXX bw compat
+ self._tagdefs[rtype, ttype, 'subject'] = tags
+ self._tagdefs[rtype, ttype, 'object'] = tags
+ elif len(relation) == 3:
+ relation = list(relation) # XXX bw compat
+ relation[1] = bw_normalize_etype(relation[1])
+ self._tagdefs[tuple(relation)] = tags
+ else:
+ raise ValueError('bad rtag definition (%r)' % (relation,))
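+        # Normalisation example (hypothetical rtags, not from the original patch):
+        #   {'firstname': 'primary', ('works_for', 'Company'): 'generic'}
+        # becomes
+        #   {('firstname', '*', 'subject'): set(['primary']),
+        #    ('firstname', '*', 'object'): set(['primary']),
+        #    ('works_for', 'Company', 'subject'): set(['generic']),
+        #    ('works_for', 'Company', 'object'): set(['generic'])}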
+
+
+ def __initialize__(self):
+ # eclass.[*]schema are only set when registering
+ self.schema = self.eclass.schema
+ eschema = self.eschema = self.eclass.e_schema
+ rtags = self._tagdefs
+ # expand wildcards in rtags and add automatic tags
+ for rschema, tschemas, role in sorted(eschema.relation_definitions(True)):
+ rtype = rschema.type
+ star_tags = rtags.pop((rtype, '*', role), set())
+ for tschema in tschemas:
+ tags = rtags.setdefault((rtype, tschema.type, role), set(star_tags))
+ if role == 'subject':
+ X, Y = eschema, tschema
+ card = rschema.rproperty(X, Y, 'cardinality')[0]
+ composed = rschema.rproperty(X, Y, 'composite') == 'object'
+ else:
+ X, Y = tschema, eschema
+ card = rschema.rproperty(X, Y, 'cardinality')[1]
+ composed = rschema.rproperty(X, Y, 'composite') == 'subject'
+ # set default category tags if needed
+ if not tags & self.CATEGORY_TAGS:
+ if card in '1+':
+ if not rschema.is_final() and composed:
+ category = 'generated'
+ elif rschema.is_final() and (
+ rschema.type.endswith('_format')
+ or rschema.type.endswith('_encoding')):
+ category = 'generated'
+ else:
+ category = 'primary'
+ elif rschema.is_final():
+ if (rschema.type.endswith('_format')
+ or rschema.type.endswith('_encoding')):
+ category = 'generated'
+ else:
+ category = 'secondary'
+ else:
+ category = 'generic'
+ tags.add(category)
+ if not tags & self.MODE_TAGS:
+ if card in '?1':
+ # by default, suppose link mode if cardinality doesn't allow
+ # more than one relation
+ mode = 'link'
+ elif rschema.rproperty(X, Y, 'composite') == role:
+ # if self is composed of the target type, create mode
+ mode = 'create'
+ else:
+ # link mode by default
+ mode = 'link'
+ tags.add(mode)
+
+ def _default_target(self, rschema, role='subject'):
+ eschema = self.eschema
+ if role == 'subject':
+ return eschema.subject_relation(rschema).objects(eschema)[0]
+ else:
+ return eschema.object_relation(rschema).subjects(eschema)[0]
+
+ # dict compat
+ def __getitem__(self, key):
+ if isinstance(key, basestring):
+ key = (key,)
+ return self.get_tags(*key)
+
+ __contains__ = __getitem__
+
+ def get_tags(self, rtype, targettype=None, role='subject'):
+ rschema = self.schema.rschema(rtype)
+ if targettype is None:
+ tschema = self._default_target(rschema, role)
+ else:
+ tschema = self.schema.eschema(targettype)
+ return self._tagdefs[(rtype, tschema.type, role)]
+
+ __call__ = get_tags
+
+ def get_mode(self, rtype, targettype=None, role='subject'):
+ # XXX: should we make an assertion on rtype not being final ?
+ # assert not rschema.is_final()
+ tags = self.get_tags(rtype, targettype, role)
+ # do not change the intersection order !
+ modes = tags & self.MODE_TAGS
+ assert len(modes) == 1
+ return modes.pop()
+
+ def get_category(self, rtype, targettype=None, role='subject'):
+ tags = self.get_tags(rtype, targettype, role)
+ categories = tags & self.CATEGORY_TAGS
+ assert len(categories) == 1
+ return categories.pop()
+
+ def is_inlined(self, rtype, targettype=None, role='subject'):
+ # return set(('primary', 'secondary')) & self.get_tags(rtype, targettype)
+ return 'inlineview' in self.get_tags(rtype, targettype, role)
+
+
+class metaentity(autoselectors):
+ """this metaclass sets the relation tags on the entity class
+ and deals with the `widgets` attribute
+ """
+ def __new__(mcs, name, bases, classdict):
+ # collect baseclass' rtags
+ tagdefs = {}
+ widgets = {}
+ for base in bases:
+ tagdefs.update(getattr(base, '__rtags__', {}))
+ widgets.update(getattr(base, 'widgets', {}))
+        # update with the class' own rtags
+ tagdefs.update(classdict.get('__rtags__', {}))
+ widgets.update(classdict.get('widgets', {}))
+ # XXX decide whether or not it's a good idea to replace __rtags__
+ # good point: transparent support for inheritance levels >= 2
+        # bad point: we lose the information of which tags are specific
+ # to this entity class
+ classdict['__rtags__'] = tagdefs
+ classdict['widgets'] = widgets
+ eclass = super(metaentity, mcs).__new__(mcs, name, bases, classdict)
+ # adds the "rtags" attribute
+ eclass.rtags = RelationTags(eclass, tagdefs)
+ return eclass
+
+
+class Entity(AppRsetObject, dict):
+    """an entity instance has e_schema automagically set on
+    the class and instances have access to their issuing cursor.
+
+    A property is set for each attribute and relation on each entity's type
+    class. Beware that, among attributes, 'eid' is *NOT* stored in the
+    dict container (which acts as a cache for other attributes dynamically
+    fetched)
+
+ :type e_schema: `cubicweb.schema.EntitySchema`
+ :ivar e_schema: the entity's schema
+
+    :type rest_attr: str
+    :cvar rest_attr: indicates which attribute should be used to build REST
+                     urls. If None is specified, the first non-meta attribute
+                     will be used
+
+    :type skip_copy_for: list
+    :cvar skip_copy_for: a list of relations that should be skipped when copying
+                         this kind of entity. Note that composite relations and
+                         relations whose object cardinality is '?' or '1' are
+                         skipped anyway
+ """
+ __metaclass__ = metaentity
+ __registry__ = 'etypes'
+ __registerer__ = id_registerer
+ __selectors__ = (yes_selector,)
+ widgets = {}
+ id = None
+ e_schema = None
+ eid = None
+ rest_attr = None
+ skip_copy_for = ()
+
+ @classmethod
+ def registered(cls, registry):
+ """build class using descriptor at registration time"""
+ assert cls.id is not None
+ super(Entity, cls).registered(registry)
+ if cls.id != 'Any':
+ cls.__initialize__()
+ return cls
+
+ MODE_TAGS = set(('link', 'create'))
+ CATEGORY_TAGS = set(('primary', 'secondary', 'generic', 'generated')) # , 'metadata'))
+ @classmethod
+ def __initialize__(cls):
+ """initialize a specific entity class by adding descriptors to access
+ entity type's attributes and relations
+ """
+ etype = cls.id
+ assert etype != 'Any', etype
+ cls.e_schema = eschema = cls.schema.eschema(etype)
+ for rschema, _ in eschema.attribute_definitions():
+ if rschema.type == 'eid':
+ continue
+ setattr(cls, rschema.type, Attribute(rschema.type))
+ mixins = []
+ for rschema, _, x in eschema.relation_definitions():
+ if (rschema, x) in MI_REL_TRIGGERS:
+ mixin = MI_REL_TRIGGERS[(rschema, x)]
+ if not (issubclass(cls, mixin) or mixin in mixins): # already mixed ?
+ mixins.append(mixin)
+ for iface in getattr(mixin, '__implements__', ()):
+ if not interface.implements(cls, iface):
+ interface.extend(cls, iface)
+ if x == 'subject':
+ setattr(cls, rschema.type, SubjectRelation(rschema))
+ else:
+ attr = 'reverse_%s' % rschema.type
+ setattr(cls, attr, ObjectRelation(rschema))
+ if mixins:
+ cls.__bases__ = tuple(mixins + [p for p in cls.__bases__ if not p is object])
+ cls.debug('plugged %s mixins on %s', mixins, etype)
+ cls.rtags.__initialize__()
+
+ @classmethod
+ def fetch_rql(cls, user, restriction=None, fetchattrs=None, mainvar='X',
+ settype=True, ordermethod='fetch_order'):
+ """return a rql to fetch all entities of the class type"""
+ restrictions = restriction or []
+ if settype:
+ restrictions.append('%s is %s' % (mainvar, cls.id))
+ if fetchattrs is None:
+ fetchattrs = cls.fetch_attrs
+ selection = [mainvar]
+ orderby = []
+ # start from 26 to avoid possible conflicts with X
+ varmaker = rqlvar_maker(index=26)
+ cls._fetch_restrictions(mainvar, varmaker, fetchattrs, selection,
+ orderby, restrictions, user, ordermethod)
+ rql = 'Any %s' % ','.join(selection)
+ if orderby:
+ rql += ' ORDERBY %s' % ','.join(orderby)
+ rql += ' WHERE %s' % ', '.join(restrictions)
+ return rql
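+        # Sketch of the generated query for a hypothetical 'Person' class with
+        # fetch_attrs = ('firstname',), ORDERBY depending on the fetch_order
+        # method:
+        #   Any X,AA WHERE X is Person, X firstname AA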
+
+ @classmethod
+ def _fetch_restrictions(cls, mainvar, varmaker, fetchattrs,
+ selection, orderby, restrictions, user,
+ ordermethod='fetch_order', visited=None):
+ eschema = cls.e_schema
+ if visited is None:
+ visited = set((eschema.type,))
+ elif eschema.type in visited:
+ # avoid infinite recursion
+ return
+ else:
+ visited.add(eschema.type)
+ _fetchattrs = []
+ for attr in fetchattrs:
+ try:
+ rschema = eschema.subject_relation(attr)
+ except KeyError:
+ cls.warning('skipping fetch_attr %s defined in %s (not found in schema)',
+ attr, cls.id)
+ continue
+ if not user.matching_groups(rschema.get_groups('read')):
+ continue
+ var = varmaker.next()
+ selection.append(var)
+ restriction = '%s %s %s' % (mainvar, attr, var)
+ restrictions.append(restriction)
+ if not rschema.is_final():
+ # XXX this does not handle several destination types
+ desttype = rschema.objects(eschema.type)[0]
+ card = rschema.rproperty(eschema, desttype, 'cardinality')[0]
+ if card not in '?1':
+ selection.pop()
+ restrictions.pop()
+ continue
+ if card == '?':
+ restrictions[-1] += '?' # left outer join if not mandatory
+ destcls = cls.vreg.etype_class(desttype)
+ destcls._fetch_restrictions(var, varmaker, destcls.fetch_attrs,
+ selection, orderby, restrictions,
+ user, ordermethod, visited=visited)
+ orderterm = getattr(cls, ordermethod)(attr, var)
+ if orderterm:
+ orderby.append(orderterm)
+ return selection, orderby, restrictions
+
+ def __init__(self, req, rset, row=None, col=0):
+ AppRsetObject.__init__(self, req, rset)
+ dict.__init__(self)
+ self.row, self.col = row, col
+ self._related_cache = {}
+ if rset is not None:
+ self.eid = rset[row][col]
+ else:
+ self.eid = None
+ self._is_saved = True
+
+ def __repr__(self):
+        return '<Entity %s %s %s at %s>' % (
+            self.e_schema, self.eid, self.keys(), id(self))
+
+ def __nonzero__(self):
+ return True
+
+ def __hash__(self):
+ return id(self)
+
+ def pre_add_hook(self):
+ """hook called by the repository before doing anything to add the entity
+        (before_add entity hooks have not been called yet). This gives the
+        opportunity to do weird stuff such as autocasting (File -> Image for instance).
+
+ This method must return the actual entity to be added.
+ """
+ return self
+
+ def set_eid(self, eid):
+ self.eid = self['eid'] = eid
+
+ def has_eid(self):
+        """return True if the entity has an attributed eid (False
+        meaning that the entity has to be created)
+ """
+ try:
+ int(self.eid)
+ return True
+ except (ValueError, TypeError):
+ return False
+
+ def is_saved(self):
+ """during entity creation, there is some time during which the entity
+ has an eid attributed though it's not saved (eg during before_add_entity
+ hooks). You can use this method to ensure the entity has an eid *and* is
+ saved in its source.
+ """
+ return self.has_eid() and self._is_saved
+
+ @cached
+ def metainformation(self):
+ res = dict(zip(('type', 'source', 'extid'), self.req.describe(self.eid)))
+ res['source'] = self.req.source_defs()[res['source']]
+ return res
+
+ def check_perm(self, action):
+ self.e_schema.check_perm(self.req, action, self.eid)
+
+ def has_perm(self, action):
+ return self.e_schema.has_perm(self.req, action, self.eid)
+
+ def view(self, vid, __registry='views', **kwargs):
+ """shortcut to apply a view on this entity"""
+ return self.vreg.render(__registry, vid, self.req, rset=self.rset,
+ row=self.row, col=self.col, **kwargs)
+
+ def absolute_url(self, method=None, **kwargs):
+ """return an absolute url to view this entity"""
+ # in linksearch mode, we don't want external urls else selecting
+ # the object for use in the relation is tricky
+ # XXX search_state is web specific
+ if getattr(self.req, 'search_state', ('normal',))[0] == 'normal':
+ kwargs['base_url'] = self.metainformation()['source'].get('base-url')
+ if method is None or method == 'view':
+ kwargs['_restpath'] = self.rest_path()
+ else:
+ kwargs['rql'] = 'Any X WHERE X eid %s' % self.eid
+ return self.build_url(method, **kwargs)
+
+ def rest_path(self):
+ """returns a REST-like (relative) path for this entity"""
+ mainattr, needcheck = self._rest_attr_info()
+ etype = str(self.e_schema)
+ if mainattr == 'eid':
+ value = self.eid
+ else:
+ value = getattr(self, mainattr)
+ if value is None:
+ return '%s/eid/%s' % (etype.lower(), self.eid)
+ if needcheck:
+ # make sure url is not ambiguous
+ rql = 'Any COUNT(X) WHERE X is %s, X %s %%(value)s' % (etype, mainattr)
+ if value is not None:
+ nbresults = self.req.execute(rql, {'value' : value})[0][0]
+            # maybe an assertion that nbresults is not 0 would be a good idea
+            if nbresults != 1: # ambiguous or not found, fall back to eid
+ return '%s/eid/%s' % (etype.lower(), self.eid)
+ return '%s/%s' % (etype.lower(), self.req.url_quote(value))
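+        # Illustration (hypothetical data): a Card whose rest_attr value is the
+        # unique wikiid u'index' yields 'card/index'; when the value is missing
+        # or ambiguous, the path falls back to 'card/eid/1234'.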
+
+ @classmethod
+ def _rest_attr_info(cls):
+ mainattr, needcheck = 'eid', True
+ if cls.rest_attr:
+ mainattr = cls.rest_attr
+ needcheck = not cls.e_schema.has_unique_values(mainattr)
+ else:
+ for rschema in cls.e_schema.subject_relations():
+ if rschema.is_final() and rschema != 'eid' and cls.e_schema.has_unique_values(rschema):
+ mainattr = str(rschema)
+ needcheck = False
+ break
+ if mainattr == 'eid':
+ needcheck = False
+ return mainattr, needcheck
+
+ @cached
+ def formatted_attrs(self):
+ """returns the list of attributes which have some format information
+ (i.e. rich text strings)
+ """
+ attrs = []
+ for rschema, attrschema in self.e_schema.attribute_definitions():
+ if attrschema.type == 'String' and self.has_format(rschema):
+ attrs.append(rschema.type)
+ return attrs
+
+ def format(self, attr):
+ """return the mime type format for an attribute (if specified)"""
+ return getattr(self, '%s_format' % attr, None)
+
+ def text_encoding(self, attr):
+ """return the text encoding for an attribute, default to site encoding
+ """
+ encoding = getattr(self, '%s_encoding' % attr, None)
+ return encoding or self.vreg.property_value('ui.encoding')
+
+ def has_format(self, attr):
+ """return true if this entity's schema has a format field for the given
+ attribute
+ """
+ return self.e_schema.has_subject_relation('%s_format' % attr)
+
+ def has_text_encoding(self, attr):
+        """return true if this entity's schema has an encoding field for the
+ given attribute
+ """
+ return self.e_schema.has_subject_relation('%s_encoding' % attr)
+
+ def printable_value(self, attr, value=_marker, attrtype=None,
+ format='text/html', displaytime=True):
+        """return a displayable value (i.e. unicode string) which may contain
+ html tags
+ """
+ attr = str(attr)
+ if value is _marker:
+ value = getattr(self, attr)
+ if isinstance(value, basestring):
+ value = value.strip()
+ if value is None or value == '': # don't use "not", 0 is an acceptable value
+ return u''
+ if attrtype is None:
+ attrtype = self.e_schema.destination(attr)
+ props = self.e_schema.rproperties(attr)
+ if attrtype == 'String':
+            # internationalized *and* formatted string such as schema
+ # description...
+ if props.get('internationalizable'):
+ value = self.req._(value)
+ attrformat = self.format(attr)
+ if attrformat:
+ return self.mtc_transform(value, attrformat, format,
+ self.req.encoding)
+ elif attrtype == 'Bytes':
+ attrformat = self.format(attr)
+ if attrformat:
+ try:
+ encoding = getattr(self, '%s_encoding' % attr)
+ except AttributeError:
+ encoding = self.req.encoding
+ return self.mtc_transform(value.getvalue(), attrformat, format,
+ encoding)
+ return u''
+ value = printable_value(self.req, attrtype, value, props, displaytime)
+ if format == 'text/html':
+ value = html_escape(value)
+ return value
+
+ def mtc_transform(self, data, format, target_format, encoding,
+ _engine=ENGINE):
+ trdata = TransformData(data, format, encoding, appobject=self)
+ data = _engine.convert(trdata, target_format).decode()
+ if format == 'text/html':
+ data = soup2xhtml(data, self.req.encoding)
+ return data
+
+ # entity cloning ##########################################################
+
+ def copy_relations(self, ceid):
+ """copy relations of the object with the given eid on this object
+
+ By default meta and composite relations are skipped.
+        Override this if you want another behaviour
+ """
+ assert self.has_eid()
+ execute = self.req.execute
+ for rschema in self.e_schema.subject_relations():
+ if rschema.meta or rschema.is_final():
+ continue
+ # skip already defined relations
+ if getattr(self, rschema.type):
+ continue
+ if rschema.type in self.skip_copy_for:
+ continue
+ if rschema.type == 'in_state':
+ # if the workflow is defining an initial state (XXX AND we are
+ # not in the managers group? not done to be more consistent)
+ # don't try to copy in_state
+ if execute('Any S WHERE S state_of ET, ET initial_state S,'
+ 'ET name %(etype)s', {'etype': str(self.e_schema)}):
+ continue
+ # skip composite relation
+ if self.e_schema.subjrproperty(rschema, 'composite'):
+ continue
+            # skip relations with cardinality in '?1', else we would either change
+            # the copied object (inlined relation) or insert some inconsistency
+ if self.e_schema.subjrproperty(rschema, 'cardinality')[1] in '?1':
+ continue
+ rql = 'SET X %s V WHERE X eid %%(x)s, Y eid %%(y)s, Y %s V' % (
+ rschema.type, rschema.type)
+ execute(rql, {'x': self.eid, 'y': ceid}, ('x', 'y'))
+ self.clear_related_cache(rschema.type, 'subject')
+ for rschema in self.e_schema.object_relations():
+ if rschema.meta:
+ continue
+ # skip already defined relations
+ if getattr(self, 'reverse_%s' % rschema.type):
+ continue
+ # skip composite relation
+ if self.e_schema.objrproperty(rschema, 'composite'):
+ continue
+            # skip relations with cardinality in '?1', else we would either change
+            # the copied object (inlined relation) or insert some inconsistency
+ if self.e_schema.objrproperty(rschema, 'cardinality')[0] in '?1':
+ continue
+ rql = 'SET V %s X WHERE X eid %%(x)s, Y eid %%(y)s, V %s Y' % (
+ rschema.type, rschema.type)
+ execute(rql, {'x': self.eid, 'y': ceid}, ('x', 'y'))
+ self.clear_related_cache(rschema.type, 'object')
+
+ # data fetching methods ###################################################
+
+ @cached
+ def as_rset(self):
+ """returns a resultset containing `self` information"""
+ rset = ResultSet([(self.eid,)], 'Any X WHERE X eid %(x)s',
+ {'x': self.eid}, [(self.id,)])
+ return self.req.decorate_rset(rset)
+
+ def to_complete_relations(self):
+        """yield the inlined, non-final relations to complete by default when calling .complete()"""
+ for rschema in self.e_schema.subject_relations():
+ if rschema.is_final():
+ continue
+ if len(rschema.objects(self.e_schema)) > 1:
+                # ambiguous relations, the querier doesn't handle
+ # outer join correctly in this case
+ continue
+ if rschema.inlined:
+ matching_groups = self.req.user.matching_groups
+ if matching_groups(rschema.get_groups('read')) and \
+ all(matching_groups(es.get_groups('read'))
+ for es in rschema.objects(self.e_schema)):
+ yield rschema, 'subject'
+
+ def to_complete_attributes(self, skip_bytes=True):
+ for rschema, attrschema in self.e_schema.attribute_definitions():
+ # skip binary data by default
+ if skip_bytes and attrschema.type == 'Bytes':
+ continue
+ attr = rschema.type
+ if attr == 'eid':
+ continue
+            # password retrieval is blocked at the repository server level
+ if not self.req.user.matching_groups(rschema.get_groups('read')) \
+ or attrschema.type == 'Password':
+ self[attr] = None
+ continue
+ yield attr
+
+ def complete(self, attributes=None, skip_bytes=True):
+ """complete this entity by adding missing attributes (i.e. query the
+ repository to fill the entity)
+
+ :type skip_bytes: bool
+ :param skip_bytes:
+ if true, attribute of type Bytes won't be considered
+ """
+ assert self.has_eid()
+ varmaker = rqlvar_maker()
+ V = varmaker.next()
+ rql = ['WHERE %s eid %%(x)s' % V]
+ selected = []
+ for attr in (attributes or self.to_complete_attributes(skip_bytes)):
+ # if attribute already in entity, nothing to do
+ if self.has_key(attr):
+ continue
+ # case where attribute must be completed, but is not yet in entity
+ var = varmaker.next()
+ rql.append('%s %s %s' % (V, attr, var))
+ selected.append((attr, var))
+        # +1 since this doesn't include the main variable
+ lastattr = len(selected) + 1
+ if attributes is None:
+ # fetch additional relations (restricted to 0..1 relations)
+ for rschema, role in self.to_complete_relations():
+ rtype = rschema.type
+ if self.relation_cached(rtype, role):
+ continue
+ var = varmaker.next()
+ if role == 'subject':
+ targettype = rschema.objects(self.e_schema)[0]
+ card = rschema.rproperty(self.e_schema, targettype,
+ 'cardinality')[0]
+ if card == '1':
+ rql.append('%s %s %s' % (V, rtype, var))
+                    else: # card == '?'
+ rql.append('%s %s %s?' % (V, rtype, var))
+ else:
+                    targettype = rschema.subjects(self.e_schema)[0]
+                    card = rschema.rproperty(targettype, self.e_schema,
+                                             'cardinality')[1]
+ if card == '1':
+ rql.append('%s %s %s' % (var, rtype, V))
+                    else: # card == '?'
+ rql.append('%s? %s %s' % (var, rtype, V))
+ assert card in '1?', '%s %s %s %s' % (self.e_schema, rtype,
+ role, card)
+ selected.append(((rtype, role), var))
+ if selected:
+ # select V, we need it as the left most selected variable
+ # if some outer join are included to fetch inlined relations
+ rql = 'Any %s,%s %s' % (V, ','.join(var for attr, var in selected),
+ ','.join(rql))
+ execute = getattr(self.req, 'unsafe_execute', self.req.execute)
+ rset = execute(rql, {'x': self.eid}, 'x', build_descr=False)[0]
+ # handle attributes
+ for i in xrange(1, lastattr):
+ self[str(selected[i-1][0])] = rset[i]
+ # handle relations
+ for i in xrange(lastattr, len(rset)):
+ rtype, x = selected[i-1][0]
+ value = rset[i]
+ if value is None:
+ rrset = ResultSet([], rql, {'x': self.eid})
+ self.req.decorate_rset(rrset)
+ else:
+ rrset = self.req.eid_rset(value)
+ self.set_related_cache(rtype, x, rrset)
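+        # Sketch (hypothetical schema): completing a Person missing 'firstname'
+        # plus an inlined 0..1 'works_for' relation builds roughly
+        #   Any A,B,C WHERE A eid %(x)s, A firstname B, A works_for C?
+        # attributes then land in the dict cache, relations in the related cache.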
+
+ def get_value(self, name):
+        """get the value for the attribute relation `name`, querying the
+        repository if necessary.
+
+ :type name: str
+ :param name: name of the attribute to get
+ """
+ try:
+ value = self[name]
+ except KeyError:
+ if not self.is_saved():
+ return None
+ rql = "Any A WHERE X eid %%(x)s, X %s A" % name
+ # XXX should we really use unsafe_execute here??
+ execute = getattr(self.req, 'unsafe_execute', self.req.execute)
+ try:
+ rset = execute(rql, {'x': self.eid}, 'x')
+ except Unauthorized:
+ self[name] = value = None
+ else:
+ assert rset.rowcount <= 1, (self, rql, rset.rowcount)
+ try:
+ self[name] = value = rset.rows[0][0]
+ except IndexError:
+ # probably a multisource error
+ self.critical("can't get value for attribute %s of entity with eid %s",
+ name, self.eid)
+ if self.e_schema.destination(name) == 'String':
+ self[name] = value = self.req._('unaccessible')
+ else:
+ self[name] = value = None
+ return value
+
+ def related(self, rtype, role='subject', limit=None, entities=False):
+ """returns a resultset of related entities
+
+ :param role: is the role played by 'self' in the relation ('subject' or 'object')
+ :param limit: resultset's maximum size
+        :param entities: if True, the entities are returned; if False, a result set is returned
+ """
+ try:
+ return self.related_cache(rtype, role, entities, limit)
+ except KeyError:
+ pass
+ assert self.has_eid()
+ rql = self.related_rql(rtype, role)
+ rset = self.req.execute(rql, {'x': self.eid}, 'x')
+ self.set_related_cache(rtype, role, rset)
+ return self.related(rtype, role, limit, entities)
+
+ def related_rql(self, rtype, role='subject'):
+ rschema = self.schema[rtype]
+ if role == 'subject':
+ targettypes = rschema.objects(self.e_schema)
+ restriction = 'E eid %%(x)s, E %s X' % rtype
+ card = greater_card(rschema, (self.e_schema,), targettypes, 0)
+ else:
+ targettypes = rschema.subjects(self.e_schema)
+ restriction = 'E eid %%(x)s, X %s E' % rtype
+ card = greater_card(rschema, targettypes, (self.e_schema,), 1)
+ if len(targettypes) > 1:
+            # intersect the fetchable attributes of all possible target types
+            fetchattrs = None
+            for ttype in targettypes:
+                etypecls = self.vreg.etype_class(ttype)
+                if fetchattrs is None:
+                    fetchattrs = set(etypecls.fetch_attrs)
+                else:
+                    fetchattrs &= set(etypecls.fetch_attrs)
+ rql = etypecls.fetch_rql(self.req.user, [restriction], fetchattrs,
+ settype=False)
+ else:
+ etypecls = self.vreg.etype_class(targettypes[0])
+ rql = etypecls.fetch_rql(self.req.user, [restriction], settype=False)
+ # optimisation: remove ORDERBY if cardinality is 1 or ? (though
+        # greater_card returns '1' in both cases)
+ if card == '1':
+ if ' ORDERBY ' in rql:
+ rql = '%s WHERE %s' % (rql.split(' ORDERBY ', 1)[0],
+ rql.split(' WHERE ', 1)[1])
+ elif not ' ORDERBY ' in rql:
+ args = tuple(rql.split(' WHERE ', 1))
+ rql = '%s ORDERBY Z DESC WHERE X modification_date Z, %s' % args
+ return rql
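+        # Sketch (hypothetical 'works_for' relation, subject side): the generated
+        # query looks like
+        #   Any X,AA WHERE E eid %(x)s, E works_for X, X name AA
+        # with an ORDERBY clause added or dropped as described above.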
+
+ # generic vocabulary methods ##############################################
+
+ def vocabulary(self, rtype, role='subject', limit=None):
+ """vocabulary functions must return a list of couples
+ (label, eid) that will typically be used to fill the
+ edition view's combobox.
+
+ If `eid` is None in one of these couples, it should be
+ interpreted as a separator in case vocabulary results are grouped
+ """
+ try:
+ vocabfunc = getattr(self, '%s_%s_vocabulary' % (role, rtype))
+ except AttributeError:
+ vocabfunc = getattr(self, '%s_relation_vocabulary' % role)
+ # NOTE: it is the responsibility of `vocabfunc` to sort the result
+        # (directly through RQL or via a python sort). This is also
+ # important because `vocabfunc` might return a list with
+ # couples (label, None) which act as separators. In these
+ # cases, it doesn't make sense to sort results afterwards.
+ return vocabfunc(rtype, limit)
+
+ def subject_relation_vocabulary(self, rtype, limit=None):
+        """default vocabulary method for the given relation, looking for
+ relation's object entities (i.e. self is the subject)
+ """
+ if isinstance(rtype, basestring):
+ rtype = self.schema.rschema(rtype)
+ done = None
+ assert not rtype.is_final(), rtype
+ if self.has_eid():
+ done = set(e.eid for e in getattr(self, str(rtype)))
+ result = []
+ rsetsize = None
+ for objtype in rtype.objects(self.e_schema):
+ if limit is not None:
+ rsetsize = limit - len(result)
+ result += self.relation_vocabulary(rtype, objtype, 'subject',
+ rsetsize, done)
+ if limit is not None and len(result) >= limit:
+ break
+ return result
+
+ def object_relation_vocabulary(self, rtype, limit=None):
+        """default vocabulary method for the given relation, looking for
+ relation's subject entities (i.e. self is the object)
+ """
+ if isinstance(rtype, basestring):
+ rtype = self.schema.rschema(rtype)
+ done = None
+ if self.has_eid():
+ done = set(e.eid for e in getattr(self, 'reverse_%s' % rtype))
+ result = []
+ rsetsize = None
+ for subjtype in rtype.subjects(self.e_schema):
+ if limit is not None:
+ rsetsize = limit - len(result)
+ result += self.relation_vocabulary(rtype, subjtype, 'object',
+ rsetsize, done)
+ if limit is not None and len(result) >= limit:
+ break
+ return result
+
+ def relation_vocabulary(self, rtype, targettype, role,
+ limit=None, done=None):
+ if done is None:
+ done = set()
+ req = self.req
+ rset = self.unrelated(rtype, targettype, role, limit)
+ res = []
+ for entity in rset.entities():
+ if entity.eid in done:
+ continue
+ done.add(entity.eid)
+ res.append((entity.view('combobox'), entity.eid))
+ return res
+
+ def unrelated_rql(self, rtype, targettype, role, ordermethod=None,
+ vocabconstraints=True):
+ """build a rql to fetch `targettype` entities unrelated to this entity
+ using (rtype, role) relation
+ """
+ ordermethod = ordermethod or 'fetch_unrelated_order'
+ if isinstance(rtype, basestring):
+ rtype = self.schema.rschema(rtype)
+ if role == 'subject':
+ evar, searchedvar = 'S', 'O'
+ subjtype, objtype = self.e_schema, targettype
+ else:
+ searchedvar, evar = 'S', 'O'
+ objtype, subjtype = self.e_schema, targettype
+ if self.has_eid():
+ restriction = ['NOT S %s O' % rtype, '%s eid %%(x)s' % evar]
+ else:
+ restriction = []
+ constraints = rtype.rproperty(subjtype, objtype, 'constraints')
+ if vocabconstraints:
+            # RQLConstraint is a subclass of RQLVocabularyConstraint, so they
+ # will be included as well
+ restriction += [cstr.restriction for cstr in constraints
+ if isinstance(cstr, RQLVocabularyConstraint)]
+ else:
+ restriction += [cstr.restriction for cstr in constraints
+ if isinstance(cstr, RQLConstraint)]
+ etypecls = self.vreg.etype_class(targettype)
+ rql = etypecls.fetch_rql(self.req.user, restriction,
+ mainvar=searchedvar, ordermethod=ordermethod)
+ # ensure we have an order defined
+ if not ' ORDERBY ' in rql:
+ before, after = rql.split(' WHERE ', 1)
+ rql = '%s ORDERBY %s WHERE %s' % (before, searchedvar, after)
+ return rql
+
+ def unrelated(self, rtype, targettype, role='subject', limit=None,
+ ordermethod=None):
+ """return a result set of target type objects that may be related
+ by a given relation, with self as subject or object
+ """
+ rql = self.unrelated_rql(rtype, targettype, role, ordermethod)
+ if limit is not None:
+ before, after = rql.split(' WHERE ', 1)
+ rql = '%s LIMIT %s WHERE %s' % (before, limit, after)
+ if self.has_eid():
+ return self.req.execute(rql, {'x': self.eid})
+ return self.req.execute(rql)
+
+ # relations cache handling ################################################
+
+ def relation_cached(self, rtype, role):
+ """return true if the given relation is already cached on the instance
+ """
+ return '%s_%s' % (rtype, role) in self._related_cache
+
+ def related_cache(self, rtype, role, entities=True, limit=None):
+ """return values for the given relation if it's cached on the instance,
+ else raise `KeyError`
+ """
+ res = self._related_cache['%s_%s' % (rtype, role)][entities]
+ if limit:
+ if entities:
+ res = res[:limit]
+ else:
+ res = res.limit(limit)
+ return res
+
+ def set_related_cache(self, rtype, role, rset, col=0):
+ """set cached values for the given relation"""
+ if rset:
+ related = list(rset.entities(col))
+ rschema = self.schema.rschema(rtype)
+ if role == 'subject':
+ rcard = rschema.rproperty(self.e_schema, related[0].e_schema,
+ 'cardinality')[1]
+ target = 'object'
+ else:
+ rcard = rschema.rproperty(related[0].e_schema, self.e_schema,
+ 'cardinality')[0]
+ target = 'subject'
+ if rcard in '?1':
+ for rentity in related:
+ rentity._related_cache['%s_%s' % (rtype, target)] = (self.as_rset(), [self])
+ else:
+ related = []
+ self._related_cache['%s_%s' % (rtype, role)] = (rset, related)
+
+ def clear_related_cache(self, rtype=None, role=None):
+ """clear cached values for the given relation or the entire cache if
+ no relation is given
+ """
+ if rtype is None:
+ self._related_cache = {}
+ else:
+ assert role
+ self._related_cache.pop('%s_%s' % (rtype, role), None)
+
+ # raw edition utilities ###################################################
+
+ def set_attributes(self, **kwargs):
+ assert kwargs
+ relations = []
+ for key in kwargs:
+ relations.append('X %s %%(%s)s' % (key, key))
+ kwargs['x'] = self.eid
+ self.req.execute('SET %s WHERE X eid %%(x)s' % ','.join(relations),
+ kwargs, 'x')
+ for key, val in kwargs.iteritems():
+ self[key] = val
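+        # e.g. (hypothetical attribute) entity.set_attributes(name=u'babar')
+        # executes 'SET X name %(name)s WHERE X eid %(x)s' and keeps the local
+        # dict cache in sync with the new value.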
+
+ def delete(self):
+ assert self.has_eid(), self.eid
+ self.req.execute('DELETE %s X WHERE X eid %%(x)s' % self.e_schema,
+ {'x': self.eid})
+
+ # server side utilities ###################################################
+
+ def set_defaults(self):
+ """set default values according to the schema"""
+ self._default_set = set()
+ for attr, value in self.e_schema.defaults():
+ if not self.has_key(attr):
+ self[str(attr)] = value
+ self._default_set.add(attr)
+
+ def check(self, creation=False):
+        """check this entity against its schema. Only final relations
+        are checked here, constraints on actual relations are checked in hooks
+ """
+        # necessary since eid is handled specifically and yams requires it to be
+ # in the dictionary
+ if self.req is None:
+ _ = unicode
+ else:
+ _ = self.req._
+ self.e_schema.check(self, creation=creation, _=_)
+
+ def fti_containers(self, _done=None):
+ if _done is None:
+ _done = set()
+ _done.add(self.eid)
+ containers = tuple(self.e_schema.fulltext_containers())
+ if containers:
+ for rschema, target in containers:
+ if target == 'object':
+ targets = getattr(self, rschema.type)
+ else:
+ targets = getattr(self, 'reverse_%s' % rschema)
+ for entity in targets:
+ if entity.eid in _done:
+ continue
+ for container in entity.fti_containers(_done):
+ yield container
+ else:
+ yield self
+
+ def get_words(self):
+ """used by the full text indexer to get words to index
+
+ this method should only be used on the repository side since it depends
+ on the indexer package
+
+ :rtype: list
+        :return: the list of indexable words of this entity
+ """
+ from indexer.query_objects import tokenize
+ words = []
+ for rschema in self.e_schema.indexable_attributes():
+ try:
+ value = self.printable_value(rschema, format='text/plain')
+ except TransformError, ex:
+ continue
+ except:
+ self.exception("can't add value of %s to text index for entity %s",
+ rschema, self.eid)
+ continue
+ if value:
+ words += tokenize(value)
+
+ for rschema, role in self.e_schema.fulltext_relations():
+ if role == 'subject':
+ for entity in getattr(self, rschema.type):
+ words += entity.get_words()
+ else: # if role == 'object':
+ for entity in getattr(self, 'reverse_%s' % rschema.type):
+ words += entity.get_words()
+ return words
+
+
+# attribute and relation descriptors ##########################################
+
+class Attribute(object):
+ """descriptor that controls schema attribute access"""
+
+ def __init__(self, attrname):
+ assert attrname != 'eid'
+ self._attrname = attrname
+
+ def __get__(self, eobj, eclass):
+ if eobj is None:
+ return self
+ return eobj.get_value(self._attrname)
+
+ def __set__(self, eobj, value):
+ # XXX bw compat
+ # would be better to generate UPDATE queries than the current behaviour
+        eobj.warning("deprecated usage, don't use 'entity.attr = val' notation")
+ eobj[self._attrname] = value
+
+
+class Relation(object):
+ """descriptor that controls schema relation access"""
+ _role = None # for pylint
+
+ def __init__(self, rschema):
+ self._rschema = rschema
+ self._rtype = rschema.type
+
+ def __get__(self, eobj, eclass):
+ if eobj is None:
+            raise AttributeError('%s can only be accessed from instances'
+ % self._rtype)
+ return eobj.related(self._rtype, self._role, entities=True)
+
+ def __set__(self, eobj, value):
+ raise NotImplementedError
+
+
+class SubjectRelation(Relation):
+ """descriptor that controls schema relation access"""
+ _role = 'subject'
+
+class ObjectRelation(Relation):
+ """descriptor that controls schema relation access"""
+ _role = 'object'
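+
+# Usage sketch (hypothetical schema): on a 'Person' entity class these
+# descriptors make `person.firstname` go through get_value('firstname'),
+# `person.works_for` return related entities with 'Person' as subject, and
+# `company.reverse_works_for` return them from the object side.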
+
+from logging import getLogger
+from cubicweb import set_log_methods
+set_log_methods(Entity, getLogger('cubicweb.entity'))
diff -r 000000000000 -r b97547f5f1fa common/html4zope.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/common/html4zope.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,153 @@
+# Author: David Goodger
+# Contact: goodger@users.sourceforge.net
+# Revision: $Revision: 1.2 $
+# Date: $Date: 2005-07-04 16:36:50 $
+# Copyright: This module has been placed in the public domain.
+
+"""
+Simple HyperText Markup Language document tree Writer.
+
+The output conforms to the HTML 4.01 Transitional DTD and to the Extensible
+HTML version 1.0 Transitional DTD (*almost* strict). The output contains a
+minimum of formatting information. A cascading style sheet ("default.css" by
+default) is required for proper viewing with a modern graphical browser.
+
+http://cvs.zope.org/Zope/lib/python/docutils/writers/Attic/html4zope.py?rev=1.1.2.2&only_with_tag=ajung-restructuredtext-integration-branch&content-type=text/vnd.viewcvs-markup
+"""
+
+__docformat__ = 'reStructuredText'
+
+from logilab.mtconverter import html_escape
+
+from docutils import nodes
+from docutils.writers.html4css1 import Writer as CSS1Writer
+from docutils.writers.html4css1 import HTMLTranslator as CSS1HTMLTranslator
+import os
+
+default_level = int(os.environ.get('STX_DEFAULT_LEVEL', 3))
+
+class Writer(CSS1Writer):
+ """css writer using our html translator"""
+ def __init__(self, base_url):
+ CSS1Writer.__init__(self)
+ self.translator_class = URLBinder(base_url, HTMLTranslator)
+
+ def apply_template(self):
+ """overriding this is necessary with docutils >= 0.5"""
+ return self.visitor.astext()
+
+class URLBinder:
+ def __init__(self, url, klass):
+ self.base_url = url
+        self.translator_class = klass
+
+ def __call__(self, document):
+ translator = self.translator_class(document)
+ translator.base_url = self.base_url
+ return translator
+
+class HTMLTranslator(CSS1HTMLTranslator):
+ """ReST tree to html translator"""
+
+ def astext(self):
+ """return the extracted html"""
+ return ''.join(self.body)
+
+ def visit_title(self, node):
+ """Only 6 section levels are supported by HTML."""
+ if isinstance(node.parent, nodes.topic):
+ self.body.append(
+ self.starttag(node, 'p', '', CLASS='topic-title'))
+ if node.parent.hasattr('id'):
+ self.body.append(
+ self.starttag({}, 'a', '', name=node.parent['id']))
+            self.context.append('</p>\n')
+
+ def cell_call(self, row, col=0, vid=None, done=None, **kwargs):
+ done, entity = _done_init(done, self, row, col)
+ if done is None:
+ # entity is actually an error message
+ self.w(u'%s' % entity)
+ return
+ parent = entity.parent()
+ if parent:
+ parent.view(self.id, w=self.w, done=done)
+ self.w(self.separator)
+ entity.view(vid or self.item_vid, w=self.w)
+
+
+class ProgressMixIn(object):
+ """provide default implementations for IProgress interface methods"""
+
+ @property
+ @cached
+ def cost(self):
+ return self.progress_info()['estimated']
+
+ @property
+ @cached
+ def revised_cost(self):
+ return self.progress_info().get('estimatedcorrected', self.cost)
+
+ @property
+ @cached
+ def done(self):
+ return self.progress_info()['done']
+
+ @property
+ @cached
+ def todo(self):
+ return self.progress_info()['todo']
+
+ @cached
+ def progress_info(self):
+ raise NotImplementedError()
+
+ def finished(self):
+ return not self.in_progress()
+
+ def in_progress(self):
+ raise NotImplementedError()
+
+ def progress(self):
+ try:
+ return 100. * self.done / self.revised_cost
+ except ZeroDivisionError:
+ # total cost is 0 : if everything was estimated, task is completed
+            if self.progress_info().get('notestimated'):
+ return 0.
+ return 100
diff -r 000000000000 -r b97547f5f1fa common/mttransforms.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/common/mttransforms.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,82 @@
+"""mime type transformation engine for cubicweb, based on mtconverter
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab import mtconverter
+
+from logilab.mtconverter.engine import TransformEngine
+from logilab.mtconverter.transform import Transform
+from logilab.mtconverter import (register_base_transforms,
+ register_pil_transforms,
+ register_pygments_transforms)
+
+from cubicweb.common.uilib import rest_publish, html_publish, remove_html_tags
+
+HTML_MIMETYPES = ('text/html', 'text/xhtml', 'application/xhtml+xml')
+
+# CubicWeb specific transformations
+
+class rest_to_html(Transform):
+ inputs = ('text/rest', 'text/x-rst')
+ output = 'text/html'
+ def _convert(self, trdata):
+ return rest_publish(trdata.appobject, trdata.decode())
+
+class html_to_html(Transform):
+ inputs = HTML_MIMETYPES
+ output = 'text/html'
+ def _convert(self, trdata):
+ return html_publish(trdata.appobject, trdata.data)
+
+class ept_to_html(Transform):
+ inputs = ('text/cubicweb-page-template',)
+ output = 'text/html'
+ output_encoding = 'utf-8'
+ def _convert(self, trdata):
+ from cubicweb.common.tal import compile_template
+ value = trdata.encode(self.output_encoding)
+ return trdata.appobject.tal_render(compile_template(value), {})
+
+
+# Instantiate and configure the transformation engine
+
+mtconverter.UNICODE_POLICY = 'replace'
+
+ENGINE = TransformEngine()
+ENGINE.add_transform(rest_to_html())
+ENGINE.add_transform(html_to_html())
+ENGINE.add_transform(ept_to_html())
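+# Note: the engine is driven from Entity.mtc_transform() (see common/entity.py
+# in this patch), which builds a TransformData(data, format, encoding,
+# appobject=entity) and converts it to the target mime type, e.g. from
+# 'text/rest' to 'text/html'.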
+
+if register_pil_transforms(ENGINE, verb=False):
+ HAS_PIL_TRANSFORMS = True
+else:
+ HAS_PIL_TRANSFORMS = False
+
+try:
+ from logilab.mtconverter.transforms import pygmentstransforms
+ for mt in ('text/plain',) + HTML_MIMETYPES:
+ try:
+ pygmentstransforms.mimetypes.remove(mt)
+ except ValueError:
+ continue
+ register_pygments_transforms(ENGINE, verb=False)
+
+ def patch_convert(cls):
+ def _convert(self, trdata, origconvert=cls._convert):
+ try:
+ trdata.appobject.req.add_css('pygments.css')
+ except AttributeError: # session has no add_css, only http request
+ pass
+ return origconvert(self, trdata)
+ cls._convert = _convert
+ patch_convert(pygmentstransforms.PygmentsHTMLTransform)
+
+ HAS_PYGMENTS_TRANSFORMS = True
+except ImportError:
+ HAS_PYGMENTS_TRANSFORMS = False
+
+register_base_transforms(ENGINE, verb=False)
diff -r 000000000000 -r b97547f5f1fa common/registerers.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/common/registerers.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,205 @@
+"""This file contains some basic registerers required by application objects
+registry to handle registration at startup time.
+
+A registerer is responsible for telling whether an object should be registered,
+according to the application's schema or to already registered objects
+
+:organization: Logilab
+:copyright: 2006-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cubicweb.vregistry import registerer
+
+
+def _accepts_interfaces(obj):
+ return sorted(getattr(obj, 'accepts_interfaces', ()))
+
+
+class yes_registerer(registerer):
+ """register without any other action"""
+ def do_it_yourself(self, registered):
+ return self.vobject
+
+class priority_registerer(registerer):
+    """systematically kick the previously registered class and register the
+    wrapped class (based on the fact that directories containing vobjects
+    are loaded from the most generic to the most specific).
+
+    This is usually used for templates or startup views where we want to
+    keep only the latest in the load path
+ """
+ def do_it_yourself(self, registered):
+ if registered:
+ if len(registered) > 1:
+                self.warning('priority_registerer found more than one registered object '
+ '(registerer monkey patch ?)')
+ for regobj in registered[:]:
+ self.kick(registered, regobj)
+ return self.vobject
+
+ def remove_equivalents(self, registered):
+ for _obj in registered[:]:
+ if self.equivalent(_obj):
+ self.kick(registered, _obj)
+ break
+
+ def remove_all_equivalents(self, registered):
+ for _obj in registered[:]:
+ if _obj is self.vobject:
+ continue
+ if self.equivalent(_obj):
+ self.kick(registered, _obj)
+
+ def equivalent(self, other):
+ raise NotImplementedError(self, self.vobject)
+
+
+class kick_registerer(registerer):
+    """systematically kick the previously registered class and don't register the
+    wrapped class. This is temporarily used to discard library objects that are
+    registrable but that we don't want to use
+ """
+ def do_it_yourself(self, registered):
+ if registered:
+ self.kick(registered, registered[-1])
+ return
+
+
+class accepts_registerer(priority_registerer):
+    """register according to the .accepts attribute of the wrapped
+    class, which should be a tuple referring to some entity types
+
+    * if none of these types is defined in the application's schema, skip
+      the wrapped class
+ * if the class defines a requires attribute, each entity type defined
+ in the requires list must be in the schema
+ * if an object previously registered has equivalent .accepts
+ attribute, kick it out
+ * register
+ """
+ def do_it_yourself(self, registered):
+        # if the object accepts interfaces, we have to register it now and
+        # remove it later if no object implements the accepted interfaces
+ if _accepts_interfaces(self.vobject):
+ return self.vobject
+ if not 'Any' in self.vobject.accepts:
+ for ertype in self.vobject.accepts:
+ if ertype in self.schema:
+ break
+ else:
+ self.skip()
+ return None
+ for required in getattr(self.vobject, 'requires', ()):
+ if required not in self.schema:
+ self.skip()
+ return
+ self.remove_equivalents(registered)
+ return self.vobject
+
+ def equivalent(self, other):
+ if _accepts_interfaces(self.vobject) != _accepts_interfaces(other):
+ return False
+ try:
+ newaccepts = list(other.accepts)
+ for etype in self.vobject.accepts:
+ try:
+ newaccepts.remove(etype)
+ except ValueError:
+ continue
+ if newaccepts:
+ other.accepts = tuple(newaccepts)
+ return False
+ return True
+ except AttributeError:
+ return False
+
+
+class id_registerer(priority_registerer):
+ """register according to the "id" attribute of the wrapped class,
+    referring to an entity type.
+
+    * if the type is not Any and is not defined in the application's schema,
+      skip the wrapped class
+ * if an object previously registered has the same .id attribute,
+ kick it out
+ * register
+ """
+ def do_it_yourself(self, registered):
+ etype = self.vobject.id
+ if etype != 'Any' and not self.schema.has_entity(etype):
+ self.skip()
+ return
+ self.remove_equivalents(registered)
+ return self.vobject
+
+ def equivalent(self, other):
+ return other.id == self.vobject.id
+
+
+class etype_rtype_registerer(registerer):
+    """registerer handling optional .etype and .rtype attributes:
+
+ * if .etype is set and is not an entity type defined in the
+ application schema, skip the wrapped class
+ * if .rtype or .relname is set and is not a relation type defined in
+ the application schema, skip the wrapped class
+ * register
+ """
+ def do_it_yourself(self, registered):
+ cls = self.vobject
+ if hasattr(cls, 'etype'):
+ if not self.schema.has_entity(cls.etype):
+ return
+ rtype = getattr(cls, 'rtype', None)
+ if rtype and not self.schema.has_relation(rtype):
+ return
+ return cls
+
+class etype_rtype_priority_registerer(etype_rtype_registerer):
+ """add priority behaviour to the etype_rtype_registerer
+ """
+ def do_it_yourself(self, registered):
+ cls = super(etype_rtype_priority_registerer, self).do_it_yourself(registered)
+ if cls:
+ registerer = priority_registerer(self.registry, cls)
+ cls = registerer.do_it_yourself(registered)
+ return cls
+
+class action_registerer(etype_rtype_registerer):
+ """'all in one' actions registerer, handling optional .accepts,
+ .etype and .rtype attributes:
+
+ * if .etype is set and is not an entity type defined in the
+ application schema, skip the wrapped class
+ * if .rtype or .relname is set and is not a relation type defined in
+ the application schema, skip the wrapped class
+ * if .accepts is set, delegate to the accepts_registerer
+ * register
+ """
+ def do_it_yourself(self, registered):
+ cls = super(action_registerer, self).do_it_yourself(registered)
+ if hasattr(cls, 'accepts'):
+ registerer = accepts_registerer(self.registry, cls)
+ cls = registerer.do_it_yourself(registered)
+ return cls
+
+
+class extresources_registerer(priority_registerer):
+    """registerer according to a .need_resources attribute which
+    should list the resource identifiers necessary for the wrapped object.
+    If one of these resources is missing, don't register
+ """
+ def do_it_yourself(self, registered):
+ if not hasattr(self.config, 'has_resource'):
+ return
+ for resourceid in self.vobject.need_resources:
+ if not self.config.has_resource(resourceid):
+ return
+ return super(extresources_registerer, self).do_it_yourself(registered)
+
+
+__all__ = [cls.__name__ for cls in globals().values()
+ if isinstance(cls, type) and issubclass(cls, registerer)
+ and not cls is registerer]
diff -r 000000000000 -r b97547f5f1fa common/rest.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/common/rest.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,223 @@
+"""rest publishing functions
+
+contains some functions and setup of docutils for cubicweb
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cStringIO import StringIO
+from itertools import chain
+from logging import getLogger
+from os.path import join
+
+from docutils import statemachine, nodes, utils, io
+from docutils.core import publish_string
+from docutils.parsers.rst import Parser, states, directives
+from docutils.parsers.rst.roles import register_canonical_role, set_classes
+
+from logilab.mtconverter import html_escape
+
+from cubicweb.common.html4zope import Writer
+
+# We provide our own parser as an attempt to get rid of
+# state machine reinstantiation
+
+import re
+# compile states.Body patterns
+for k, v in states.Body.patterns.items():
+ if isinstance(v, str):
+ states.Body.patterns[k] = re.compile(v)
+
+# register ReStructured Text mimetype / extensions
+import mimetypes
+mimetypes.add_type('text/rest', '.rest')
+mimetypes.add_type('text/rest', '.rst')
+
+
+LOGGER = getLogger('cubicweb.rest')
+
+def eid_reference_role(role, rawtext, text, lineno, inliner,
+ options={}, content=[]):
+ try:
+ try:
+ eid_num, rest = text.split(u':', 1)
+        except ValueError:
+ eid_num, rest = text, '#'+text
+ eid_num = int(eid_num)
+ if eid_num < 0:
+ raise ValueError
+ except ValueError:
+ msg = inliner.reporter.error(
+ 'EID number must be a positive number; "%s" is invalid.'
+ % text, line=lineno)
+ prb = inliner.problematic(rawtext, rawtext, msg)
+ return [prb], [msg]
+ # Base URL mainly used by inliner.pep_reference; so this is correct:
+ context = inliner.document.settings.context
+ refedentity = context.req.eid_rset(eid_num).get_entity(0, 0)
+ ref = refedentity.absolute_url()
+ set_classes(options)
+ return [nodes.reference(rawtext, utils.unescape(rest), refuri=ref,
+ **options)], []
+
+register_canonical_role('eid', eid_reference_role)
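+# Role usage sketch in ReST source (illustrative, the eid value is made up):
+#
+#   see :eid:`1234` or :eid:`1234:some label`
+#
+# the first form is rendered as a link labelled "#1234", the second uses the
+# text after the colon as the link label, as implemented above.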
+
+
+def card_reference_role(role, rawtext, text, lineno, inliner,
+ options={}, content=[]):
+ text = text.strip()
+ try:
+ wikiid, rest = text.split(u':', 1)
+    except ValueError:
+ wikiid, rest = text, text
+ context = inliner.document.settings.context
+ cardrset = context.req.execute('Card X WHERE X wikiid %(id)s',
+ {'id': wikiid})
+ if cardrset:
+ ref = cardrset.get_entity(0, 0).absolute_url()
+ else:
+ schema = context.schema
+ if schema.eschema('Card').has_perm(context.req, 'add'):
+ ref = context.req.build_url('view', vid='creation', etype='Card', wikiid=wikiid)
+ else:
+ ref = '#'
+ set_classes(options)
+ return [nodes.reference(rawtext, utils.unescape(rest), refuri=ref,
+ **options)], []
+
+register_canonical_role('card', card_reference_role)
+
+
+def winclude_directive(name, arguments, options, content, lineno,
+ content_offset, block_text, state, state_machine):
+ """Include a reST file as part of the content of this reST file.
+
+    same as the standard include directive but using config.locate_doc_file to
+    get the actual file to include.
+
+    Most of this implementation is copied from the `include` directive defined
+    in `docutils.parsers.rst.directives.misc`
+ """
+ context = state.document.settings.context
+ source = state_machine.input_lines.source(
+ lineno - state_machine.input_offset - 1)
+ #source_dir = os.path.dirname(os.path.abspath(source))
+ fid = arguments[0]
+ for lang in chain((context.req.lang, context.vreg.property_value('ui.language')),
+ context.config.available_languages()):
+ rid = '%s_%s.rst' % (fid, lang)
+ resourcedir = context.config.locate_doc_file(rid)
+ if resourcedir:
+ break
+ else:
+ severe = state_machine.reporter.severe(
+ 'Problems with "%s" directive path:\nno resource matching %s.'
+ % (name, fid),
+ nodes.literal_block(block_text, block_text), line=lineno)
+ return [severe]
+ path = join(resourcedir, rid)
+ encoding = options.get('encoding', state.document.settings.input_encoding)
+ try:
+ state.document.settings.record_dependencies.add(path)
+ include_file = io.FileInput(
+ source_path=path, encoding=encoding,
+ error_handler=state.document.settings.input_encoding_error_handler,
+ handle_io_errors=None)
+ except IOError, error:
+ severe = state_machine.reporter.severe(
+ 'Problems with "%s" directive path:\n%s: %s.'
+ % (name, error.__class__.__name__, error),
+ nodes.literal_block(block_text, block_text), line=lineno)
+ return [severe]
+ try:
+ include_text = include_file.read()
+ except UnicodeError, error:
+ severe = state_machine.reporter.severe(
+ 'Problem with "%s" directive:\n%s: %s'
+ % (name, error.__class__.__name__, error),
+ nodes.literal_block(block_text, block_text), line=lineno)
+ return [severe]
+ if options.has_key('literal'):
+ literal_block = nodes.literal_block(include_text, include_text,
+ source=path)
+ literal_block.line = 1
+ return literal_block
+ else:
+ include_lines = statemachine.string2lines(include_text,
+ convert_whitespace=1)
+ state_machine.insert_input(include_lines, path)
+ return []
+
+winclude_directive.arguments = (1, 0, 1)
+winclude_directive.options = {'literal': directives.flag,
+ 'encoding': directives.encoding}
+directives.register_directive('winclude', winclude_directive)
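+# Directive usage sketch in a ReST document (illustrative, the file id is made
+# up); the file is resolved as <fid>_<lang>.rst through config.locate_doc_file
+# for the request's language, as implemented above:
+#
+#   .. winclude:: mypage
+#      :literal: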
+
+class CubicWebReSTParser(Parser):
+ """The (customized) reStructuredText parser."""
+
+ def __init__(self):
+ self.initial_state = 'Body'
+ self.state_classes = states.state_classes
+ self.inliner = states.Inliner()
+ self.statemachine = states.RSTStateMachine(
+ state_classes=self.state_classes,
+ initial_state=self.initial_state,
+ debug=0)
+
+ def parse(self, inputstring, document):
+ """Parse `inputstring` and populate `document`, a document tree."""
+ self.setup_parse(inputstring, document)
+ inputlines = statemachine.string2lines(inputstring,
+ convert_whitespace=1)
+ self.statemachine.run(inputlines, document, inliner=self.inliner)
+ self.finish_parse()
+
+
+_REST_PARSER = CubicWebReSTParser()
+
+def rest_publish(context, data):
+ """publish a string formatted as ReStructured Text to HTML
+
+ :type context: a cubicweb application object
+
+ :type data: str
+ :param data: some ReST text
+
+ :rtype: unicode
+ :return:
+      the data formatted as HTML or the original data if an error occurred
+ """
+ req = context.req
+ if isinstance(data, unicode):
+ encoding = 'unicode'
+ else:
+ encoding = req.encoding
+ settings = {'input_encoding': encoding, 'output_encoding': 'unicode',
+ 'warning_stream': StringIO(), 'context': context,
+ # dunno what's the max, severe is 4, and we never want a crash
+ # (though try/except may be a better option...)
+ 'halt_level': 10,
+ }
+ if context:
+ if hasattr(req, 'url'):
+ base_url = req.url()
+ elif hasattr(context, 'absolute_url'):
+ base_url = context.absolute_url()
+ else:
+ base_url = req.base_url()
+ else:
+ base_url = None
+ try:
+ return publish_string(writer=Writer(base_url=base_url),
+ parser=_REST_PARSER, source=data,
+ settings_overrides=settings)
+ except Exception:
+ LOGGER.exception('error while publishing ReST text')
+ if not isinstance(data, unicode):
+ data = unicode(data, encoding, 'replace')
+ return html_escape(req._('error while publishing ReST text')
+ + '\n\n' + data)
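+
+# Minimal usage sketch (illustrative; assumes `self` is an appobject with a
+# .req attribute, as the views elsewhere in this changeset are):
+#
+#     html = rest_publish(self, u'some *ReST* text with an :eid:`42` link')
+#
+# on error the original text is returned HTML-escaped instead of raising, as
+# implemented above.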
diff -r 000000000000 -r b97547f5f1fa common/schema.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/common/schema.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,3 @@
+from warnings import warn
+warn('moved to cubicweb.schema', DeprecationWarning, stacklevel=2)
+from cubicweb.schema import *
diff -r 000000000000 -r b97547f5f1fa common/selectors.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/common/selectors.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,413 @@
+"""This file contains some basic selectors required by application objects.
+
+A selector is responsible for scoring how well an object may be used with a
+given result set (publishing-time selection)
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+__docformat__ = "restructuredtext en"
+
+from logilab.common.compat import all
+
+from cubicweb import Unauthorized
+from cubicweb.cwvreg import DummyCursorError
+from cubicweb.vregistry import chainall, chainfirst
+from cubicweb.cwconfig import CubicWebConfiguration
+from cubicweb.schema import split_expression
+
+
+def lltrace(selector):
+ # don't wrap selectors if not in development mode
+ if CubicWebConfiguration.mode == 'installed':
+ return selector
+ def traced(cls, *args, **kwargs):
+ ret = selector(cls, *args, **kwargs)
+ cls.lldebug('selector %s returned %s for %s', selector.__name__, ret, cls)
+ return ret
+ return traced
+
+# very basic selectors ########################################################
+
+def yes_selector(cls, *args, **kwargs):
+ """accept everything"""
+ return 1
+
+@lltrace
+def norset_selector(cls, req, rset, *args, **kwargs):
+ """accept no result set"""
+ if rset is None:
+ return 1
+ return 0
+
+@lltrace
+def rset_selector(cls, req, rset, *args, **kwargs):
+    """accept any result set, whatever the number of results"""
+ if rset is not None:
+ return 1
+ return 0
+
+@lltrace
+def anyrset_selector(cls, req, rset, *args, **kwargs):
+ """accept any non empty result set"""
+ if rset and rset.rowcount: # XXX if rset is not None and rset.rowcount > 0:
+ return 1
+ return 0
+
+@lltrace
+def emptyrset_selector(cls, req, rset, *args, **kwargs):
+ """accept empty result set"""
+ if rset is not None and rset.rowcount == 0:
+ return 1
+ return 0
+
+@lltrace
+def onelinerset_selector(cls, req, rset, row=None, *args, **kwargs):
+ """accept result set with a single line of result"""
+ if rset is not None and (row is not None or rset.rowcount == 1):
+ return 1
+ return 0
+
+@lltrace
+def twolinerset_selector(cls, req, rset, *args, **kwargs):
+ """accept result set with at least two lines of result"""
+ if rset is not None and rset.rowcount > 1:
+ return 1
+ return 0
+
+@lltrace
+def twocolrset_selector(cls, req, rset, *args, **kwargs):
+ """accept result set with at least one line and two columns of result"""
+ if rset is not None and rset.rowcount > 0 and len(rset.rows[0]) > 1:
+ return 1
+ return 0
+
+@lltrace
+def largerset_selector(cls, req, rset, *args, **kwargs):
+ """accept result sets with more rows than the page size
+ """
+ if rset is None or len(rset) <= req.property_value('navigation.page-size'):
+ return 0
+ return 1
+
+@lltrace
+def sortedrset_selector(cls, req, rset, row=None, col=None):
+ """accept sorted result set"""
+ rqlst = rset.syntax_tree()
+ if len(rqlst.children) > 1 or not rqlst.children[0].orderby:
+ return 0
+ return 2
+
+@lltrace
+def oneetyperset_selector(cls, req, rset, *args, **kwargs):
+    """accept result sets where entities in the first column are all of the
+    same type
+ """
+ if len(rset.column_types(0)) != 1:
+ return 0
+ return 1
+
+@lltrace
+def multitype_selector(cls, req, rset, **kwargs):
+ """accepts resultsets containing several entity types"""
+ if rset:
+ etypes = rset.column_types(0)
+ if len(etypes) > 1:
+ return 1
+ return 0
+
+@lltrace
+def searchstate_selector(cls, req, rset, row=None, col=None, **kwargs):
+ """extend the anyrset_selector by checking if the current search state
+ is in a .search_states attribute of the wrapped class
+
+    search state should be either 'normal' or 'linksearch' (e.g. searching for an
+ object to create a relation with another)
+ """
+ try:
+ if not req.search_state[0] in cls.search_states:
+ return 0
+ except AttributeError:
+        return 1 # the class doesn't care about search state, accept it
+ return 1
+
+@lltrace
+def anonymous_selector(cls, req, *args, **kwargs):
+ """accept if user is anonymous"""
+ if req.cnx.anonymous_connection:
+ return 1
+ return 0
+
+@lltrace
+def not_anonymous_selector(cls, req, *args, **kwargs):
+    """accept if user is not anonymous"""
+ return not anonymous_selector(cls, req, *args, **kwargs)
+
+
+# not so basic selectors ######################################################
+
+@lltrace
+def req_form_params_selector(cls, req, *args, **kwargs):
+    """check that parameters listed in the form_params attribute of the
+    wrapped class are present in the request's form parameters
+ """
+ score = 0
+ for param in cls.form_params:
+ val = req.form.get(param)
+ if not val:
+ return 0
+ score += 1
+ return score + 1
+
+@lltrace
+def kwargs_selector(cls, req, *args, **kwargs):
+    """check that arguments listed in the expected_kwargs attribute of the
+    wrapped class are present in the given named parameters
+    """
+    for arg in cls.expected_kwargs:
+        if arg not in kwargs:
+ return 0
+ return 1
+
+@lltrace
+def etype_form_selector(cls, req, *args, **kwargs):
+    """check etype presence in request form *and* conformance with the class's .accepts attribute"""
+ if 'etype' not in req.form and 'etype' not in kwargs:
+ return 0
+ try:
+ etype = req.form['etype']
+ except KeyError:
+ etype = kwargs['etype']
+ # value is a list or a tuple if web request form received several
+ # values for etype parameter
+ assert isinstance(etype, basestring), "got multiple etype parameters in req.form"
+ if 'Any' in cls.accepts:
+ return 1
+ # no Any found, we *need* exact match
+ if etype not in cls.accepts:
+ return 0
+ # exact match must return a greater value than 'Any'-match
+ return 2
+
+@lltrace
+def _nfentity_selector(cls, req, rset, row=None, col=None, **kwargs):
+ """accept non final entities
+ if row is not specified, use the first one
+ if col is not specified, use the first one
+ """
+ etype = rset.description[row or 0][col or 0]
+ if etype is None: # outer join
+ return 0
+ if cls.schema.eschema(etype).is_final():
+ return 0
+ return 1
+
+@lltrace
+def _rqlcondition_selector(cls, req, rset, row=None, col=None, **kwargs):
+ """accept single entity result set if the entity match an rql condition
+ """
+ if cls.condition:
+ eid = rset[row or 0][col or 0]
+ if 'U' in frozenset(split_expression(cls.condition)):
+ rql = 'Any X WHERE X eid %%(x)s, U eid %%(u)s, %s' % cls.condition
+ else:
+ rql = 'Any X WHERE X eid %%(x)s, %s' % cls.condition
+ try:
+ return len(req.execute(rql, {'x': eid, 'u': req.user.eid}, 'x'))
+ except Unauthorized:
+ return 0
+
+ return 1
+
+@lltrace
+def _interface_selector(cls, req, rset, row=None, col=None, **kwargs):
+ """accept uniform result sets, and apply the following rules:
+
+    * wrapped class must have an accepts_interfaces attribute listing the
+      accepted ORed interfaces
+ * if row is None, return the sum of values returned by the method
+ for each entity's class in the result set. If any score is 0,
+ return 0.
+ * if row is specified, return the value returned by the method with
+ the entity's class of this row
+ """
+ score = 0
+ # check 'accepts' to give priority to more specific classes
+ if row is None:
+ for etype in rset.column_types(col or 0):
+ eclass = cls.vreg.etype_class(etype)
+ escore = 0
+ for iface in cls.accepts_interfaces:
+ escore += iface.is_implemented_by(eclass)
+ if not escore:
+ return 0
+ score += escore
+ if eclass.id in getattr(cls, 'accepts', ()):
+ score += 2
+ return score + 1
+ etype = rset.description[row][col or 0]
+ if etype is None: # outer join
+ return 0
+ eclass = cls.vreg.etype_class(etype)
+ for iface in cls.accepts_interfaces:
+ score += iface.is_implemented_by(eclass)
+ if score:
+ if eclass.id in getattr(cls, 'accepts', ()):
+ score += 2
+ else:
+ score += 1
+ return score
+
+@lltrace
+def score_entity_selector(cls, req, rset, row=None, col=None, **kwargs):
+ if row is None:
+ rows = xrange(rset.rowcount)
+ else:
+ rows = (row,)
+ for row in rows:
+ try:
+ score = cls.score_entity(rset.get_entity(row, col or 0))
+ except DummyCursorError:
+            # got a dummy cursor error, which means we are currently
+            # using a dummy rset to list possible views for an entity
+            # type, not for an actual result set. In that case, we
+            # don't care about the value and consider the object as selectable
+ return 1
+ if not score:
+ return 0
+ return 1
+
+@lltrace
+def accept_rset_selector(cls, req, rset, row=None, col=None, **kwargs):
+ """simply delegate to cls.accept_rset method"""
+ return cls.accept_rset(req, rset, row=row, col=col)
+
+@lltrace
+def but_etype_selector(cls, req, rset, row=None, col=None, **kwargs):
+    """restrict the searchstate_accept_one_selector to exclude the entity type
+    referred to by the .etype attribute
+ """
+ if rset.description[row or 0][col or 0] == cls.etype:
+ return 0
+ return 1
+
+@lltrace
+def etype_rtype_selector(cls, req, rset, row=None, col=None, **kwargs):
+    """only check that the user has read access on the entity type referred to
+    by the .etype attribute and on the relation type referred to by the
+    .rtype attribute, if set.
+ """
+ schema = cls.schema
+ perm = getattr(cls, 'require_permission', 'read')
+ if hasattr(cls, 'etype'):
+ eschema = schema.eschema(cls.etype)
+ if not (eschema.has_perm(req, perm) or eschema.has_local_role(perm)):
+ return 0
+ if hasattr(cls, 'rtype'):
+ if not schema.rschema(cls.rtype).has_perm(req, perm):
+ return 0
+ return 1
+
+@lltrace
+def accept_rtype_selector(cls, req, rset, row=None, col=None, **kwargs):
+ if hasattr(cls, 'rtype'):
+ if row is None:
+ for etype in rset.column_types(col or 0):
+ if not cls.relation_possible(etype):
+ return 0
+ elif not cls.relation_possible(rset.description[row][col or 0]):
+ return 0
+ return 1
+
+@lltrace
+def one_has_relation_selector(cls, req, rset, row=None, col=None, **kwargs):
+    """check that the user has read access on the relation type referred to by
+    the .rtype attribute of the class, and that at least one entity type in the
+    result set has this relation.
+ """
+ schema = cls.schema
+ perm = getattr(cls, 'require_permission', 'read')
+ if not schema.rschema(cls.rtype).has_perm(req, perm):
+ return 0
+ if row is None:
+ for etype in rset.column_types(col or 0):
+ if cls.relation_possible(etype):
+ return 1
+ elif cls.relation_possible(rset.description[row][col or 0]):
+ return 1
+ return 0
+
+@lltrace
+def in_group_selector(cls, req, rset=None, row=None, col=None, **kwargs):
+ """select according to user's groups"""
+ if not cls.require_groups:
+ return 1
+ user = req.user
+ if user is None:
+ return int('guests' in cls.require_groups)
+ score = 0
+ if 'owners' in cls.require_groups and rset:
+ if row is not None:
+ eid = rset[row][col or 0]
+ if user.owns(eid):
+ score = 1
+ else:
+ score = all(user.owns(r[col or 0]) for r in rset)
+ score += user.matching_groups(cls.require_groups)
+ if score:
+        # add 1 so that an object with one matching group takes priority
+        # over an object without require_groups
+ return score + 1
+ return 0
+
+@lltrace
+def add_etype_selector(cls, req, rset, row=None, col=None, **kwargs):
+    """only check that the user has add access on the entity type referred to
+    by the .etype attribute.
+ """
+ if not cls.schema.eschema(cls.etype).has_perm(req, 'add'):
+ return 0
+ return 1
+
+@lltrace
+def contextprop_selector(cls, req, rset, row=None, col=None, context=None,
+ **kwargs):
+ propval = req.property_value('%s.%s.context' % (cls.__registry__, cls.id))
+ if not propval:
+ propval = cls.context
+ if context is not None and propval is not None and context != propval:
+ return 0
+ return 1
+
+@lltrace
+def primaryview_selector(cls, req, rset, row=None, col=None, view=None,
+ **kwargs):
+ if view is not None and not view.is_primary():
+ return 0
+ return 1
+
+
+# compound selectors ##########################################################
+
+nfentity_selector = chainall(anyrset_selector, _nfentity_selector)
+interface_selector = chainall(nfentity_selector, _interface_selector)
+
+accept_selector = chainall(nfentity_selector, accept_rset_selector)
+accept_one_selector = chainall(onelinerset_selector, accept_selector)
+
+rqlcondition_selector = chainall(nfentity_selector,
+ onelinerset_selector,
+ _rqlcondition_selector)
+
+searchstate_accept_selector = chainall(anyrset_selector, searchstate_selector,
+ accept_selector)
+searchstate_accept_one_selector = chainall(anyrset_selector, searchstate_selector,
+ accept_selector, rqlcondition_selector)
+searchstate_accept_one_but_etype_selector = chainall(searchstate_accept_one_selector,
+ but_etype_selector)
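+
+# composition sketch (illustrative): a compound built with chainall fails
+# (score 0) as soon as one of its selectors fails, so an application object can
+# simply point at one of the compounds above; the __selectors__ attribute and
+# base class below are assumptions, not defined in this file:
+#
+#     class PersonView(EntityView):
+#         accepts = ('Personne',)
+#         __selectors__ = (searchstate_accept_selector,)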
+
+__all__ = [name for name in globals().keys() if name.endswith('selector')]
+__all__ += ['chainall', 'chainfirst']
diff -r 000000000000 -r b97547f5f1fa common/tal.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/common/tal.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,256 @@
+"""provides simpleTAL extensions for CubicWeb
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+
+__docformat__ = "restructuredtext en"
+
+import sys
+import re
+from os.path import exists, isdir, join
+from logging import getLogger
+from StringIO import StringIO
+
+from simpletal import simpleTAL, simpleTALES
+
+from logilab.common.decorators import cached
+
+LOGGER = getLogger('cubicweb.tal')
+
+
+class LoggerAdapter(object):
+ def __init__(self, tal_logger):
+ self.tal_logger = tal_logger
+
+ def debug(self, msg):
+ LOGGER.debug(msg)
+
+ def warn(self, msg):
+ LOGGER.warning(msg)
+
+ def __getattr__(self, attrname):
+ return getattr(self.tal_logger, attrname)
+
+
+class CubicWebContext(simpleTALES.Context):
+ """add facilities to access entity / resultset"""
+
+ def __init__(self, options=None, allowPythonPath=1):
+ simpleTALES.Context.__init__(self, options, allowPythonPath)
+ self.log = LoggerAdapter(self.log)
+
+ def update(self, context):
+ for varname, value in context.items():
+ self.addGlobal(varname, value)
+
+ def addRepeat(self, name, var, initialValue):
+ simpleTALES.Context.addRepeat(self, name, var, initialValue)
+
+# XXX FIXME need to find a clean way to define OPCODE values for extensions
+I18N_CONTENT = 18
+I18N_REPLACE = 19
+RQL_EXECUTE = 20
+# simpleTAL uses the OPCODE values to define priority over commands.
+# TAL_ITER should have the same priority as TAL_REPEAT (i.e. 3), but
+# we can't use the same OPCODE for two different commands without changing
+# the simpleTAL implementation. Another solution would be to totally override
+# the REPEAT implementation with the ITER one, but some specific operations
+# (involving len() for instance) are not implemented for ITER, so we prefer
+# to keep both implementations for now, and to fool simpleTAL by using a float
+# number between 3 and 4
+TAL_ITER = 3.1
+
+
+# FIX simpleTAL HTML 4.01 stupidity
+# (simpleTAL never closes tags like INPUT, IMG, HR ...)
+simpleTAL.HTML_FORBIDDEN_ENDTAG.clear()
+
+class CubicWebTemplateCompiler(simpleTAL.HTMLTemplateCompiler):
+ """extends default compiler by adding i18n:content commands"""
+
+ def __init__(self):
+ simpleTAL.HTMLTemplateCompiler.__init__(self)
+ self.commandHandler[I18N_CONTENT] = self.compile_cmd_i18n_content
+ self.commandHandler[I18N_REPLACE] = self.compile_cmd_i18n_replace
+ self.commandHandler[RQL_EXECUTE] = self.compile_cmd_rql
+ self.commandHandler[TAL_ITER] = self.compile_cmd_tal_iter
+
+ def setTALPrefix(self, prefix):
+ simpleTAL.TemplateCompiler.setTALPrefix(self, prefix)
+ self.tal_attribute_map['i18n:content'] = I18N_CONTENT
+ self.tal_attribute_map['i18n:replace'] = I18N_REPLACE
+ self.tal_attribute_map['rql:execute'] = RQL_EXECUTE
+ self.tal_attribute_map['tal:iter'] = TAL_ITER
+
+ def compile_cmd_i18n_content(self, argument):
+ # XXX tal:content structure=, text= should we support this ?
+ structure_flag = 0
+ return (I18N_CONTENT, (argument, False, structure_flag, self.endTagSymbol))
+
+ def compile_cmd_i18n_replace(self, argument):
+ # XXX tal:content structure=, text= should we support this ?
+ structure_flag = 0
+ return (I18N_CONTENT, (argument, True, structure_flag, self.endTagSymbol))
+
+ def compile_cmd_rql(self, argument):
+ return (RQL_EXECUTE, (argument, self.endTagSymbol))
+
+ def compile_cmd_tal_iter(self, argument):
+ original_id, (var_name, expression, end_tag_symbol) = \
+ simpleTAL.HTMLTemplateCompiler.compileCmdRepeat(self, argument)
+ return (TAL_ITER, (var_name, expression, self.endTagSymbol))
+
+ def getTemplate(self):
+ return CubicWebTemplate(self.commandList, self.macroMap, self.symbolLocationTable)
+
+ def compileCmdAttributes (self, argument):
+        """XXX modified to support a single attribute
+        definition ending with a ';'
+
+ backport this to simpleTAL
+ """
+ # Compile tal:attributes into attribute command
+ # Argument: [(attributeName, expression)]
+
+ # Break up the list of attribute settings first
+ commandArgs = []
+ # We only want to match semi-colons that are not escaped
+        argumentSplitter = re.compile(r'(?<!;);(?!;)')
+ peschema.subject_relation('travaille').set_rproperty(peschema, seschema, 'cardinality', '**')
+ self.assertEquals(Personne.fetch_rql(user),
+ 'Any X,AA,AB ORDERBY AA ASC WHERE X is Personne, X nom AA, X prenom AB')
+ # XXX test unauthorized attribute
+ finally:
+ Personne.fetch_attrs = pfetch_attrs
+ Societe.fetch_attrs = sfetch_attrs
+
+
+ def test_entity_unrelated(self):
+ p = self.add_entity('Personne', nom=u'di mascio', prenom=u'adrien')
+ e = self.add_entity('Tag', name=u'x')
+ rschema = e.e_schema.subject_relation('tags')
+ related = [r.eid for r in e.tags]
+ self.failUnlessEqual(related, [])
+ unrelated = [reid for rview, reid in e.vocabulary(rschema, 'subject')]
+ self.failUnless(p.eid in unrelated)
+ self.execute('SET X tags Y WHERE X is Tag, Y is Personne')
+ e = self.entity('Any X WHERE X is Tag')
+ unrelated = [reid for rview, reid in e.vocabulary(rschema, 'subject')]
+ self.failIf(p.eid in unrelated)
+
+ def test_entity_unrelated_limit(self):
+ e = self.add_entity('Tag', name=u'x')
+ self.add_entity('Personne', nom=u'di mascio', prenom=u'adrien')
+ self.add_entity('Personne', nom=u'di mascio', prenom=u'gwen')
+ rschema = e.e_schema.subject_relation('tags')
+ self.assertEquals(len(e.vocabulary(rschema, 'subject', limit=1)),
+ 1)
+
+ def test_new_entity_unrelated(self):
+ e = self.etype_instance('EUser')
+ rschema = e.e_schema.subject_relation('in_group')
+ unrelated = [reid for rview, reid in e.vocabulary(rschema, 'subject')]
+ # should be default groups but owners, i.e. managers, users, guests
+ self.assertEquals(len(unrelated), 3)
+
+
+ def test_rtags_expansion(self):
+ from cubicweb.entities import AnyEntity
+ class Personne(AnyEntity):
+ id = 'Personne'
+ __rtags__ = {
+ ('travaille', 'Societe', 'subject') : set(('primary',)),
+ ('evaluee', '*', 'subject') : set(('secondary',)),
+ 'ecrit_par' : set(('inlineview',)),
+ }
+ self.vreg.register_vobject_class(Personne)
+ rtags = Personne.rtags
+ self.assertEquals(rtags.get_tags('evaluee', 'Note', 'subject'), set(('secondary', 'link')))
+ self.assertEquals(rtags.is_inlined('evaluee', 'Note', 'subject'), False)
+ self.assertEquals(rtags.get_tags('evaluee', 'Personne', 'subject'), set(('secondary', 'link')))
+ self.assertEquals(rtags.is_inlined('evaluee', 'Personne', 'subject'), False)
+ self.assertEquals(rtags.get_tags('ecrit_par', 'Note', 'object'), set(('inlineview', 'link')))
+ self.assertEquals(rtags.is_inlined('ecrit_par', 'Note', 'object'), True)
+ class Personne2(Personne):
+ id = 'Personne'
+ __rtags__ = {
+ ('evaluee', 'Note', 'subject') : set(('inlineview',)),
+ }
+ self.vreg.register_vobject_class(Personne2)
+ rtags = Personne2.rtags
+ self.assertEquals(rtags.get_tags('evaluee', 'Note', 'subject'), set(('inlineview', 'link')))
+ self.assertEquals(rtags.is_inlined('evaluee', 'Note', 'subject'), True)
+ self.assertEquals(rtags.get_tags('evaluee', 'Personne', 'subject'), set(('secondary', 'link')))
+ self.assertEquals(rtags.is_inlined('evaluee', 'Personne', 'subject'), False)
+
+ def test_relations_by_category(self):
+ e = self.etype_instance('EUser')
+ def rbc(iterable):
+ return [(rschema.type, x) for rschema, tschemas, x in iterable]
+ self.assertEquals(rbc(e.relations_by_category('primary')),
+ [('login', 'subject'), ('upassword', 'subject'),
+ ('in_group', 'subject'), ('in_state', 'subject'),
+ ('eid', 'subject'),])
+ # firstname and surname are put in secondary category in views.entities.EUserEntity
+ self.assertListEquals(rbc(e.relations_by_category('secondary')),
+ [('firstname', 'subject'), ('surname', 'subject')])
+ self.assertListEquals(rbc(e.relations_by_category('generic')),
+ [('primary_email', 'subject'),
+ ('evaluee', 'subject'),
+ ('for_user', 'object'),
+ ('bookmarked_by', 'object')])
+ # owned_by is defined both as subject and object relations on EUser
+ self.assertListEquals(rbc(e.relations_by_category('generated')),
+ [('last_login_time', 'subject'),
+ ('created_by', 'subject'),
+ ('creation_date', 'subject'),
+ ('is', 'subject'),
+ ('is_instance_of', 'subject'),
+ ('modification_date', 'subject'),
+ ('owned_by', 'subject'),
+ ('created_by', 'object'),
+ ('wf_info_for', 'object'),
+ ('owned_by', 'object')])
+ e = self.etype_instance('Personne')
+ self.assertListEquals(rbc(e.relations_by_category('primary')),
+ [('nom', 'subject'), ('eid', 'subject')])
+ self.assertListEquals(rbc(e.relations_by_category('secondary')),
+ [('prenom', 'subject'),
+ ('sexe', 'subject'),
+ ('promo', 'subject'),
+ ('titre', 'subject'),
+ ('adel', 'subject'),
+ ('ass', 'subject'),
+ ('web', 'subject'),
+ ('tel', 'subject'),
+ ('fax', 'subject'),
+ ('datenaiss', 'subject'),
+ ('test', 'subject'),
+ ('description', 'subject'),
+ ('salary', 'subject')])
+ self.assertListEquals(rbc(e.relations_by_category('generic')),
+ [('concerne', 'subject'),
+ ('connait', 'subject'),
+ ('evaluee', 'subject'),
+ ('travaille', 'subject'),
+ ('ecrit_par', 'object'),
+ ('evaluee', 'object'),
+ ('tags', 'object')])
+ self.assertListEquals(rbc(e.relations_by_category('generated')),
+ [('created_by', 'subject'),
+ ('creation_date', 'subject'),
+ ('is', 'subject'),
+ ('is_instance_of', 'subject'),
+ ('modification_date', 'subject'),
+ ('owned_by', 'subject')])
+
+
+ def test_printable_value_string(self):
+ e = self.add_entity('Card', title=u'rest test', content=u'du :eid:`1:*ReST*`',
+ content_format=u'text/rest')
+ self.assertEquals(e.printable_value('content'),
+ '
'),
+ ]
+ for text, expected in data:
+ got = uilib.safe_cut(text, 8)
+ self.assertEquals(got, expected)
+
+ def test_cut(self):
+        """tests uilib.cut() behaviour"""
+ data = [
+ ('hello', 'hello'),
+ ('hello world', 'hello...'),
+            ("hell<b>O'</b> world", "hell<..."),
+ ]
+ for text, expected in data:
+ got = uilib.cut(text, 8)
+ self.assertEquals(got, expected)
+
+ def test_text_cut_no_text(self):
+ """tests uilib.text_cut() behaviour with no text"""
+ data = [('','')]
+ for text, expected in data:
+ got = uilib.text_cut(text, 8)
+ self.assertEquals(got, expected)
+
+ def test_text_cut_long_text(self):
+ """tests uilib.text_cut() behaviour with long text"""
+ data = [("""Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
+tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
+quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
+consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
+cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
+proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
+Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
+tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
+quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
+consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
+cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
+proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
+""","""Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
+tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
+quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
+consequat.""")]
+ for text, expected in data:
+ got = uilib.text_cut(text, 30)
+ self.assertEquals(got, expected)
+
+ def test_text_cut_no_point(self):
+ """tests uilib.text_cut() behaviour with no point"""
+ data = [("""Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
+tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam,
+quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
+consequat Duis aute irure dolor in reprehenderit in voluptate velit esse
+cillum dolore eu fugiat nulla pariatur Excepteur sint occaecat cupidatat non
+proident, sunt in culpa qui officia deserunt mollit anim id est laborum
+Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
+tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam,
+quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
+consequat Duis aute irure dolor in reprehenderit in voluptate velit esse
+cillum dolore eu fugiat nulla pariatur Excepteur sint occaecat cupidatat non
+proident, sunt in culpa qui officia deserunt mollit anim id est laborum
+""","""Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
+tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam,
+quis nostrud exercitation ullamco laboris nisi""")]
+ for text, expected in data:
+ got = uilib.text_cut(text, 30)
+ self.assertEquals(got, expected)
+
+ def test_ajax_replace_url(self):
+ # NOTE: for the simplest use cases, we could use doctest
+ arurl = uilib.ajax_replace_url
+ self.assertEquals(arurl('foo', 'Person P'),
+ "javascript: replacePageChunk('foo', 'Person%20P');")
+ self.assertEquals(arurl('foo', 'Person P', 'oneline'),
+ "javascript: replacePageChunk('foo', 'Person%20P', 'oneline');")
+ self.assertEquals(arurl('foo', 'Person P', 'oneline', name='bar', age=12),
+ 'javascript: replacePageChunk(\'foo\', \'Person%20P\', \'oneline\', {"age": 12, "name": "bar"});')
+ self.assertEquals(arurl('foo', 'Person P', name='bar', age=12),
+ 'javascript: replacePageChunk(\'foo\', \'Person%20P\', \'null\', {"age": 12, "name": "bar"});')
+
+tree = ('root', (
+ ('child_1_1', (
+ ('child_2_1', ()), ('child_2_2', (
+ ('child_3_1', ()),
+ ('child_3_2', ()),
+ ('child_3_3', ()),
+ )))),
+ ('child_1_2', (('child_2_3', ()),))))
+
+generated_html = """\
+
+
root
child_1_1
child_2_1
+
+
child_2_2
child_3_1
+
+
child_3_2
+
+
child_3_3
+
+
child_1_2
child_2_3
+
+
\
+"""
+
+def make_tree(tuple):
+ n = Node(tuple[0])
+ for child in tuple[1]:
+ n.append(make_tree(child))
+ return n
+
+class UIlibHTMLGenerationTC(TestCase):
+    """test case for uilib.render_HTML_tree()"""
+ def setUp(self):
+ """ called before each test from this class """
+ self.o = make_tree(tree)
+
+ def test_generated_html(self):
+ s = uilib.render_HTML_tree(self.o, selected_node="child_2_2")
+ self.assertTextEqual(s, generated_html)
+
+
+if __name__ == '__main__':
+ unittest_main()
+
diff -r 000000000000 -r b97547f5f1fa common/test/unittest_utils.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/common/test/unittest_utils.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,47 @@
+"""unit tests for module cubicweb.common.utils"""
+
+from logilab.common.testlib import TestCase, unittest_main
+
+from cubicweb.common.utils import make_uid, UStringIO, SizeConstrainedList
+
+
+class MakeUidTC(TestCase):
+ def test_1(self):
+ self.assertNotEquals(make_uid('xyz'), make_uid('abcd'))
+ self.assertNotEquals(make_uid('xyz'), make_uid('xyz'))
+
+ def test_2(self):
+ d = {}
+ while len(d)<10000:
+ uid = make_uid('xyz')
+ if d.has_key(uid):
+ self.fail(len(d))
+ d[uid] = 1
+
+
+class UStringIOTC(TestCase):
+ def test_boolean_value(self):
+ self.assert_(UStringIO())
+
+
+class SizeConstrainedListTC(TestCase):
+
+ def test_append(self):
+ l = SizeConstrainedList(10)
+ for i in xrange(12):
+ l.append(i)
+ self.assertEquals(l, range(2, 12))
+
+ def test_extend(self):
+ testdata = [(range(5), range(5)),
+ (range(10), range(10)),
+ (range(12), range(2, 12)),
+ ]
+ for extension, expected in testdata:
+ l = SizeConstrainedList(10)
+ l.extend(extension)
+ yield self.assertEquals, l, expected
+
+
+if __name__ == '__main__':
+ unittest_main()
diff -r 000000000000 -r b97547f5f1fa common/uilib.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/common/uilib.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,473 @@
+# -*- coding: utf-8 -*-
+"""user interface libraries
+
+contains some functions designed to help the implementation of the cubicweb user interface
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import csv
+import decimal
+import locale
+import re
+from urllib import quote as urlquote
+from cStringIO import StringIO
+from xml.parsers.expat import ExpatError
+
+import simplejson
+
+from mx.DateTime import DateTimeType, DateTimeDeltaType
+
+from logilab.common.textutils import unormalize
+
+def ustrftime(date, fmt='%Y-%m-%d'):
+ """like strftime, but returns a unicode string instead of an encoded
+ string which may be problematic with localized date.
+
+ encoding is guessed by locale.getpreferredencoding()
+ """
+ # date format may depend on the locale
+ encoding = locale.getpreferredencoding(do_setlocale=False) or 'UTF-8'
+ return unicode(date.strftime(fmt), encoding)
+
+
+def rql_for_eid(eid):
+    """return the rql query necessary to fetch the entity with the given eid.
+    This function should only be used to generate links with rql inside, not to
+    feed cursor.execute (in which case you won't benefit from the rql cache).
+
+ :Parameters:
+ - `eid`: the eid of the entity we should search
+ :rtype: str
+ :return: the rql query
+ """
+ return 'Any X WHERE X eid %s' % eid
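+    # e.g. rql_for_eid(1234) -> 'Any X WHERE X eid 1234' (illustrative value)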
+
+
+def printable_value(req, attrtype, value, props=None, displaytime=True):
+ """return a displayable value (i.e. unicode string)"""
+ if value is None or attrtype == 'Bytes':
+ return u''
+ if attrtype == 'String':
+        # don't translate an empty value, it would give strange results
+ if props is not None and value and props.get('internationalizable'):
+ return req._(value)
+
+ return value
+ if attrtype == 'Date':
+ return ustrftime(value, req.property_value('ui.date-format'))
+ if attrtype == 'Time':
+ return ustrftime(value, req.property_value('ui.time-format'))
+ if attrtype == 'Datetime':
+ if not displaytime:
+ return ustrftime(value, req.property_value('ui.date-format'))
+ return ustrftime(value, req.property_value('ui.datetime-format'))
+ if attrtype == 'Boolean':
+ if value:
+ return req._('yes')
+ return req._('no')
+ if attrtype == 'Float':
+ value = req.property_value('ui.float-format') % value
+ return unicode(value)
+
+
+# text publishing #############################################################
+
+try:
+ from cubicweb.common.rest import rest_publish # pylint: disable-msg=W0611
+except ImportError:
+ def rest_publish(entity, data):
+ """default behaviour if docutils was not found"""
+ return data
+
+TAG_PROG = re.compile(r'</?.*?>', re.U)
+def remove_html_tags(text):
+ """Removes HTML tags from text
+
+ >>> remove_html_tags('
' % data)
+ # NOTE: lxml 1.1 (etch platforms) doesn't recognize
+ # the encoding=unicode parameter (lxml 2.0 does), this is
+ # why we specify an encoding and re-decode to unicode later
+ body = etree.tostring(xmltree[0], encoding=encoding)
+ # remove and and decode to unicode
+    # remove <body> and </body> and decode to unicode
+
+
+# HTML generation helper functions ############################################
+
+from logilab.mtconverter import html_escape
+
+def tooltipize(text, tooltip, url=None):
+ """make an HTML tooltip"""
+ url = url or '#'
+    return u'<a href="%s" title="%s">%s</a>' % (url, tooltip, text)
+
+def toggle_action(nodeid):
+ """builds a HTML link that uses the js toggleVisibility function"""
+ return u"javascript: toggleVisibility('%s')" % nodeid
+
+def toggle_link(nodeid, label):
+ """builds a HTML link that uses the js toggleVisibility function"""
+    return u'<a href="%s">%s</a>' % (toggle_action(nodeid), label)
+
+def ajax_replace_url(nodeid, rql, vid=None, swap=False, **extraparams):
+ """builds a replacePageChunk-like url
+ >>> ajax_replace_url('foo', 'Person P')
+ "javascript: replacePageChunk('foo', 'Person%20P');"
+ >>> ajax_replace_url('foo', 'Person P', 'oneline')
+ "javascript: replacePageChunk('foo', 'Person%20P', 'oneline');"
+    >>> ajax_replace_url('foo', 'Person P', 'oneline', name='bar', age=12)
+    'javascript: replacePageChunk(\'foo\', \'Person%20P\', \'oneline\', {"age": 12, "name": "bar"});'
+    >>> ajax_replace_url('foo', 'Person P', name='bar', age=12)
+    'javascript: replacePageChunk(\'foo\', \'Person%20P\', \'null\', {"age": 12, "name": "bar"});'
+ """
+ params = [repr(nodeid), repr(urlquote(rql))]
+ if extraparams and not vid:
+ params.append("'null'")
+ elif vid:
+ params.append(repr(vid))
+ if extraparams:
+ params.append(simplejson.dumps(extraparams))
+ if swap:
+ params.append('true')
+ return "javascript: replacePageChunk(%s);" % ', '.join(params)
+
+def safe_cut(text, length):
+    """returns a string of at most `length` characters based on `text`,
+    removing any html tags from the given text if a cut is necessary.
+ """
+ if text is None:
+ return u''
+ text_nohtml = remove_html_tags(text)
+ # try to keep html tags if text is short enough
+ if len(text_nohtml) <= length:
+ return text
+ # else if un-tagged text is too long, cut it
+ return text_nohtml[:length-3] + u'...'
+
+def text_cut(text, nbwords=30):
+ if text is None:
+ return u''
+ minlength = len(' '.join(text.split()[:nbwords]))
+ textlength = text.find('.', minlength) + 1
+ if textlength == 0: # no point found
+ textlength = minlength
+ return text[:textlength]
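+    # behaviour sketch (illustrative): the text is kept up to the first period
+    # found after the first `nbwords` words, or cut right after those words if
+    # no period follows (see the text_cut tests in common/test for examples)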
+
+
+def cut(text, length):
+    """returns a string of at most `length` characters based on `text`
+    post:
+      len(__return__) <= length
+ """
+ if text is None:
+ return u''
+ if len(text) <= length:
+ return text
+    # else the text is too long, cut it
+ return text[:length-3] + u'...'
+
+
+from StringIO import StringIO
+
+def ureport_as_html(layout):
+ from logilab.common.ureports import HTMLWriter
+ formater = HTMLWriter(True)
+ stream = StringIO() #UStringIO() don't want unicode assertion
+ formater.format(layout, stream)
+ res = stream.getvalue()
+ if isinstance(res, str):
+ res = unicode(res, 'UTF8')
+ return res
+
+def render_HTML_tree(tree, selected_node=None, render_node=None, caption=None):
+ """
+ Generate a pure HTML representation of a tree given as an instance
+ of a logilab.common.tree.Node
+
+ selected_node is the currently selected node (if any) which will
+ have its surrounding
have id="selected" (which default
+ to a bold border libe with the default CSS).
+
+ render_node is a function that should take a Node content (Node.id)
+ as parameter and should return a string (what will be displayed
+ in the cell).
+
+ Warning: proper rendering of the generated html code depends on html_tree.css
+ """
+ tree_depth = tree.depth_down()
+ if render_node is None:
+ render_node = str
+
+ # helper function that build a matrix from the tree, like:
+ # +------+-----------+-----------+
+ # | root | child_1_1 | child_2_1 |
+ # | root | child_1_1 | child_2_2 |
+ # | root | child_1_2 | |
+ # | root | child_1_3 | child_2_3 |
+ # | root | child_1_3 | child_2_4 |
+ # +------+-----------+-----------+
+ # from:
+ # root -+- child_1_1 -+- child_2_1
+ # | |
+ # | +- child_2_2
+ # +- child_1_2
+ # |
+    #                +- child_1_3 -+- child_2_3
+    #                              |
+    #                              +- child_2_4
+ def build_matrix(path, matrix):
+ if path[-1].is_leaf():
+ matrix.append(path[:])
+ else:
+ for child in path[-1].children:
+ build_matrix(path[:] + [child], matrix)
+
+ matrix = []
+ build_matrix([tree], matrix)
+
+ # make all lines in the matrix have the same number of columns
+ for line in matrix:
+ line.extend([None]*(tree_depth-len(line)))
+ for i in range(len(matrix)-1, 0, -1):
+ prev_line, line = matrix[i-1:i+1]
+ for j in range(len(line)):
+ if line[j] == prev_line[j]:
+ line[j] = None
+
+ # We build the matrix of link types (between 2 cells on a line of the matrix)
+ # link types are :
+ link_types = {(True, True, True ): 1, # T
+ (False, False, True ): 2, # |
+ (False, True, True ): 3, # + (actually, vert. bar with horiz. bar on the right)
+ (False, True, False): 4, # L
+ (True, True, False): 5, # -
+ }
+ links = []
+ for i, line in enumerate(matrix):
+ links.append([])
+ for j in range(tree_depth-1):
+ cell_11 = line[j] is not None
+ cell_12 = line[j+1] is not None
+ cell_21 = line[j+1] is not None and line[j+1].next_sibling() is not None
+ link_type = link_types.get((cell_11, cell_12, cell_21), 0)
+ if link_type == 0 and i > 0 and links[i-1][j] in (1,2,3):
+ link_type = 2
+ links[-1].append(link_type)
+
+
+ # We can now generate the HTML code for the
+ s = u'
\n'
+ if caption:
+ s += '
%s
\n' % caption
+
+ for i, link_line in enumerate(links):
+ line = matrix[i]
+
+ s += '
'
+ for j, link_cell in enumerate(link_line):
+ cell = line[j]
+ if cell:
+ if cell.id == selected_node:
+ s += '
%s
' % (render_node(cell.id))
+ else:
+ s += '
%s
' % (render_node(cell.id))
+ else:
+ s += '
'
+ s += '
' % link_cell
+ s += '
' % link_cell
+
+ cell = line[-1]
+ if cell:
+ if cell.id == selected_node:
+ s += '
%s
' % (render_node(cell.id))
+ else:
+ s += '
%s
' % (render_node(cell.id))
+ else:
+ s += '
'
+
+ s += '
\n'
+ if link_line:
+ s += '
'
+ for j, link_cell in enumerate(link_line):
+ s += '
' % link_cell
+ s += '
' % link_cell
+ s += '
\n'
+
+ s += '
'
+ return s
+
+
+
+# traceback formatting ########################################################
+
+import traceback
+
+def rest_traceback(info, exception):
+    """return a ReST formatted traceback"""
+ res = [u'Traceback\n---------\n::\n']
+ for stackentry in traceback.extract_tb(info[2]):
+ res.append(u'\tFile %s, line %s, function %s' % tuple(stackentry[:3]))
+ if stackentry[3]:
+ res.append(u'\t %s' % stackentry[3].decode('utf-8', 'replace'))
+ res.append(u'\n')
+ try:
+ res.append(u'\t Error: %s\n' % exception)
+ except:
+ pass
+ return u'\n'.join(res)
+
+
+def html_traceback(info, exception, title='',
+ encoding='ISO-8859-1', body=''):
+ """ return an html formatted traceback from python exception infos.
+ """
+ tcbk = info[2]
+ stacktb = traceback.extract_tb(tcbk)
+ strings = []
+ if body:
+ strings.append(u'
')
+ for index, stackentry in enumerate(stacktb):
+ strings.append(u'File%s, line '
+ u'%s, function '
+ u'%s: '%(
+ html_escape(stackentry[0]), stackentry[1], html_escape(stackentry[2])))
+ if stackentry[3]:
+ string = html_escape(stackentry[3]).decode('utf-8', 'replace')
+ strings.append(u' %s \n' % (string))
+ # add locals info for each entry
+ try:
+ local_context = tcbk.tb_frame.f_locals
+ html_info = []
+ chars = 0
+ for name, value in local_context.iteritems():
+ value = html_escape(repr(value))
+ info = u'%s=%s, ' % (name, value)
+ line_length = len(name) + len(value)
+ chars += line_length
+ # 150 is the result of *years* of research ;-) (CSS might be helpful here)
+ if chars > 150:
+ info = u' ' + info
+ chars = line_length
+ html_info.append(info)
+ boxid = 'ctxlevel%d' % index
+ strings.append(u'[%s]' % toggle_link(boxid, '+'))
+ strings.append(u'
%s
' %
+ (boxid, ''.join(html_info)))
+ tcbk = tcbk.tb_next
+ except Exception:
+ pass # doesn't really matter if we have no context info
+ strings.append(u'
')
+ return '\n'.join(strings)
+
+# csv files / unicode support #################################################
+
+class UnicodeCSVWriter:
+ """proxies calls to csv.writer.writerow to be able to deal with unicode"""
+
+ def __init__(self, wfunc, encoding, **kwargs):
+ self.writer = csv.writer(self, **kwargs)
+ self.wfunc = wfunc
+ self.encoding = encoding
+
+ def write(self, data):
+ self.wfunc(data)
+
+ def writerow(self, row):
+ csvrow = []
+ for elt in row:
+ if isinstance(elt, unicode):
+ csvrow.append(elt.encode(self.encoding))
+ else:
+ csvrow.append(str(elt))
+ self.writer.writerow(csvrow)
+
+ def writerows(self, rows):
+ for row in rows:
+ self.writerow(row)
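+
+# usage sketch (illustrative; `w` and `encoding` stand for a write callback and
+# an output encoding, e.g. a web request's write method and encoding):
+#
+#     writer = UnicodeCSVWriter(w, encoding)
+#     writer.writerows(rows)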
+
+
+# some decorators #############################################################
+
+class limitsize(object):
+ def __init__(self, maxsize):
+ self.maxsize = maxsize
+
+ def __call__(self, function):
+ def newfunc(*args, **kwargs):
+ ret = function(*args, **kwargs)
+ if isinstance(ret, basestring):
+ return ret[:self.maxsize]
+ return ret
+ return newfunc
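+
+# usage sketch (illustrative function and size):
+#
+#     @limitsize(50)
+#     def title_for(entity):
+#         return entity.dc_title()
+#
+# string results longer than 50 characters are silently truncated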
+
+
+def jsonize(function):
+ import simplejson
+ def newfunc(*args, **kwargs):
+ ret = function(*args, **kwargs)
+ if isinstance(ret, decimal.Decimal):
+ ret = float(ret)
+ elif isinstance(ret, DateTimeType):
+ ret = ret.strftime('%Y-%m-%d %H:%M')
+ elif isinstance(ret, DateTimeDeltaType):
+ ret = ret.seconds
+ try:
+ return simplejson.dumps(ret)
+ except TypeError:
+ return simplejson.dumps(repr(ret))
+ return newfunc
+
+
+def htmlescape(function):
+ def newfunc(*args, **kwargs):
+ ret = function(*args, **kwargs)
+ assert isinstance(ret, basestring)
+ return html_escape(ret)
+ return newfunc
diff -r 000000000000 -r b97547f5f1fa common/utils.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/common/utils.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,257 @@
+"""Some utilities for CubicWeb server/clients.
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from md5 import md5
+from time import time
+from random import randint, seed
+
+# initialize random seed from current time
+seed()
+
+def make_uid(key):
+ """forge a unique identifier"""
+ msg = str(key) + "%.10f"%time() + str(randint(0, 1000000))
+ return md5(msg).hexdigest()
+
+def working_hours(mxdate):
+ """
+    Predicate returning True if the date's hour is within working hours (8h->20h)
+ """
+ if mxdate.hour > 7 and mxdate.hour < 21:
+ return True
+ return False
+
+def date_range(begin, end, incr=1, include=None):
+ """yields each date between begin and end
+ :param begin: the start date
+ :param end: the end date
+ :param incr: the step to use to iterate over dates. Default is
+ one day.
+ :param include: None (means no exclusion) or a function taking a
+ date as parameter, and returning True if the date
+ should be included.
+ """
+ date = begin
+ while date <= end:
+ if include is None or include(date):
+ yield date
+ date += incr
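+
+# usage sketch (illustrative; `begin` and `end` are assumed to be mx.DateTime
+# values, for which the default incr of 1 steps one day at a time):
+#
+#     for date in date_range(begin, end, include=working_hours):
+#         process(date)   # `process` is a placeholder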
+
+
+def dump_class(cls, clsname):
+    """create a copy of a class by creating an empty class inheriting
+    from the given cls.
+
+    Such classes are used as placeholders for attribute and relation
+    descriptions
+ """
+ # type doesn't accept unicode name
+ # return type.__new__(type, str(clsname), (cls,), {})
+ # __autogenerated__ attribute is just a marker
+ return type(str(clsname), (cls,), {'__autogenerated__': True})
+
+
+def merge_dicts(dict1, dict2):
+ """update a copy of `dict1` with `dict2`"""
+ dict1 = dict(dict1)
+ dict1.update(dict2)
+ return dict1
+
+
+class SizeConstrainedList(list):
+ """simple list that makes sure the list does not get bigger
+ than a given size.
+
+ when the list is full and a new element is added, the first
+ element of the list is removed before appending the new one
+
+ >>> l = SizeConstrainedList(2)
+ >>> l.append(1)
+ >>> l.append(2)
+ >>> l
+ [1, 2]
+    >>> l.append(3)
+    >>> l
+    [2, 3]
+ """
+ def __init__(self, maxsize):
+ self.maxsize = maxsize
+
+ def append(self, element):
+ if len(self) == self.maxsize:
+ del self[0]
+ super(SizeConstrainedList, self).append(element)
+
+ def extend(self, sequence):
+ super(SizeConstrainedList, self).extend(sequence)
+ keepafter = len(self) - self.maxsize
+ if keepafter > 0:
+ del self[:keepafter]
+
+ __iadd__ = extend
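+
+    # note (illustrative): extend/__iadd__ keep only the last `maxsize` items,
+    # e.g. a SizeConstrainedList(10) extended with range(12) ends up equal to
+    # range(2, 12) (see common/test/unittest_utils.py in this changeset)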
+
+
+class UStringIO(list):
+    """a file-like wrapper accumulating unicode strings: write() only accepts
+    unicode and getvalue() returns the whole content as a single unicode string
+ """
+
+ def __nonzero__(self):
+ return True
+
+ def write(self, value):
+ assert isinstance(value, unicode), u"unicode required not %s : %s"\
+ % (type(value).__name__, repr(value))
+ self.append(value)
+
+ def getvalue(self):
+ return u''.join(self)
+
+ def __repr__(self):
+ return '<%s at %#x>' % (self.__class__.__name__, id(self))
+
+
+class HTMLHead(UStringIO):
+ """wraps HTML header's stream
+
+    Request objects use an HTMLHead instance to ease the addition of
+    javascript and stylesheet files
+ """
+ js_unload_code = u'jQuery(window).unload(unloadPageData);'
+
+ def __init__(self):
+ super(HTMLHead, self).__init__()
+ self.jsvars = []
+ self.jsfiles = []
+ self.cssfiles = []
+ self.ie_cssfiles = []
+ self.post_inlined_scripts = []
+ self.pagedata_unload = False
+
+
+ def add_raw(self, rawheader):
+ self.write(rawheader)
+
+ def define_var(self, var, value):
+ self.jsvars.append( (var, value) )
+
+ def add_post_inline_script(self, content):
+ self.post_inlined_scripts.append(content)
+
+ def add_js(self, jsfile):
+ """adds `jsfile` to the list of javascripts used in the webpage
+
+ This function checks if the file has already been added
+ :param jsfile: the script's URL
+ """
+ if jsfile not in self.jsfiles:
+ self.jsfiles.append(jsfile)
+
+ def add_css(self, cssfile, media):
+        """adds `cssfile` to the list of stylesheets used in the webpage
+
+ This function checks if the file has already been added
+ :param cssfile: the stylesheet's URL
+ """
+ if (cssfile, media) not in self.cssfiles:
+ self.cssfiles.append( (cssfile, media) )
+
+ def add_ie_css(self, cssfile, media='all'):
+ """registers some IE specific CSS"""
+ if (cssfile, media) not in self.ie_cssfiles:
+ self.ie_cssfiles.append( (cssfile, media) )
+
+ def add_unload_pagedata(self):
+ """registers onunload callback to clean page data on server"""
+ if not self.pagedata_unload:
+ self.post_inlined_scripts.append(self.js_unload_code)
+ self.pagedata_unload = True
+
+ def getvalue(self):
+ """reimplement getvalue to provide a consistent (and somewhat browser
+        optimized cf. http://stevesouders.com/cuzillion) order in external
+ resources declaration
+ """
+ w = self.write
+ # 1/ variable declaration if any
+ if self.jsvars:
+ from simplejson import dumps
+ w(u'\n')
+ # 2/ css files
+ for cssfile, media in self.cssfiles:
+ w(u'\n' %
+ (media, cssfile))
+ # 3/ ie css if necessary
+ if self.ie_cssfiles:
+ w(u' \n')
+ # 4/ js files
+ for jsfile in self.jsfiles:
+ w(u'\n' % jsfile)
+ # 5/ post inlined scripts (i.e. scripts depending on other JS files)
+ if self.post_inlined_scripts:
+ w(u'\n')
+ return u'\n%s\n' % super(HTMLHead, self).getvalue()
+
+
+class HTMLStream(object):
+ """represents a HTML page.
+
+    This is used by main templates so that HTML headers can be added
+ at any time during the page generation.
+
+ HTMLStream uses the (U)StringIO interface to be compliant with
+ existing code.
+ """
+
+ def __init__(self, req):
+        # stream for <head>
+ self.head = req.html_headers
+ # main stream
+ self.body = UStringIO()
+ self.doctype = u''
+ # xmldecl and html opening tag
+        self.xmldecl = u'<?xml version="1.0" encoding="%s"?>\n' % req.encoding
+        self.htmltag = u'<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="%s" lang="%s">' % (req.lang, req.lang)
+
+
+ def write(self, data):
+ """StringIO interface: this method will be assigned to self.w
+ """
+ self.body.write(data)
+
+ def getvalue(self):
+        """writes HTML headers, closes </head> tag and writes HTML body"""
+ return u'%s\n%s\n%s\n%s\n%s\n' % (self.xmldecl, self.doctype,
+ self.htmltag,
+ self.head.getvalue(),
+ self.body.getvalue())
+
+
+class AcceptMixIn(object):
+ """Mixin class for vobjects defining the 'accepts' attribute describing
+ a set of supported entity type (Any by default).
+    a set of supported entity types (Any by default).
+ # XXX deprecated, no more necessary
+
+
+from logilab.common.deprecation import moved, class_moved
+rql_for_eid = moved('cubicweb.common.uilib', 'rql_for_eid')
+ajax_replace_url = moved('cubicweb.common.uilib', 'ajax_replace_url')
+
+import cubicweb
+Binary = class_moved(cubicweb.Binary)
diff -r 000000000000 -r b97547f5f1fa common/view.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/common/view.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,462 @@
+"""abstract views and templates classes for CubicWeb web client
+
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from cStringIO import StringIO
+
+from logilab.mtconverter import html_escape
+
+from cubicweb import NotAnEntity, NoSelectableObject
+from cubicweb.common.registerers import accepts_registerer, priority_registerer
+from cubicweb.common.selectors import (in_group_selector, anyrset_selector,
+ emptyrset_selector, accept_selector,
+ norset_selector, chainfirst)
+from cubicweb.common.appobject import AppRsetObject, ComponentMixIn
+from cubicweb.common.utils import UStringIO, HTMLStream
+
+_ = unicode
+
+# robots control
+NOINDEX = u'<meta name="ROBOTS" content="NOINDEX" />'
+NOFOLLOW = u'<meta name="ROBOTS" content="NOFOLLOW" />'
+
+CW_XHTML_EXTENSIONS = '''[
+
+
+ ] '''
+
+TRANSITIONAL_DOCTYPE = u'\n'
+
+STRICT_DOCTYPE = u'\n'
+
+class View(AppRsetObject):
+ """abstract view class, used as base for every renderable object such
+    as views, templates, some components...
+
+ A view is instantiated to render a [part of a] result set. View
+    subclasses may be parametrized using the following class attributes:
+
+    * `templatable` indicates if the view may be embedded in a main
+ template or if it has to be rendered standalone (i.e. XML for
+ instance)
+ * if the view is not templatable, it should set the `content_type` class
+ attribute to the correct MIME type (text/xhtml by default)
+ * the `category` attribute may be used in the interface to regroup related
+ objects together
+
+ At instantiation time, the standard `req`, `rset`, and `cursor`
+ attributes are added and the `w` attribute will be set at rendering
+ time to a write function to use.
+ """
+ __registry__ = 'views'
+
+ templatable = True
+ need_navigation = True
+ # content_type = 'application/xhtml+xml' # text/xhtml'
+ binary = False
+ add_to_breadcrumbs = True
+ category = 'view'
+
+ def __init__(self, req, rset):
+ super(View, self).__init__(req, rset)
+ self.w = None
+
+ @property
+ def content_type(self):
+ if self.req.xhtml_browser():
+ return 'application/xhtml+xml'
+ return 'text/html'
+
+ def set_stream(self, w=None):
+ if self.w is not None:
+ return
+ if w is None:
+ if self.binary:
+ self._stream = stream = StringIO()
+ else:
+ self._stream = stream = UStringIO()
+ w = stream.write
+ else:
+ stream = None
+ self.w = w
+ return stream
+
+ # main view interface #####################################################
+
+ def dispatch(self, w=None, **context):
+ """called to render a view object for a result set.
+
+        This method dispatches to an actual method selected
+        according to the optional row and col parameters, which locate
+        a particular row or cell in the result set:
+
+ * if row [and col] are specified, `cell_call` is called
+ * if none of them is supplied, the view is considered to apply on
+ the whole result set (which may be None in this case), `call` is
+ called
+ """
+ row, col = context.get('row'), context.get('col')
+ if row is not None:
+ context.setdefault('col', 0)
+ view_func = self.cell_call
+ else:
+ view_func = self.call
+ stream = self.set_stream(w)
+ # stream = self.set_stream(context)
+ view_func(**context)
+ # return stream content if we have created it
+ if stream is not None:
+ return self._stream.getvalue()
+
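+    # Illustrative dispatch sketch (hypothetical `vreg`, `req` and `rset`
+    # objects, 'primary' being a standard view id):
+    #
+    #   view = vreg.select_view('primary', req, rset)
+    #   page = view.dispatch()              # no row/col: call() handles the rset
+    #   cell = view.dispatch(row=0, col=0)  # row given: cell_call() handles a cell
+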
+    # should the default .call() method add a <div class="section"> around each
+    # rset item
+    add_div_section = True
+
+ def call(self, **kwargs):
+        """the view is called for an entire result set; by default it loops
+        over the rows of the result set and calls the same view on each
+        particular row
+
+ Views applicable on None result sets have to override this method
+ """
+ rset = self.rset
+ if rset is None:
+ raise NotImplementedError, self
+ wrap = self.templatable and len(rset) > 1 and self.add_div_section
+ for i in xrange(len(rset)):
+            if wrap:
+                self.w(u'<div class="section">')
+            self.wview(self.id, rset, row=i, **kwargs)
+            if wrap:
+                self.w(u"</div>")
+
+ def cell_call(self, row, col, **kwargs):
+ """the view is called for a particular result set cell"""
+ raise NotImplementedError, self
+
+ def linkable(self):
+ """return True if the view may be linked in a menu
+
+ by default views without title are not meant to be displayed
+ """
+ if not getattr(self, 'title', None):
+ return False
+ return True
+
+ def is_primary(self):
+ return self.id == 'primary'
+
+ def url(self):
+ """return the url associated with this view. Should not be
+ necessary for non linkable views, but a default implementation
+ is provided anyway.
+ """
+ try:
+ return self.build_url(vid=self.id, rql=self.req.form['rql'])
+ except KeyError:
+ return self.build_url(vid=self.id)
+
+ def set_request_content_type(self):
+ """set the content type returned by this view"""
+ self.req.set_content_type(self.content_type)
+
+ # view utilities ##########################################################
+
+ def view(self, __vid, rset, __fallback_vid=None, **kwargs):
+ """shortcut to self.vreg.render method avoiding to pass self.req"""
+ try:
+ view = self.vreg.select_view(__vid, self.req, rset, **kwargs)
+ except NoSelectableObject:
+ if __fallback_vid is None:
+ raise
+ view = self.vreg.select_view(__fallback_vid, self.req, rset, **kwargs)
+ return view.dispatch(**kwargs)
+
+ def wview(self, __vid, rset, __fallback_vid=None, **kwargs):
+ """shortcut to self.view method automatically passing self.w as argument
+ """
+ self.view(__vid, rset, __fallback_vid, w=self.w, **kwargs)
+
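+    # e.g. (sketch, the view ids here are hypothetical): self.wview('incontext',
+    # rset, 'oneline') renders 'incontext' and silently falls back to 'oneline'
+    # when 'incontext' is not selectable for the result set.
+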
+ def whead(self, data):
+ self.req.html_headers.write(data)
+
+ def wdata(self, data):
+ """simple helper that escapes `data` and writes into `self.w`"""
+ self.w(html_escape(data))
+
+ def action(self, actionid, row=0):
+ """shortcut to get action object with id `actionid`"""
+ return self.vreg.select_action(actionid, self.req, self.rset,
+ row=row)
+
+ def action_url(self, actionid, label=None, row=0):
+ """simple method to be able to display `actionid` as a link anywhere
+ """
+ action = self.vreg.select_action(actionid, self.req, self.rset,
+ row=row)
+ if action:
+ label = label or self.req._(action.title)
+            return u'<a href="%s">%s</a>' % (html_escape(action.url()), label)
+ return u''
+
+ def html_headers(self):
+        """return a list of html headers (eg something to be inserted between
+        <head> and </head> of the returned page)
+
+ by default return a meta tag to disable robot indexation of the page
+ """
+ return [NOINDEX]
+
+ def page_title(self):
+ """returns a title according to the result set - used for the
+ title in the HTML header
+ """
+ vtitle = self.req.form.get('vtitle')
+ if vtitle:
+ return self.req._(vtitle)
+ # class defined title will only be used if the resulting title doesn't
+ # seem clear enough
+ vtitle = getattr(self, 'title', None) or u''
+ if vtitle:
+ vtitle = self.req._(vtitle)
+ rset = self.rset
+ if rset and rset.rowcount:
+ if rset.rowcount == 1:
+ try:
+ entity = self.complete_entity(0)
+ # use long_title to get context information if any
+ clabel = entity.dc_long_title()
+ except NotAnEntity:
+ clabel = display_name(self.req, rset.description[0][0])
+ clabel = u'%s (%s)' % (clabel, vtitle)
+ else :
+ etypes = rset.column_types(0)
+ if len(etypes) == 1:
+ etype = iter(etypes).next()
+ clabel = display_name(self.req, etype, 'plural')
+ else :
+ clabel = u'#[*] (%s)' % vtitle
+ else:
+ clabel = vtitle
+ return u'%s (%s)' % (clabel, self.req.property_value('ui.site-title'))
+
+ def output_url_builder( self, name, url, args ):
+ self.w(u'\n')
+
+ def create_url(self, etype, **kwargs):
+ """ return the url of the entity creation form for a given entity type"""
+ return self.req.build_url('add/%s'%etype, **kwargs)
+
+
+# concrete views base classes #################################################
+
+class EntityView(View):
+ """base class for views applying on an entity (i.e. uniform result set)
+ """
+ __registerer__ = accepts_registerer
+ __selectors__ = (accept_selector,)
+ category = 'entityview'
+
+ def field(self, label, value, row=True, show_label=True, w=None, tr=True):
+ """ read-only field """
+ if w is None:
+ w = self.w
+        if row:
+            w(u'<div class="row">')
+        if show_label:
+            if tr:
+                label = display_name(self.req, label)
+            w(u'<span class="label">%s</span>' % label)
+        w(u'<div class="field">%s</div>' % value)
+        if row:
+            w(u'</div>')
+
+
+class StartupView(View):
+    """base class for views which don't need a particular result set
+    to be displayed (so they can always be displayed!)
+ """
+ __registerer__ = priority_registerer
+ __selectors__ = (in_group_selector, norset_selector)
+ require_groups = ()
+ category = 'startupview'
+
+ def url(self):
+ """return the url associated with this view. We can omit rql here"""
+ return self.build_url('view', vid=self.id)
+
+ def html_headers(self):
+        """return a list of html headers (eg something to be inserted between
+        <head> and </head> of the returned page)
+
+ by default startup views are indexed
+ """
+ return []
+
+
+class EntityStartupView(EntityView):
+ """base class for entity views which may also be applied to None
+ result set (usually a default rql is provided by the view class)
+ """
+ __registerer__ = accepts_registerer
+ __selectors__ = (chainfirst(norset_selector, accept_selector),)
+
+ default_rql = None
+
+ def __init__(self, req, rset):
+ super(EntityStartupView, self).__init__(req, rset)
+ if rset is None:
+ # this instance is not in the "entityview" category
+ self.category = 'startupview'
+
+ def startup_rql(self):
+        """return some rql to be executed if the result set is None"""
+ return self.default_rql
+
+ def call(self, **kwargs):
+ """override call to execute rql returned by the .startup_rql
+ method if necessary
+ """
+ if self.rset is None:
+ self.rset = self.req.execute(self.startup_rql())
+ rset = self.rset
+ for i in xrange(len(rset)):
+ self.wview(self.id, rset, row=i, **kwargs)
+
+ def url(self):
+ """return the url associated with this view. We can omit rql if we
+ are on a result set on which we do not apply.
+ """
+ if not self.__select__(self.req, self.rset):
+ return self.build_url(vid=self.id)
+ return super(EntityStartupView, self).url()
+
+
+class AnyRsetView(View):
+ """base class for views applying on any non empty result sets"""
+ __registerer__ = priority_registerer
+ __selectors__ = (anyrset_selector,)
+
+ category = 'anyrsetview'
+
+
+class EmptyRsetView(View):
+ """base class for views applying on any empty result sets"""
+ __registerer__ = priority_registerer
+ __selectors__ = (emptyrset_selector,)
+
+
+# concrete template base classes ##############################################
+
+class Template(View):
+ """a template is almost like a view, except that by default a template
+ is only used globally (i.e. no result set adaptation)
+ """
+ __registry__ = 'templates'
+ __registerer__ = priority_registerer
+ __selectors__ = (in_group_selector,)
+
+ require_groups = ()
+
+ def template(self, oid, **kwargs):
+ """shortcut to self.registry.render method on the templates registry"""
+ w = kwargs.pop('w', self.w)
+ self.vreg.render('templates', oid, self.req, w=w, **kwargs)
+
+
+class MainTemplate(Template):
+    """main templates are the primary access point to render a full HTML page.
+    There is usually at least a regular main template and a simple fallback
+    one to display errors if the first one fails
+    """
+
+ base_doctype = STRICT_DOCTYPE
+
+ @property
+ def doctype(self):
+ if self.req.xhtml_browser():
+ return self.base_doctype % CW_XHTML_EXTENSIONS
+ return self.base_doctype % ''
+
+ def set_stream(self, w=None, templatable=True):
+ if templatable and self.w is not None:
+ return
+
+ if w is None:
+ if self.binary:
+ self._stream = stream = StringIO()
+ elif not templatable:
+ # not templatable means we're using a non-html view, we don't
+ # want the HTMLStream stuff to interfere during data generation
+ self._stream = stream = UStringIO()
+ else:
+ self._stream = stream = HTMLStream(self.req)
+ w = stream.write
+ else:
+ stream = None
+ self.w = w
+ return stream
+
+ def write_doctype(self, xmldecl=True):
+ assert isinstance(self._stream, HTMLStream)
+ self._stream.doctype = self.doctype
+ if not xmldecl:
+ self._stream.xmldecl = u''
+
+# viewable components base classes ############################################
+
+class VComponent(ComponentMixIn, View):
+ """base class for displayable components"""
+ property_defs = {
+ 'visible': dict(type='Boolean', default=True,
+ help=_('display the component or not')),}
+
+class SingletonVComponent(VComponent):
+ """base class for displayable unique components"""
+ __registerer__ = priority_registerer
diff -r 000000000000 -r b97547f5f1fa cwconfig.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cwconfig.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,856 @@
+"""common configuration utilities for cubicweb
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+import os
+import logging
+from os.path import exists, join, expanduser, abspath, basename
+
+from logilab.common.decorators import cached
+from logilab.common.configuration import (Configuration, Method,
+ ConfigurationMixIn, merge_options)
+
+from cubicweb import CW_SOFTWARE_ROOT, CW_MIGRATION_MAP, ConfigurationError
+from cubicweb.toolsutils import env_path, read_config, create_dir
+
+CONFIGURATIONS = []
+
+_ = unicode
+
+class metaconfiguration(type):
+    """metaclass to automatically register configuration"""
+ def __new__(mcs, name, bases, classdict):
+ cls = super(metaconfiguration, mcs).__new__(mcs, name, bases, classdict)
+ if classdict.get('name'):
+ CONFIGURATIONS.append(cls)
+ return cls
+
+def configuration_cls(name):
+ """return the configuration class registered with the given name"""
+ try:
+ return [c for c in CONFIGURATIONS if c.name == name][0]
+ except IndexError:
+ raise ConfigurationError('no such config %r (check it exists with "cubicweb-ctl list")' % name)
+
+def possible_configurations(directory):
+ """return a list of installed configurations in a directory
+ according to *-ctl files
+ """
+ return [name for name in ('repository', 'twisted', 'all-in-one')
+ if exists(join(directory, '%s.conf' % name))]
+
+def guess_configuration(directory):
+ """try to guess the configuration to use for a directory. If multiple
+ configurations are found, ConfigurationError is raised
+ """
+ modes = possible_configurations(directory)
+ if len(modes) != 1:
+ raise ConfigurationError('unable to guess configuration from %r %s'
+ % (directory, modes))
+ return modes[0]
+
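+# For instance (sketch): a directory containing only 'all-in-one.conf' yields
+# ['all-in-one'] from possible_configurations() and 'all-in-one' from
+# guess_configuration(), while a directory holding both 'repository.conf' and
+# 'twisted.conf' makes guess_configuration() raise ConfigurationError.
+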
+# XXX generate this according to the configuration (repository/all-in-one/web)
+VREGOPTIONS = []
+for registry in ('etypes', 'hooks', 'controllers', 'actions', 'components',
+ 'views', 'templates', 'boxes', 'contentnavigation', 'urlrewriting',
+ 'facets'):
+ VREGOPTIONS.append(('disable-%s'%registry,
+ {'type' : 'csv', 'default': (),
+                         'help': 'list of identifiers of application objects from the %s registry to disable'%registry,
+ 'group': 'appobjects', 'inputlevel': 2,
+ }))
+VREGOPTIONS = tuple(VREGOPTIONS)
+
+# persistent options definition
+PERSISTENT_OPTIONS = (
+ ('encoding',
+ {'type' : 'string',
+ 'default': 'UTF-8',
+ 'help': _('user interface encoding'),
+ 'group': 'ui', 'sitewide': True,
+ }),
+ ('language',
+ {'type' : 'string',
+ 'default': 'en',
+ 'vocabulary': Method('available_languages'),
+ 'help': _('language of the user interface'),
+ 'group': 'ui',
+ }),
+ ('date-format',
+ {'type' : 'string',
+ 'default': '%Y/%m/%d',
+ 'help': _('how to format date in the ui ("man strftime" for format description)'),
+ 'group': 'ui',
+ }),
+ ('datetime-format',
+ {'type' : 'string',
+ 'default': '%Y/%m/%d %H:%M',
+ 'help': _('how to format date and time in the ui ("man strftime" for format description)'),
+ 'group': 'ui',
+ }),
+ ('time-format',
+ {'type' : 'string',
+ 'default': '%H:%M',
+ 'help': _('how to format time in the ui ("man strftime" for format description)'),
+ 'group': 'ui',
+ }),
+ ('float-format',
+ {'type' : 'string',
+ 'default': '%.3f',
+ 'help': _('how to format float numbers in the ui'),
+ 'group': 'ui',
+ }),
+ ('default-text-format',
+ {'type' : 'choice',
+ 'choices': ('text/plain', 'text/rest', 'text/html'),
+ 'default': 'text/html', # use fckeditor in the web ui
+ 'help': _('default text format for rich text fields.'),
+ 'group': 'ui',
+ }),
+ ('short-line-size',
+ {'type' : 'int',
+ 'default': 40,
+ 'help': _('maximum number of characters in short description'),
+ 'group': 'navigation',
+ }),
+ )
+
+def register_persistent_options(options):
+ global PERSISTENT_OPTIONS
+ PERSISTENT_OPTIONS = merge_options(PERSISTENT_OPTIONS + options)
+
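+# Sketch of how additional persistent options would be registered (the
+# 'site-title' name below is only an illustration; option dictionaries follow
+# the same layout as the PERSISTENT_OPTIONS entries above):
+#
+#   register_persistent_options((
+#       ('site-title',
+#        {'type': 'string', 'default': 'my site', 'group': 'ui',
+#         'help': _('site title displayed in page headers')}),
+#       ))
+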
+CFGTYPE2ETYPE_MAP = {
+ 'string': 'String',
+ 'choice': 'String',
+ 'yn': 'Boolean',
+ 'int': 'Int',
+ 'float' : 'Float',
+ }
+
+class CubicWebNoAppConfiguration(ConfigurationMixIn):
+ """base class for cubicweb configuration without a specific instance directory
+ """
+ __metaclass__ = metaconfiguration
+ # to set in concrete configuration
+ name = None
+ # log messages format (see logging module documentation for available keys)
+ log_format = '%(asctime)s - (%(name)s) %(levelname)s: %(message)s'
+    # whether or not to remove vobjects based on unused interfaces
+ cleanup_interface_sobjects = True
+
+ if os.environ.get('APYCOT_ROOT'):
+ mode = 'test'
+ CUBES_DIR = '%(APYCOT_ROOT)s/local/share/cubicweb/cubes/' % os.environ
+ elif exists(join(CW_SOFTWARE_ROOT, '.hg')):
+ mode = 'dev'
+ CUBES_DIR = join(CW_SOFTWARE_ROOT, '../cubes')
+ else:
+ mode = 'installed'
+ CUBES_DIR = '/usr/share/cubicweb/cubes/'
+
+ options = VREGOPTIONS + (
+ ('log-threshold',
+ {'type' : 'string', # XXX use a dedicated type?
+ 'default': 'ERROR',
+ 'help': 'server\'s log level',
+ 'group': 'main', 'inputlevel': 1,
+ }),
+ # pyro name server
+ ('pyro-ns-host',
+ {'type' : 'string',
+ 'default': '',
+ 'help': 'Pyro name server\'s host. If not set, will be detected by a \
+broadcast query',
+ 'group': 'pyro-name-server', 'inputlevel': 1,
+ }),
+ ('pyro-ns-port',
+ {'type' : 'int',
+ 'default': None,
+ 'help': 'Pyro name server\'s listening port. If not set, default \
+port will be used.',
+ 'group': 'pyro-name-server', 'inputlevel': 1,
+ }),
+ ('pyro-ns-group',
+ {'type' : 'string',
+ 'default': 'cubicweb',
+ 'help': 'Pyro name server\'s group where the repository will be \
+registered.',
+ 'group': 'pyro-name-server', 'inputlevel': 1,
+ }),
+    # common configuration options which are potentially required as soon as
+    # you're using "base" application objects (ie not really server or web
+    # specific)
+ ('base-url',
+ {'type' : 'string',
+ 'default': None,
+ 'help': 'web server root url',
+ 'group': 'main', 'inputlevel': 1,
+ }),
+ ('mangle-emails',
+ {'type' : 'yn',
+ 'default': False,
+ 'help': "don't display actual email addresses but mangle them if \
+this option is set to yes",
+ 'group': 'email', 'inputlevel': 2,
+ }),
+ )
+    # static and class methods used to get application independent resources ##
+
+ @staticmethod
+ def cubicweb_version():
+ """return installed cubicweb version"""
+ from logilab.common.changelog import Version
+ from cubicweb import __pkginfo__
+ version = __pkginfo__.numversion
+ assert len(version) == 3, version
+ return Version(version)
+
+ @staticmethod
+ def persistent_options_configuration():
+ return Configuration(options=PERSISTENT_OPTIONS)
+
+ @classmethod
+ def shared_dir(cls):
+ """return the shared data directory (i.e. directory where standard
+ library views and data may be found)
+ """
+ if cls.mode in ('dev', 'test') and not os.environ.get('APYCOT_ROOT'):
+ return join(CW_SOFTWARE_ROOT, 'web')
+ return join(cls.cubes_dir(), 'shared')
+
+ @classmethod
+ def i18n_lib_dir(cls):
+ """return application's i18n directory"""
+ if cls.mode in ('dev', 'test') and not os.environ.get('APYCOT_ROOT'):
+ return join(CW_SOFTWARE_ROOT, 'i18n')
+ return join(cls.shared_dir(), 'i18n')
+
+ @classmethod
+ def available_cubes(cls):
+ cubes_dir = cls.cubes_dir()
+ return sorted(cube for cube in os.listdir(cubes_dir)
+ if os.path.isdir(os.path.join(cubes_dir, cube))
+ and not cube in ('CVS', '.svn', 'shared', '.hg'))
+
+ @classmethod
+ def cubes_dir(cls):
+ """return the application cubes directory"""
+ return env_path('CW_CUBES', cls.CUBES_DIR, 'cubes')
+
+ @classmethod
+ def cube_dir(cls, cube):
+ """return the cube directory for the given cube id,
+        raise ConfigurationError if it doesn't exist
+ """
+ cube_dir = join(cls.cubes_dir(), cube)
+ if not exists(cube_dir):
+ raise ConfigurationError('no cube %s in %s' % (
+ cube, cls.cubes_dir()))
+ return cube_dir
+
+ @classmethod
+ def cube_migration_scripts_dir(cls, cube):
+ """cube migration scripts directory"""
+ return join(cls.cube_dir(cube), 'migration')
+
+ @classmethod
+ def cube_pkginfo(cls, cube):
+ """return the information module for the given cube"""
+ cube = CW_MIGRATION_MAP.get(cube, cube)
+ try:
+ return getattr(__import__('cubes.%s.__pkginfo__' % cube), cube).__pkginfo__
+ except ImportError:
+ raise ConfigurationError('unable to find packaging information for '
+ 'cube %s' % cube)
+
+ @classmethod
+ def cube_version(cls, cube):
+ """return the version of the cube located in the given directory
+ """
+ from logilab.common.changelog import Version
+ version = cls.cube_pkginfo(cube).numversion
+ assert len(version) == 3, version
+ return Version(version)
+
+ @classmethod
+ def cube_dependencies(cls, cube):
+ """return cubicweb cubes used by the given cube"""
+ return getattr(cls.cube_pkginfo(cube), '__use__', ())
+
+ @classmethod
+ def cube_recommends(cls, cube):
+ """return cubicweb cubes recommended by the given cube"""
+ return getattr(cls.cube_pkginfo(cube), '__recommend__', ())
+
+ @classmethod
+ def expand_cubes(cls, cubes):
+        """expand the given list of top level cubes used by adding recursively
+        each cube's dependencies
+ """
+ cubes = list(cubes)
+ todo = cubes[:]
+ while todo:
+ cube = todo.pop(0)
+ for depcube in cls.cube_dependencies(cube):
+ if depcube not in cubes:
+ depcube = CW_MIGRATION_MAP.get(depcube, depcube)
+ cubes.append(depcube)
+ todo.append(depcube)
+ return cubes
+
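+    # Sketch with hypothetical cubes: if the 'blog' cube declares
+    # __use__ = ('comment',) in its __pkginfo__, then expand_cubes(['blog'])
+    # returns ['blog', 'comment'].
+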
+ @classmethod
+ def reorder_cubes(cls, cubes):
+        """reorder cubes from the top level cubes to inner dependency
+        cubes
+ """
+ from logilab.common.graph import get_cycles
+ graph = {}
+ for cube in cubes:
+ cube = CW_MIGRATION_MAP.get(cube, cube)
+ deps = cls.cube_dependencies(cube) + \
+ cls.cube_recommends(cube)
+ graph[cube] = set(dep for dep in deps if dep in cubes)
+ cycles = get_cycles(graph)
+ if cycles:
+ cycles = '\n'.join(' -> '.join(cycle) for cycle in cycles)
+ raise ConfigurationError('cycles in cubes dependencies: %s'
+ % cycles)
+ cubes = []
+ while graph:
+ # sorted to get predictable results
+ for cube, deps in sorted(graph.items()):
+ if not deps:
+ cubes.append(cube)
+ del graph[cube]
+ for deps in graph.itervalues():
+ try:
+ deps.remove(cube)
+ except KeyError:
+ continue
+ return tuple(reversed(cubes))
+
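+    # Continuing the sketch above: reorder_cubes(('comment', 'blog')) returns
+    # ('blog', 'comment'), i.e. dependent cubes come before their dependencies,
+    # and a dependency cycle raises ConfigurationError.
+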
+ @classmethod
+ def cls_adjust_sys_path(cls):
+ """update python path if necessary"""
+ try:
+ templdir = abspath(join(cls.cubes_dir(), '..'))
+ if not templdir in sys.path:
+ sys.path.insert(0, templdir)
+ except ConfigurationError:
+            return # cube dir doesn't exist
+
+ @classmethod
+ def load_cwctl_plugins(cls):
+ from logilab.common.modutils import load_module_from_file
+ cls.cls_adjust_sys_path()
+ for ctlfile in ('web/webctl.py', 'etwist/twctl.py',
+ 'server/serverctl.py', 'hercule.py',
+ 'devtools/devctl.py', 'goa/goactl.py'):
+ if exists(join(CW_SOFTWARE_ROOT, ctlfile)):
+ load_module_from_file(join(CW_SOFTWARE_ROOT, ctlfile))
+ cls.info('loaded cubicweb-ctl plugin %s', ctlfile)
+ templdir = cls.cubes_dir()
+ for cube in cls.available_cubes():
+ pluginfile = join(templdir, cube, 'ecplugin.py')
+ initfile = join(templdir, cube, '__init__.py')
+ if exists(pluginfile):
+ try:
+ __import__('cubes.%s.ecplugin' % cube)
+ cls.info('loaded cubicweb-ctl plugin from %s', cube)
+ except:
+ cls.exception('while loading plugin %s', pluginfile)
+ elif exists(initfile):
+ try:
+ __import__('cubes.%s' % cube)
+ except:
+ cls.exception('while loading cube %s', cube)
+ else:
+ cls.warning('no __init__ file in cube %s', cube)
+
+ @classmethod
+ def init_available_cubes(cls):
+ """cubes may register some sources (svnfile for instance) in their
+ __init__ file, so they should be loaded early in the startup process
+ """
+ for cube in cls.available_cubes():
+ try:
+ __import__('cubes.%s' % cube)
+ except Exception, ex:
+ cls.warning("can't init cube %s: %s", cube, ex)
+
+ cubicweb_vobject_path = set(['entities'])
+ cube_vobject_path = set(['entities'])
+
+ @classmethod
+ def build_vregistry_path(cls, templpath, evobjpath=None, tvobjpath=None):
+ """given a list of directories, return a list of sub files and
+ directories that should be loaded by the application objects registry.
+
+ :param evobjpath:
+ optional list of sub-directories (or files without the .py ext) of
+ the cubicweb library that should be tested and added to the output list
+ if they exists. If not give, default to `cubicweb_vobject_path` class
+          if they exist. If not given, default to `cubicweb_vobject_path` class
+ :param tvobjpath:
+ optional list of sub-directories (or files without the .py ext) of
+ directories given in `templpath` that should be tested and added to
+ the output list if they exists. If not give, default to
+          the output list if they exist. If not given, default to
+ """
+ vregpath = cls.build_vregistry_cubicweb_path(evobjpath)
+ vregpath += cls.build_vregistry_cube_path(templpath, tvobjpath)
+ return vregpath
+
+ @classmethod
+ def build_vregistry_cubicweb_path(cls, evobjpath=None):
+ vregpath = []
+ if evobjpath is None:
+ evobjpath = cls.cubicweb_vobject_path
+ for subdir in evobjpath:
+ path = join(CW_SOFTWARE_ROOT, subdir)
+ if exists(path):
+ vregpath.append(path)
+ return vregpath
+
+ @classmethod
+ def build_vregistry_cube_path(cls, templpath, tvobjpath=None):
+ vregpath = []
+ if tvobjpath is None:
+ tvobjpath = cls.cube_vobject_path
+ for directory in templpath:
+ for subdir in tvobjpath:
+ path = join(directory, subdir)
+ if exists(path):
+ vregpath.append(path)
+ elif exists(path + '.py'):
+ vregpath.append(path + '.py')
+ return vregpath
+
+ def __init__(self):
+ ConfigurationMixIn.__init__(self)
+ self.adjust_sys_path()
+ self.load_defaults()
+ self.translations = {}
+
+ def adjust_sys_path(self):
+ self.cls_adjust_sys_path()
+
+ def init_log(self, logthreshold=None, debug=False,
+ logfile=None, syslog=False):
+ """init the log service"""
+ if os.environ.get('APYCOT_ROOT'):
+ logthreshold = logging.CRITICAL
+ # redirect logs to stdout to avoid apycot output parsing failure
+ handler = logging.StreamHandler(sys.stdout)
+ else:
+ if debug:
+ if logthreshold is None:
+ logthreshold = logging.DEBUG # LLDEBUG
+ handler = logging.StreamHandler()
+ elif logfile is None:
+ if syslog:
+ from logging import handlers
+ handler = handlers.SysLogHandler()
+ else:
+ handler = logging.StreamHandler()
+ else:
+ try:
+ handler = logging.FileHandler(logfile)
+ except IOError:
+ handler = logging.StreamHandler()
+ if logthreshold is None:
+ thresholdname = self['log-threshold']
+ logthreshold = getattr(logging, THRESHOLD_MAP.get(thresholdname,
+ thresholdname))
+ # configure the root logger
+ logger = logging.getLogger()
+ logger.setLevel(logthreshold)
+        # the logging module only provides addHandler and removeHandler while
+        # we would like a setHandler method, so do it this way :$
+ logger.handlers = [handler]
+ isatty = hasattr(sys.__stdout__, 'isatty') and sys.__stdout__.isatty()
+ if debug and isatty:
+ from logilab.common.logging_ext import ColorFormatter
+ fmt = ColorFormatter(self.log_format, '%Y-%m-%d %H:%M:%S')
+ def col_fact(record):
+ if 'XXX' in record.message:
+ return 'cyan'
+ if 'kick' in record.message:
+ return 'red'
+ fmt.colorfilters.append(col_fact)
+ else:
+ fmt = logging.Formatter(self.log_format, '%Y-%m-%d %H:%M:%S')
+ logger.handlers[0].setFormatter(fmt)
+ # configure simpleTal logger
+ logging.getLogger('simpleTAL').setLevel(logging.ERROR)
+
+ def vregistry_path(self):
+ """return a list of files or directories where the registry will look
+ for application objects. By default return nothing in NoApp config.
+ """
+ return []
+
+ def eproperty_definitions(self):
+ cfg = self.persistent_options_configuration()
+ for section, options in cfg.options_by_section():
+ section = section.lower()
+ for optname, optdict, value in options:
+ key = '%s.%s' % (section, optname)
+ type, vocab = self.map_option(optdict)
+ default = cfg.option_default(optname, optdict)
+ pdef = {'type': type, 'vocabulary': vocab, 'default': default,
+ 'help': optdict['help'],
+ 'sitewide': optdict.get('sitewide', False)}
+ yield key, pdef
+
+ def map_option(self, optdict):
+ try:
+ vocab = optdict['choices']
+ except KeyError:
+ vocab = optdict.get('vocabulary')
+ if isinstance(vocab, Method):
+ vocab = getattr(self, vocab.method, ())
+ return CFGTYPE2ETYPE_MAP[optdict['type']], vocab
+
+
+class CubicWebConfiguration(CubicWebNoAppConfiguration):
+ """base class for cubicweb server and web configurations"""
+
+ if CubicWebNoAppConfiguration.mode == 'test':
+ root = os.environ['APYCOT_ROOT']
+ REGISTRY_DIR = '%s/etc/cubicweb.d/' % root
+ INSTANCE_DATA_DIR = REGISTRY_DIR
+ RUNTIME_DIR = '/tmp/'
+ MIGRATION_DIR = '%s/local/share/cubicweb/migration/' % root
+ if not exists(REGISTRY_DIR):
+ os.makedirs(REGISTRY_DIR)
+ elif CubicWebNoAppConfiguration.mode == 'dev':
+ REGISTRY_DIR = expanduser('~/etc/cubicweb.d/')
+ INSTANCE_DATA_DIR = REGISTRY_DIR
+ RUNTIME_DIR = '/tmp/'
+ MIGRATION_DIR = join(CW_SOFTWARE_ROOT, 'misc', 'migration')
+ else: #mode = 'installed'
+ REGISTRY_DIR = '/etc/cubicweb.d/'
+ INSTANCE_DATA_DIR = '/var/lib/cubicweb/instances/'
+ RUNTIME_DIR = '/var/run/cubicweb/'
+ MIGRATION_DIR = '/usr/share/cubicweb/migration/'
+
+ # for some commands (creation...) we don't want to initialize gettext
+ set_language = True
+    # set this to true to avoid false error messages while creating an application
+ creating = False
+
+ options = CubicWebNoAppConfiguration.options + (
+ ('log-file',
+ {'type' : 'string',
+ 'default': Method('default_log_file'),
+ 'help': 'file where output logs should be written',
+ 'group': 'main', 'inputlevel': 2,
+ }),
+ # email configuration
+ ('smtp-host',
+ {'type' : 'string',
+ 'default': 'mail',
+ 'help': 'hostname of the SMTP mail server',
+ 'group': 'email', 'inputlevel': 1,
+ }),
+ ('smtp-port',
+ {'type' : 'int',
+ 'default': 25,
+ 'help': 'listening port of the SMTP mail server',
+ 'group': 'email', 'inputlevel': 1,
+ }),
+ ('sender-name',
+ {'type' : 'string',
+ 'default': Method('default_application_id'),
+ 'help': 'name used as HELO name for outgoing emails from the \
+repository.',
+ 'group': 'email', 'inputlevel': 2,
+ }),
+ ('sender-addr',
+ {'type' : 'string',
+ 'default': 'devel@logilab.fr',
+ 'help': 'email address used as HELO address for outgoing emails from \
+the repository',
+ 'group': 'email', 'inputlevel': 1,
+ }),
+ )
+
+ @classmethod
+ def runtime_dir(cls):
+ """run time directory for pid file..."""
+ return env_path('CW_RUNTIME', cls.RUNTIME_DIR, 'run time')
+
+ @classmethod
+ def registry_dir(cls):
+ """return the control directory"""
+ return env_path('CW_REGISTRY', cls.REGISTRY_DIR, 'registry')
+
+ @classmethod
+ def instance_data_dir(cls):
+ """return the instance data directory"""
+ return env_path('CW_INSTANCE_DATA', cls.INSTANCE_DATA_DIR,
+ 'additional data')
+
+ @classmethod
+ def migration_scripts_dir(cls):
+ """cubicweb migration scripts directory"""
+ return env_path('CW_MIGRATION', cls.MIGRATION_DIR, 'migration')
+
+ @classmethod
+ def config_for(cls, appid, config=None):
+ """return a configuration instance for the given application identifier
+ """
+ config = config or guess_configuration(cls.application_home(appid))
+ configcls = configuration_cls(config)
+ return configcls(appid)
+
+ @classmethod
+ def possible_configurations(cls, appid):
+ """return the name of possible configurations for the given
+ application id
+ """
+ home = cls.application_home(appid)
+ return possible_configurations(home)
+
+ @classmethod
+ def application_home(cls, appid):
+ """return the home directory of the application with the given
+ application id
+ """
+ home = join(cls.registry_dir(), appid)
+ if not exists(home):
+ raise ConfigurationError('no such application %s (check it exists with "cubicweb-ctl list")' % appid)
+ return home
+
+ MODES = ('common', 'repository', 'Any', 'web')
+ MCOMPAT = {'all-in-one': MODES,
+ 'repository': ('common', 'repository', 'Any'),
+ 'twisted' : ('common', 'web'),}
+ @classmethod
+ def accept_mode(cls, mode):
+ #assert mode in cls.MODES, mode
+ return mode in cls.MCOMPAT[cls.name]
+
+ # default configuration methods ###########################################
+
+ def default_application_id(self):
+        """return the application identifier, useful for options which need
+        this as default value
+ """
+ return self.appid
+
+ def default_log_file(self):
+        """return the default path to the log file of the application's server"""
+ if self.mode == 'dev':
+ basepath = '/tmp/%s-%s' % (basename(self.appid), self.name)
+ path = basepath + '.log'
+ i = 1
+ while exists(path) and i < 100: # arbitrary limit to avoid infinite loop
+ try:
+ file(path, 'a')
+ break
+ except IOError:
+ path = '%s-%s.log' % (basepath, i)
+ i += 1
+ return path
+ return '/var/log/cubicweb/%s-%s.log' % (self.appid, self.name)
+
+ def default_pid_file(self):
+        """return the default path to the pid file of the application's server"""
+ return join(self.runtime_dir(), '%s-%s.pid' % (self.appid, self.name))
+
+ # instance methods used to get application specific resources #############
+
+ def __init__(self, appid):
+ self.appid = appid
+ CubicWebNoAppConfiguration.__init__(self)
+ self._cubes = None
+ self._site_loaded = set()
+ self.load_file_configuration(self.main_config_file())
+
+ def adjust_sys_path(self):
+ CubicWebNoAppConfiguration.adjust_sys_path(self)
+ # adding apphome to python path is not usually necessary in production
+ # environments, but necessary for tests
+ if self.apphome and not self.apphome in sys.path:
+ sys.path.insert(0, self.apphome)
+
+ @property
+ def apphome(self):
+ return join(self.registry_dir(), self.appid)
+
+ @property
+ def appdatahome(self):
+ return join(self.instance_data_dir(), self.appid)
+
+ def init_cubes(self, cubes):
+ assert self._cubes is None
+ self._cubes = self.reorder_cubes(cubes)
+ # load cubes'__init__.py file first
+ for cube in cubes:
+ __import__('cubes.%s' % cube)
+ self.load_site_cubicweb()
+        # reload config file in case options are defined in cubes __init__
+        # or site_cubicweb files
+ self.load_file_configuration(self.main_config_file())
+ # configuration initialization hook
+ self.load_configuration()
+
+ def cubes(self):
+ """return the list of cubes used by this instance
+
+        result is ordered from the top level cubes to inner dependency
+        cubes
+ """
+ assert self._cubes is not None
+ return self._cubes
+
+ def cubes_path(self):
+        """return the list of paths to cubes used by this instance, from
+        outermost to innermost cubes
+ """
+ return [self.cube_dir(p) for p in self.cubes()]
+
+ def add_cubes(self, cubes):
+ """add given cubes to the list of used cubes"""
+ if not isinstance(cubes, list):
+ cubes = list(cubes)
+ self._cubes = self.reorder_cubes(list(self._cubes) + cubes)
+
+ def main_config_file(self):
+ """return application's control configuration file"""
+ return join(self.apphome, '%s.conf' % self.name)
+
+ def save(self):
+ """write down current configuration"""
+ self.generate_config(open(self.main_config_file(), 'w'))
+
+ @cached
+ def instance_md5_version(self):
+ import md5
+ infos = []
+ for pkg in self.cubes():
+ version = self.cube_version(pkg)
+ infos.append('%s-%s' % (pkg, version))
+ return md5.new(';'.join(infos)).hexdigest()
+
+ def load_site_cubicweb(self):
+ """load (web?) application's specific site_cubicweb file"""
+ for path in reversed([self.apphome] + self.cubes_path()):
+ sitefile = join(path, 'site_cubicweb.py')
+ if exists(sitefile) and not sitefile in self._site_loaded:
+ self._load_site_cubicweb(sitefile)
+ self._site_loaded.add(sitefile)
+ else:
+ sitefile = join(path, 'site_erudi.py')
+ if exists(sitefile) and not sitefile in self._site_loaded:
+ self._load_site_cubicweb(sitefile)
+ self._site_loaded.add(sitefile)
+ self.warning('site_erudi.py is deprecated, should be renamed to site_cubicweb.py')
+
+ def _load_site_cubicweb(self, sitefile):
+ context = {}
+ execfile(sitefile, context, context)
+ self.info('%s loaded', sitefile)
+ # cube specific options
+ if context.get('options'):
+ self.register_options(context['options'])
+ self.load_defaults()
+
+ def load_configuration(self):
+ """load application's configuration files"""
+ super(CubicWebConfiguration, self).load_configuration()
+ if self.apphome and self.set_language:
+ # init gettext
+ self._set_language()
+
+ def init_log(self, logthreshold=None, debug=False, force=False):
+ """init the log service"""
+ if not force and hasattr(self, '_logging_initialized'):
+ return
+ self._logging_initialized = True
+ CubicWebNoAppConfiguration.init_log(self, logthreshold, debug,
+ logfile=self.get('log-file'))
+ # read a config file if it exists
+ logconfig = join(self.apphome, 'logging.conf')
+ if exists(logconfig):
+            from logging.config import fileConfig
+            fileConfig(logconfig)
+
+ def available_languages(self, *args):
+        """return available translations for an application, by looking for
+        compiled catalogs
+
+ take *args to be usable as a vocabulary method
+ """
+ from glob import glob
+ yield 'en' # ensure 'en' is yielded even if no .mo found
+ for path in glob(join(self.apphome, 'i18n',
+ '*', 'LC_MESSAGES', 'cubicweb.mo')):
+ lang = path.split(os.sep)[-3]
+ if lang != 'en':
+ yield lang
+
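+    # e.g. (sketch) an instance shipping <apphome>/i18n/fr/LC_MESSAGES/cubicweb.mo
+    # and .../es/LC_MESSAGES/cubicweb.mo yields 'en', 'fr' and 'es'.
+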
+ def _set_language(self):
+ """set language for gettext"""
+ from gettext import translation
+ path = join(self.apphome, 'i18n')
+ for language in self.available_languages():
+ self.info("loading language %s", language)
+ try:
+ tr = translation('cubicweb', path, languages=[language])
+ self.translations[language] = tr.ugettext
+ except (ImportError, AttributeError, IOError):
+ self.exception('localisation support error for language %s',
+ language)
+
+ def vregistry_path(self):
+ """return a list of files or directories where the registry will look
+ for application objects
+ """
+ templpath = list(reversed(self.cubes_path()))
+ if self.apphome: # may be unset in tests
+ templpath.append(self.apphome)
+ return self.build_vregistry_path(templpath)
+
+ def set_sources_mode(self, sources):
+ if not 'all' in sources:
+ print 'warning: ignoring specified sources, requires a repository '\
+ 'configuration'
+
+ def migration_handler(self):
+ """return a migration handler instance"""
+ from cubicweb.common.migration import MigrationHelper
+ return MigrationHelper(self, verbosity=self.verbosity)
+
+ def i18ncompile(self, langs=None):
+ from cubicweb.common import i18n
+ if langs is None:
+ langs = self.available_languages()
+ i18ndir = join(self.apphome, 'i18n')
+ if not exists(i18ndir):
+ create_dir(i18ndir)
+ sourcedirs = [join(path, 'i18n') for path in self.cubes_path()]
+ sourcedirs.append(self.i18n_lib_dir())
+ return i18n.compile_i18n_catalogs(sourcedirs, i18ndir, langs)
+
+
+# alias to get a configuration instance from an application id
+application_configuration = CubicWebConfiguration.config_for
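+# e.g. (sketch, 'myapp' being a hypothetical application id):
+#   config = application_configuration('myapp')
+#   print config.main_config_file()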
+
+# map logilab.common.logger thresholds to logging thresholds
+THRESHOLD_MAP = {'LOG_DEBUG': 'DEBUG',
+ 'LOG_INFO': 'INFO',
+ 'LOG_NOTICE': 'INFO',
+ 'LOG_WARN': 'WARNING',
+ 'LOG_ERR': 'ERROR',
+ 'LOG_CRIT': 'CRITICAL',
+ }
+
+from cubicweb import set_log_methods
+set_log_methods(CubicWebConfiguration, logging.getLogger('cubicweb.configuration'))
diff -r 000000000000 -r b97547f5f1fa cwctl.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cwctl.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,809 @@
+"""%%prog %s [options] %s
+
+CubicWeb main applications controller.
+%s"""
+
+import sys
+from os import remove, listdir, system, kill, getpgid
+from os.path import exists, join, isfile, isdir
+
+from cubicweb import ConfigurationError, ExecutionError, BadCommandUsage
+from cubicweb.cwconfig import CubicWebConfiguration, CONFIGURATIONS
+from cubicweb.toolsutils import (Command, register_commands, main_run,
+ rm, create_dir, pop_arg, confirm)
+
+def wait_process_end(pid, maxtry=10, waittime=1):
+ """wait for a process to actually die"""
+ import signal
+ from time import sleep
+ nbtry = 0
+ while nbtry < maxtry:
+ try:
+ kill(pid, signal.SIGUSR1)
+ except OSError:
+ break
+ nbtry += 1
+ sleep(waittime)
+ else:
+ raise ExecutionError('can\'t kill process %s' % pid)
+
+def list_instances(regdir):
+ return sorted(idir for idir in listdir(regdir) if isdir(join(regdir, idir)))
+
+def detect_available_modes(templdir):
+ modes = []
+ for fname in ('schema', 'schema.py'):
+ if exists(join(templdir, fname)):
+ modes.append('repository')
+ break
+ for fname in ('data', 'views', 'views.py'):
+ if exists(join(templdir, fname)):
+ modes.append('web ui')
+ break
+ return modes
+
+
+class ApplicationCommand(Command):
+    """base class for commands taking 0 to n application ids as arguments
+ (0 meaning all registered applications)
+ """
+    arguments = '[<application>...]'
+ options = (
+ ("force",
+ {'short': 'f', 'action' : 'store_true',
+ 'default': False,
+ 'help': 'force command without asking confirmation',
+ }
+ ),
+ )
+ actionverb = None
+
+ def ordered_instances(self):
+ """return instances in the order in which they should be started,
+ considering $REGISTRY_DIR/startorder file if it exists (useful when
+        some instances depend on others as external sources)
+ """
+ regdir = CubicWebConfiguration.registry_dir()
+ _allinstances = list_instances(regdir)
+ if isfile(join(regdir, 'startorder')):
+ allinstances = []
+ for line in file(join(regdir, 'startorder')):
+ line = line.strip()
+ if line and not line.startswith('#'):
+ try:
+ _allinstances.remove(line)
+ allinstances.append(line)
+ except ValueError:
+                        print 'ERROR: startorder file contains nonexistent instance %s' % line
+ allinstances += _allinstances
+ else:
+ allinstances = _allinstances
+ return allinstances
+
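+    # Sketch of a $REGISTRY_DIR/startorder file (the instance ids are
+    # hypothetical):
+    #
+    #   # the ldap instance must be up before the portal
+    #   ldapapp
+    #   portal
+    #
+    # instances listed there are started first, in this order; other registered
+    # instances are appended afterwards.
+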
+ def run(self, args):
+        """run the <command>_application method on each argument (a list of
+        application identifiers)
+ """
+        if not args:
+            args = self.ordered_instances()
+            try:
+                askconfirm = not self.config.force
+            except AttributeError:
+                # no force option
+                askconfirm = False
+        else:
+            askconfirm = False
+ self.run_args(args, askconfirm)
+
+ def run_args(self, args, askconfirm):
+ for appid in args:
+ if askconfirm:
+ print '*'*72
+ if not confirm('%s application %r ?' % (self.name, appid)):
+ continue
+ self.run_arg(appid)
+
+ def run_arg(self, appid):
+ cmdmeth = getattr(self, '%s_application' % self.name)
+ try:
+ cmdmeth(appid)
+ except (KeyboardInterrupt, SystemExit):
+ print >> sys.stderr, '%s aborted' % self.name
+ sys.exit(2) # specific error code
+ except (ExecutionError, ConfigurationError), ex:
+ print >> sys.stderr, 'application %s not %s: %s' % (
+ appid, self.actionverb, ex)
+ except Exception, ex:
+ import traceback
+ traceback.print_exc()
+ print >> sys.stderr, 'application %s not %s: %s' % (
+ appid, self.actionverb, ex)
+
+
+class ApplicationCommandFork(ApplicationCommand):
+ """Same as `ApplicationCommand`, but command is forked in a new environment
+ for each argument
+ """
+
+ def run_args(self, args, askconfirm):
+ if len(args) > 1:
+ forkcmd = ' '.join(w for w in sys.argv if not w in args)
+ else:
+ forkcmd = None
+ for appid in args:
+ if askconfirm:
+ print '*'*72
+ if not confirm('%s application %r ?' % (self.name, appid)):
+ continue
+ if forkcmd:
+ status = system('%s %s' % (forkcmd, appid))
+ if status:
+ sys.exit(status)
+ else:
+ self.run_arg(appid)
+
+# base commands ###############################################################
+
+class ListCommand(Command):
+    """List configurations, components and applications.
+
+    list available configurations, installed web and server components, and
+ registered applications
+ """
+ name = 'list'
+ options = (
+ ('verbose',
+ {'short': 'v', 'action' : 'store_true',
+ 'help': "display more information."}),
+ )
+
+ def run(self, args):
+ """run the command with its specific arguments"""
+ if args:
+            raise BadCommandUsage('Too many arguments')
+ print 'CubicWeb version:', CubicWebConfiguration.cubicweb_version()
+ print 'Detected mode:', CubicWebConfiguration.mode
+ print
+ print 'Available configurations:'
+ for config in CONFIGURATIONS:
+ print '*', config.name
+ for line in config.__doc__.splitlines():
+ line = line.strip()
+ if not line:
+ continue
+ print ' ', line
+ print
+ try:
+ cubesdir = CubicWebConfiguration.cubes_dir()
+ namesize = max(len(x) for x in CubicWebConfiguration.available_cubes())
+ except ConfigurationError, ex:
+ print 'No cubes available:', ex
+ except ValueError:
+ print 'No cubes available in %s' % cubesdir
+ else:
+ print 'Available cubes (%s):' % cubesdir
+ for cube in CubicWebConfiguration.available_cubes():
+ if cube in ('CVS', '.svn', 'shared', '.hg'):
+ continue
+ templdir = join(cubesdir, cube)
+ try:
+ tinfo = CubicWebConfiguration.cube_pkginfo(cube)
+ tversion = tinfo.version
+ except ConfigurationError:
+ tinfo = None
+ tversion = '[missing cube information]'
+ print '* %s %s' % (cube.ljust(namesize), tversion)
+ if self.config.verbose:
+ shortdesc = tinfo and (getattr(tinfo, 'short_desc', '')
+ or tinfo.__doc__)
+ if shortdesc:
+ print ' '+ ' \n'.join(shortdesc.splitlines())
+ modes = detect_available_modes(templdir)
+ print ' available modes: %s' % ', '.join(modes)
+ print
+ try:
+ regdir = CubicWebConfiguration.registry_dir()
+ except ConfigurationError, ex:
+ print 'No application available:', ex
+ print
+ return
+ instances = list_instances(regdir)
+ if instances:
+ print 'Available applications (%s):' % regdir
+ for appid in instances:
+ modes = CubicWebConfiguration.possible_configurations(appid)
+ if not modes:
+ print '* %s (BROKEN application, no configuration found)' % appid
+ continue
+ print '* %s (%s)' % (appid, ', '.join(modes))
+ try:
+ config = CubicWebConfiguration.config_for(appid, modes[0])
+ except Exception, exc:
+ print ' (BROKEN application, %s)' % exc
+ continue
+ else:
+ print 'No application available in %s' % regdir
+ print
+
+
+class CreateApplicationCommand(Command):
+    """Create an application from a cube. This is a unified
+ command which can handle web / server / all-in-one installation
+ according to available parts of the software library and of the
+ desired cube.
+
+    <cube>
+ the name of cube to use (list available cube names using
+ the "list" command). You can use several cubes by separating
+ them using comma (e.g. 'jpl,eemail')
+    <application>
+ an identifier for the application to create
+ """
+ name = 'create'
+    arguments = '<cube> <application>'
+ options = (
+ ("config-level",
+ {'short': 'l', 'type' : 'int', 'metavar': '',
+ 'default': 0,
+ 'help': 'configuration level (0..2): 0 will ask for essential \
+configuration parameters only while 2 will ask for all parameters',
+ }
+ ),
+ ("config",
+ {'short': 'c', 'type' : 'choice', 'metavar': '',
+ 'choices': ('all-in-one', 'repository', 'twisted'),
+ 'default': 'all-in-one',
+ 'help': 'installation type, telling which part of an application \
+should be installed. You can list available configurations using the "list" \
+command. Default to "all-in-one", e.g. an installation embedding both the RQL \
+repository and the web server.',
+ }
+ ),
+ )
+
+ def run(self, args):
+ """run the command with its specific arguments"""
+ from logilab.common.textutils import get_csv
+ configname = self.config.config
+ cubes = get_csv(pop_arg(args, 1))
+ appid = pop_arg(args)
+ # get the configuration and helper
+ CubicWebConfiguration.creating = True
+ config = CubicWebConfiguration.config_for(appid, configname)
+ config.set_language = False
+ config.init_cubes(config.expand_cubes(cubes))
+ helper = self.config_helper(config)
+ # check the cube exists
+ try:
+ templdirs = [CubicWebConfiguration.cube_dir(cube)
+ for cube in cubes]
+ except ConfigurationError, ex:
+ print ex
+ print '\navailable cubes:',
+ print ', '.join(CubicWebConfiguration.available_cubes())
+ return
+ # create the registry directory for this application
+ create_dir(config.apphome)
+ # load site_cubicweb from the cubes dir (if any)
+ config.load_site_cubicweb()
+ # cubicweb-ctl configuration
+ print '** application\'s %s configuration' % configname
+ print '-' * 72
+ config.input_config('main', self.config.config_level)
+        # configuration specific stuff
+ print
+ helper.bootstrap(cubes, self.config.config_level)
+ # write down configuration
+ config.save()
+ # handle i18n files structure
+ # XXX currently available languages are guessed from translations found
+ # in the first cube given
+ from cubicweb.common import i18n
+ langs = [lang for lang, _ in i18n.available_catalogs(join(templdirs[0], 'i18n'))]
+ errors = config.i18ncompile(langs)
+ if errors:
+ print '\n'.join(errors)
+ if not confirm('error while compiling message catalogs, '
+ 'continue anyway ?'):
+ print 'creation not completed'
+ return
+ # create the additional data directory for this application
+ if config.appdatahome != config.apphome: # true in dev mode
+ create_dir(config.appdatahome)
+ if config['uid']:
+ from logilab.common.shellutils import chown
+ # this directory should be owned by the uid of the server process
+ print 'set %s as owner of the data directory' % config['uid']
+ chown(config.appdatahome, config['uid'])
+ print
+ print
+ print '*' * 72
+ print 'application %s (%s) created in %r' % (appid, configname,
+ config.apphome)
+ print
+ helper.postcreate()
+
+
+class DeleteApplicationCommand(Command):
+ """Delete an application. Will remove application's files and
+ unregister it.
+ """
+ name = 'delete'
+    arguments = '<application>'
+
+ options = ()
+
+ def run(self, args):
+ """run the command with its specific arguments"""
+ appid = pop_arg(args, msg="No application specified !")
+ configs = [CubicWebConfiguration.config_for(appid, configname)
+ for configname in CubicWebConfiguration.possible_configurations(appid)]
+ if not configs:
+ raise ExecutionError('unable to guess configuration for %s' % appid)
+ for config in configs:
+ helper = self.config_helper(config, required=False)
+ if helper:
+ helper.cleanup()
+ # remove home
+ rm(config.apphome)
+ # remove instance data directory
+ try:
+ rm(config.appdatahome)
+ except OSError, ex:
+ import errno
+ if ex.errno != errno.ENOENT:
+ raise
+ confignames = ', '.join([config.name for config in configs])
+ print 'application %s (%s) deleted' % (appid, confignames)
+
+
+# application commands ########################################################
+
+class StartApplicationCommand(ApplicationCommand):
+ """Start the given applications. If no application is given, start them all.
+
+    <application>...
+ identifiers of the applications to start. If no application is
+ given, start them all.
+ """
+ name = 'start'
+ actionverb = 'started'
+ options = (
+ ("debug",
+ {'short': 'D', 'action' : 'store_true',
+ 'help': 'start server in debug mode.'}),
+ ("force",
+ {'short': 'f', 'action' : 'store_true',
+ 'default': False,
+ 'help': 'start the application even if it seems to be already \
+running.'}),
+ ('profile',
+ {'short': 'P', 'type' : 'string', 'metavar': '',
+ 'default': None,
+ 'help': 'profile code and use the specified file to store stats',
+ }),
+ )
+
+ def start_application(self, appid):
+ """start the application's server"""
+ # use get() since start may be used from other commands (eg upgrade)
+ # without all options defined
+ debug = self.get('debug')
+ force = self.get('force')
+ config = CubicWebConfiguration.config_for(appid)
+ if self.get('profile'):
+ config.global_set_option('profile', self.config.profile)
+ helper = self.config_helper(config, cmdname='start')
+ pidf = config['pid-file']
+ if exists(pidf) and not force:
+ msg = "%s seems to be running. Remove %s by hand if necessary or use \
+the --force option."
+ raise ExecutionError(msg % (appid, pidf))
+ command = helper.start_command(config, debug)
+ if debug:
+ print "starting server with command :"
+ print command
+ if system(command):
+            print 'an error occurred while starting the application, not started'
+ print
+ return False
+ if not debug:
+ print 'application %s started' % appid
+ return True
+
+
+class StopApplicationCommand(ApplicationCommand):
+ """Stop the given applications.
+
+    <application>...
+ identifiers of the applications to stop. If no application is
+ given, stop them all.
+ """
+ name = 'stop'
+ actionverb = 'stopped'
+
+ def ordered_instances(self):
+ instances = super(StopApplicationCommand, self).ordered_instances()
+ instances.reverse()
+ return instances
+
+ def stop_application(self, appid):
+ """stop the application's server"""
+ config = CubicWebConfiguration.config_for(appid)
+ helper = self.config_helper(config, cmdname='stop')
+ helper.poststop() # do this anyway
+ pidf = config['pid-file']
+ if not exists(pidf):
+ print >> sys.stderr, "%s doesn't exist." % pidf
+ return
+ import signal
+ pid = int(open(pidf).read().strip())
+ try:
+ kill(pid, signal.SIGTERM)
+ except:
+ print >> sys.stderr, "process %s seems already dead." % pid
+ else:
+ try:
+ wait_process_end(pid)
+ except ExecutionError, ex:
+ print >> sys.stderr, ex
+ print >> sys.stderr, 'trying SIGKILL'
+ try:
+ kill(pid, signal.SIGKILL)
+ except:
+ # probably dead now
+ pass
+ wait_process_end(pid)
+ try:
+ remove(pidf)
+ except OSError:
+ # already removed by twistd
+ pass
+ print 'application %s stopped' % appid
+
+
+class RestartApplicationCommand(StartApplicationCommand,
+ StopApplicationCommand):
+ """Restart the given applications.
+
+    <application>...
+ identifiers of the applications to restart. If no application is
+ given, restart them all.
+ """
+ name = 'restart'
+ actionverb = 'restarted'
+
+ def run_args(self, args, askconfirm):
+ regdir = CubicWebConfiguration.registry_dir()
+ if not isfile(join(regdir, 'startorder')) or len(args) <= 1:
+ # no specific startorder
+ super(RestartApplicationCommand, self).run_args(args, askconfirm)
+ return
+ print ('some specific start order is specified, will first stop all '
+ 'applications then restart them.')
+ # get instances in startorder
+ stopped = []
+ for appid in args:
+ if askconfirm:
+ print '*'*72
+ if not confirm('%s application %r ?' % (self.name, appid)):
+ continue
+ self.stop_application(appid)
+ stopped.append(appid)
+ forkcmd = [w for w in sys.argv if not w in args]
+ forkcmd[1] = 'start'
+ forkcmd = ' '.join(forkcmd)
+ for appid in reversed(args):
+ status = system('%s %s' % (forkcmd, appid))
+ if status:
+ sys.exit(status)
+
+ def restart_application(self, appid):
+ self.stop_application(appid)
+ if self.start_application(appid):
+ print 'application %s %s' % (appid, self.actionverb)
+
+
+class ReloadConfigurationCommand(RestartApplicationCommand):
+ """Reload the given applications. This command is equivalent to a
+ restart for now.
+
+    <application>...
+ identifiers of the applications to reload. If no application is
+ given, reload them all.
+ """
+ name = 'reload'
+
+ def reload_application(self, appid):
+ self.restart_application(appid)
+
+
+class StatusCommand(ApplicationCommand):
+ """Display status information about the given applications.
+
+    <application>...
+ identifiers of the applications to status. If no application is
+ given, get status information about all registered applications.
+ """
+ name = 'status'
+ options = ()
+
+ def status_application(self, appid):
+ """print running status information for an application"""
+ for mode in CubicWebConfiguration.possible_configurations(appid):
+ config = CubicWebConfiguration.config_for(appid, mode)
+ print '[%s-%s]' % (appid, mode),
+ try:
+ pidf = config['pid-file']
+ except KeyError:
+ print 'buggy application, pid file not specified'
+ continue
+ if not exists(pidf):
+ print "doesn't seem to be running"
+ continue
+ pid = int(open(pidf).read().strip())
+ # trick to guess whether or not the process is running
+ try:
+ getpgid(pid)
+ except OSError:
+ print "should be running with pid %s but the process can not be found" % pid
+ continue
+ print "running with pid %s" % (pid)
+
+
+class UpgradeApplicationCommand(ApplicationCommandFork,
+ StartApplicationCommand,
+ StopApplicationCommand):
+ """Upgrade an application after cubicweb and/or component(s) upgrade.
+
+ For repository update, you will be prompted for a login / password to use
+ to connect to the system database. For some upgrades, the given user
+ should have create or alter table permissions.
+
+    <application>...
+ identifiers of the applications to upgrade. If no application is
+ given, upgrade them all.
+ """
+ name = 'upgrade'
+ actionverb = 'upgraded'
+ options = ApplicationCommand.options + (
+ ('force-componant-version',
+ {'short': 't', 'type' : 'csv', 'metavar': 'cube1=X.Y.Z,cube2=X.Y.Z',
+ 'default': None,
+ 'help': 'force migration from the indicated version for the specified cube.'}),
+ ('force-cubicweb-version',
+ {'short': 'e', 'type' : 'string', 'metavar': 'X.Y.Z',
+ 'default': None,
+ 'help': 'force migration from the indicated cubicweb version.'}),
+
+ ('fs-only',
+ {'short': 's', 'action' : 'store_true',
+ 'default': False,
+ 'help': 'only upgrade files on the file system, not the database.'}),
+
+ ('nostartstop',
+ {'short': 'n', 'action' : 'store_true',
+ 'default': False,
+ 'help': 'don\'t try to stop application before migration and to restart it after.'}),
+
+ ('verbosity',
+ {'short': 'v', 'type' : 'int', 'metavar': '<0..2>',
+ 'default': 1,
+          'help': "0: no confirmation, 1: only main commands confirmed, 2: ask \
+for everything."}),
+
+ ('backup-db',
+ {'short': 'b', 'type' : 'yn', 'metavar': '',
+ 'default': None,
+ 'help': "Backup the application database before upgrade.\n"\
+          "If the option is omitted, confirmation will be asked.",
+ }),
+
+ ('ext-sources',
+ {'short': 'E', 'type' : 'csv', 'metavar': '',
+ 'default': None,
+          'help': "For multisources instances, specify to which sources the \
+repository should connect for upgrading. When unspecified or 'migration' is \
+given, appropriate sources for migration will be automatically selected \
+(recommended). If 'all' is given, will connect to all defined sources.",
+ }),
+ )
+
+ def ordered_instances(self):
+        # need this since the MRO would return StopApplicationCommand's implementation
+ return ApplicationCommand.ordered_instances(self)
+
+ def upgrade_application(self, appid):
+ from logilab.common.changelog import Version
+ if not (CubicWebConfiguration.mode == 'dev' or self.config.nostartstop):
+ self.stop_application(appid)
+ config = CubicWebConfiguration.config_for(appid)
+ config.creating = True # notice we're not starting the server
+ config.verbosity = self.config.verbosity
+ config.set_sources_mode(self.config.ext_sources or ('migration',))
+ # get application and installed versions for the server and the components
+ print 'getting versions configuration from the repository...'
+ mih = config.migration_handler()
+ repo = mih.repo_connect()
+ vcconf = repo.get_versions()
+ print 'done'
+ if self.config.force_componant_version:
+ packversions = {}
+ for vdef in self.config.force_componant_version:
+ componant, version = vdef.split('=')
+ packversions[componant] = Version(version)
+ vcconf.update(packversions)
+ toupgrade = []
+ for cube in config.cubes():
+ installedversion = config.cube_version(cube)
+ try:
+ applversion = vcconf[cube]
+ except KeyError:
+ config.error('no version information for %s' % cube)
+ continue
+ if installedversion > applversion:
+ toupgrade.append( (cube, applversion, installedversion) )
+ cubicwebversion = config.cubicweb_version()
+ if self.config.force_cubicweb_version:
+ applcubicwebversion = Version(self.config.force_cubicweb_version)
+ vcconf['cubicweb'] = applcubicwebversion
+ else:
+ applcubicwebversion = vcconf.get('cubicweb')
+ if cubicwebversion > applcubicwebversion:
+ toupgrade.append( ('cubicweb', applcubicwebversion, cubicwebversion) )
+ if not self.config.fs_only and not toupgrade:
+ print 'no software migration needed for application %s' % appid
+ return
+ for cube, fromversion, toversion in toupgrade:
+ print '**** %s migration %s -> %s' % (cube, fromversion, toversion)
+ # run cubicweb/components migration scripts
+ mih.migrate(vcconf, reversed(toupgrade), self.config)
+ # rewrite main configuration file
+ mih.rewrite_configuration()
+ # handle i18n upgrade:
+ # * install new languages
+ # * recompile catalogs
+ # XXX currently available languages are guessed from translations found
+ # in the first component given
+ from cubicweb.common import i18n
+ templdir = CubicWebConfiguration.cube_dir(config.cubes()[0])
+ langs = [lang for lang, _ in i18n.available_catalogs(join(templdir, 'i18n'))]
+ errors = config.i18ncompile(langs)
+ if errors:
+ print '\n'.join(errors)
+ if not confirm('error while compiling message catalogs, '
+ 'continue anyway ?'):
+ print 'migration not completed'
+ return
+ mih.rewrite_vcconfiguration()
+ mih.shutdown()
+ print
+ print 'application migrated'
+ if not (CubicWebConfiguration.mode == 'dev' or self.config.nostartstop):
+ self.start_application(appid)
+ print
+
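+
+# A minimal sketch of the version comparison driving upgrade_application above:
+# a cube needs migration when the version installed on the file system is newer
+# than the version recorded in the repository. Versions are comparable objects
+# such as logilab.common.changelog.Version; the dictionaries below are
+# hypothetical.
+def _cubes_to_upgrade(installedversions, recordedversions):
+    """return a list of (cube, fromversion, toversion) tuples for cubes whose
+    file system version is newer than the one recorded in the database
+    """
+    toupgrade = []
+    for cube, fsversion in installedversions.items():
+        if cube not in recordedversions:
+            continue # no version information recorded for this cube
+        dbversion = recordedversions[cube]
+        if fsversion > dbversion:
+            toupgrade.append((cube, dbversion, fsversion))
+    return toupgrade
+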
+
+class ShellCommand(Command):
+ """Run an interactive migration shell. This is a python shell with
+ enhanced migration commands predefined in the namespace. An additional
+ argument may be given corresponding to a file containing commands to
+ execute in batch mode.
+
+
+ the identifier of the application to connect.
+ """
+ name = 'shell'
+ arguments = ' [batch command file]'
+ options = (
+ ('system-only',
+ {'short': 'S', 'action' : 'store_true',
+ 'default': False,
+ 'help': 'only connect to the system source when the instance is '
+ 'using multiple sources. You can\'t use this option and the '
+ '--ext-sources option at the same time.'}),
+
+ ('ext-sources',
+ {'short': 'E', 'type' : 'csv', 'metavar': '',
+ 'default': None,
+ 'help': "For multisources instances, specify which sources the \
+repository should connect to for upgrading. When unspecified or 'all' given, \
+will connect to all defined sources. If 'migration' is given, appropriate \
+sources for migration will be automatically selected.",
+ }),
+
+ )
+ def run(self, args):
+ appid = pop_arg(args, 99, msg="No application specified !")
+ config = CubicWebConfiguration.config_for(appid)
+ if self.config.ext_sources:
+ assert not self.config.system_only
+ sources = self.config.ext_sources
+ elif self.config.system_only:
+ sources = ('system',)
+ else:
+ sources = ('all',)
+ config.set_sources_mode(sources)
+ mih = config.migration_handler()
+ if args:
+ mih.scripts_session(args)
+ else:
+ mih.interactive_shell()
+ mih.shutdown()
+
+
+class RecompileApplicationCatalogsCommand(ApplicationCommand):
+ """Recompile i18n catalogs for applications.
+
+ ...
+ identifiers of the applications to consider. If no application is
+ given, recompile for all registered applications.
+ """
+ name = 'i18ncompile'
+
+ def i18ncompile_application(self, appid):
+ """recompile application's messages catalogs"""
+ config = CubicWebConfiguration.config_for(appid)
+ try:
+ config.bootstrap_cubes()
+ except IOError, ex:
+ import errno
+ if ex.errno != errno.ENOENT:
+ raise
+ # bootstrap_cubes file doesn't exist
+ # set creating to notify this is not a regular start
+ config.creating = True
+ # create an in-memory repository, will call config.init_cubes()
+ config.repository()
+ except AttributeError:
+ # web only config
+ config.init_cubes(config.repository().get_cubes())
+ errors = config.i18ncompile()
+ if errors:
+ print '\n'.join(errors)
+
+
+class ListInstancesCommand(Command):
+ """list available instances, useful for bash completion."""
+ name = 'listinstances'
+ hidden = True
+
+ def run(self, args):
+ """run the command with its specific arguments"""
+ regdir = CubicWebConfiguration.registry_dir()
+ for appid in sorted(listdir(regdir)):
+ print appid
+
+
+class ListCubesCommand(Command):
+ """list available components, useful for bash completion."""
+ name = 'listcubes'
+ hidden = True
+
+ def run(self, args):
+ """run the command with its specific arguments"""
+ for cube in CubicWebConfiguration.available_cubes():
+ print cube
+
+register_commands((ListCommand,
+ CreateApplicationCommand,
+ DeleteApplicationCommand,
+ StartApplicationCommand,
+ StopApplicationCommand,
+ RestartApplicationCommand,
+ ReloadConfigurationCommand,
+ StatusCommand,
+ UpgradeApplicationCommand,
+ ShellCommand,
+ RecompileApplicationCatalogsCommand,
+ ListInstancesCommand, ListCubesCommand,
+ ))
+
+
+def run(args):
+ """command line tool"""
+ CubicWebConfiguration.load_cwctl_plugins()
+ main_run(args, __doc__)
+
+if __name__ == '__main__':
+ run(sys.argv[1:])
diff -r 000000000000 -r b97547f5f1fa cwvreg.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/cwvreg.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,386 @@
+"""extend the generic VRegistry with some cubicweb specific stuff
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from warnings import warn
+
+from logilab.common.decorators import cached, clear_cache
+
+from rql import RQLHelper
+
+from cubicweb import Binary, UnknownProperty
+from cubicweb.vregistry import VRegistry, ObjectNotFound, NoSelectableObject
+
+_ = unicode
+
+class DummyCursorError(Exception): pass
+class RaiseCursor:
+ @classmethod
+ def execute(cls, rql, args=None, eid_key=None):
+ raise DummyCursorError()
+
+
+class CubicWebRegistry(VRegistry):
+ """extend the generic VRegistry with some cubicweb specific stuff"""
+
+ def __init__(self, config, debug=None):
+ # first init log service
+ config.init_log(debug=debug)
+ super(CubicWebRegistry, self).__init__(config)
+ self.schema = None
+ self.reset()
+ self.initialized = False
+
+ def items(self):
+ return [item for item in self._registries.items()
+ if not item[0] in ('propertydefs', 'propertyvalues')]
+
+ def values(self):
+ return [value for key,value in self._registries.items()
+ if not key in ('propertydefs', 'propertyvalues')]
+
+ def reset(self):
+ self._registries = {}
+ self._lastmodifs = {}
+ # two special registries: propertydefs, which contains all the property
+ # definitions, and propertyvalues, which contains values for those properties
+ self._registries['propertydefs'] = {}
+ self._registries['propertyvalues'] = self.eprop_values = {}
+ for key, propdef in self.config.eproperty_definitions():
+ self.register_property(key, **propdef)
+
+ def set_schema(self, schema):
+ """set the application's schema and load application objects"""
+ self.schema = schema
+ clear_cache(self, 'rqlhelper')
+ # now we can load application's web objects
+ self.register_objects(self.config.vregistry_path())
+
+ def update_schema(self, schema):
+ """update .schema attribute on registered objects, necessary for some
+ tests
+ """
+ self.schema = schema
+ for registry, regcontent in self._registries.items():
+ if registry in ('propertydefs', 'propertyvalues'):
+ continue
+ for objects in regcontent.values():
+ for obj in objects:
+ obj.schema = schema
+
+ def register_objects(self, path, force_reload=None):
+ """overridden to handle type class cache issue"""
+ if super(CubicWebRegistry, self).register_objects(path, force_reload):
+ # clear etype cache if you don't want to run into deep weirdness
+ clear_cache(self, 'etype_class')
+ # remove vobjects that don't support any available interface
+ interfaces = set()
+ for classes in self.get('etypes', {}).values():
+ for cls in classes:
+ interfaces.update(cls.__implements__)
+ if not self.config.cleanup_interface_sobjects:
+ return
+ for registry, regcontent in self._registries.items():
+ if registry in ('propertydefs', 'propertyvalues', 'etypes'):
+ continue
+ for oid, objects in regcontent.items():
+ for obj in reversed(objects[:]):
+ if not obj in objects:
+ continue # obj has been kicked by a previous one
+ accepted = set(getattr(obj, 'accepts_interfaces', ()))
+ if accepted:
+ for accepted_iface in accepted:
+ for found_iface in interfaces:
+ if issubclass(found_iface, accepted_iface):
+ # consider priority if necessary
+ if hasattr(obj.__registerer__, 'remove_all_equivalents'):
+ registerer = obj.__registerer__(self, obj)
+ registerer.remove_all_equivalents(objects)
+ break
+ else:
+ self.debug('kicking vobject %s (unsupported interface)', obj)
+ objects.remove(obj)
+ # if objects is empty, remove oid from registry
+ if not objects:
+ del regcontent[oid]
+
+ def eid_rset(self, cursor, eid, etype=None):
+ """return a result set for the given eid without doing actual query
+ (we have the eid, we can suppose it exists and user has access to the
+ entity)
+ """
+ msg = '.eid_rset is deprecated, use req.eid_rset'
+ warn(msg, DeprecationWarning, stacklevel=2)
+ try:
+ return cursor.req.eid_rset(eid, etype)
+ except AttributeError:
+ # cursor is a session
+ return cursor.eid_rset(eid, etype)
+
+ @cached
+ def etype_class(self, etype):
+ """return an entity class for the given entity type.
+ Try to find out a specific class for this kind of entity or
+ default to a dump of the class registered for 'Any'
+ """
+ etype = str(etype)
+ eschema = self.schema.eschema(etype)
+ baseschemas = [eschema] + eschema.ancestors()
+ # browse ancestors from most specific to most generic and
+ # try to find an associated custom entity class
+ for baseschema in baseschemas:
+ btype = str(baseschema)
+ try:
+ return self.select(self.registry_objects('etypes', btype), etype)
+ except ObjectNotFound:
+ pass
+ # no entity class for any of the ancestors, fallback to the default one
+ return self.select(self.registry_objects('etypes', 'Any'), etype)
+
+ def render(self, registry, oid, req, **context):
+ """select an object in a given registry and render it
+
+ - registry: the registry's name
+ - oid : the view to call
+ - req : the HTTP request
+ """
+ objclss = self.registry_objects(registry, oid)
+ try:
+ rset = context.pop('rset')
+ except KeyError:
+ rset = None
+ selected = self.select(objclss, req, rset, **context)
+ return selected.dispatch(**context)
+
+ def main_template(self, req, oid='main', **context):
+ """display query by calling the given template (default to main),
+ and returning the output as a string instead of requiring the [w]rite
+ method as argument
+ """
+ res = self.render('templates', oid, req, **context)
+ if isinstance(res, unicode):
+ return res.encode(req.encoding)
+ assert isinstance(res, str)
+ return res
+
+ def possible_vobjects(self, registry, *args, **kwargs):
+ """return an ordered list of possible app objects in a given registry,
+ supposing they support the 'visible' and 'order' properties (as most
+ visualizable objects)
+ """
+ return [x for x in sorted(self.possible_objects(registry, *args, **kwargs),
+ key=lambda x: x.propval('order'))
+ if x.propval('visible')]
+
+ def possible_actions(self, req, rset, **kwargs):
+ if rset is None:
+ actions = self.possible_vobjects('actions', req, rset)
+ else:
+ actions = rset.possible_actions() # cached implementation
+ result = {}
+ for action in actions:
+ result.setdefault(action.category, []).append(action)
+ return result
+
+ def possible_views(self, req, rset, **kwargs):
+ """return an iterator on possible views for this result set
+
+ views returned are classes, not instances
+ """
+ for vid, views in self.registry('views').items():
+ if vid[0] == '_':
+ continue
+ try:
+ view = self.select(views, req, rset, **kwargs)
+ if view.linkable():
+ yield view
+ except NoSelectableObject:
+ continue
+
+ def select_box(self, oid, *args, **kwargs):
+ """return the most specific box according to the result set"""
+ try:
+ return self.select_object('boxes', oid, *args, **kwargs)
+ except NoSelectableObject:
+ return
+
+ def select_action(self, oid, *args, **kwargs):
+ """return the most specific action according to the result set"""
+ try:
+ return self.select_object('actions', oid, *args, **kwargs)
+ except NoSelectableObject:
+ return
+
+ def select_component(self, cid, *args, **kwargs):
+ """return the most specific component according to the result set"""
+ try:
+ return self.select_object('components', cid, *args, **kwargs)
+ except (NoSelectableObject, ObjectNotFound):
+ return
+
+ def select_view(self, __vid, req, rset, **kwargs):
+ """return the most specific view according to the result set"""
+ views = self.registry_objects('views', __vid)
+ return self.select(views, req, rset, **kwargs)
+
+
+ # properties handling #####################################################
+
+ def user_property_keys(self, withsitewide=False):
+ if withsitewide:
+ return sorted(self['propertydefs'])
+ return sorted(k for k, kd in self['propertydefs'].iteritems()
+ if not kd['sitewide'])
+
+ def register_property(self, key, type, help, default=None, vocabulary=None,
+ sitewide=False):
+ """register a given property"""
+ properties = self._registries['propertydefs']
+ assert type in YAMS_TO_PY
+ properties[key] = {'type': type, 'vocabulary': vocabulary,
+ 'default': default, 'help': help,
+ 'sitewide': sitewide}
+
+ def property_info(self, key):
+ """return dictionary containing description associated to the given
+ property key (including type, default value, help and a site wide
+ boolean)
+ """
+ try:
+ return self._registries['propertydefs'][key]
+ except KeyError:
+ if key.startswith('system.version.'):
+ soft = key.split('.')[-1]
+ return {'type': 'String', 'sitewide': True,
+ 'default': None, 'vocabulary': None,
+ 'help': _('%s software version of the database') % soft}
+ raise UnknownProperty('unregistered property %r' % key)
+
+ def property_value(self, key):
+ try:
+ return self._registries['propertyvalues'][key]
+ except KeyError:
+ return self._registries['propertydefs'][key]['default']
+
+ def typed_value(self, key, value):
+ """value is a unicode string, return it correctly typed. Let potential
+ type errors propagate.
+ """
+ pdef = self.property_info(key)
+ try:
+ value = YAMS_TO_PY[pdef['type']](value)
+ except (TypeError, ValueError):
+ raise ValueError(_('bad value'))
+ vocab = pdef['vocabulary']
+ if vocab is not None:
+ if callable(vocab):
+ vocab = vocab(key, None) # XXX need a req object
+ if not value in vocab:
+ raise ValueError(_('unauthorized value'))
+ return value
+
+ def init_properties(self, propvalues):
+ """init the property values registry using the given set of (key, value) couples
+ """
+ self.initialized = True
+ values = self._registries['propertyvalues']
+ for key, val in propvalues:
+ try:
+ values[key] = self.typed_value(key, val)
+ except ValueError, ex:
+ self.warning('%s (you should probably delete that property '
+ 'from the database)', ex)
+ except UnknownProperty, ex:
+ self.warning('%s (you should probably delete that property '
+ 'from the database)', ex)
+
+
+ def property_value_widget(self, propkey, req=None, **attrs):
+ """return widget according to key's type / vocab"""
+ from cubicweb.web.widgets import StaticComboBoxWidget, widget_factory
+ if req is None:
+ tr = unicode
+ else:
+ tr = req._
+ try:
+ pdef = self.property_info(propkey)
+ except UnknownProperty, ex:
+ self.warning('%s (you should probably delete that property '
+ 'from the database)', ex)
+ return widget_factory(self, 'EProperty', self.schema['value'], 'String',
+ description=u'', **attrs)
+ req.form['value'] = pdef['default'] # XXX hack to pass the default value
+ vocab = pdef['vocabulary']
+ if vocab is not None:
+ if callable(vocab):
+ # list() just in case it's a generator function
+ vocabfunc = lambda e: list(vocab(propkey, req))
+ else:
+ vocabfunc = lambda e: vocab
+ w = StaticComboBoxWidget(self, 'EProperty', self.schema['value'], 'String',
+ vocabfunc=vocabfunc, description=tr(pdef['help']),
+ **attrs)
+ else:
+ w = widget_factory(self, 'EProperty', self.schema['value'], pdef['type'],
+ description=tr(pdef['help']), **attrs)
+ return w
+
+ def parse(self, session, rql, args=None):
+ rqlst = self.rqlhelper.parse(rql)
+ def type_from_eid(eid, session=session):
+ return session.describe(eid)[0]
+ self.rqlhelper.compute_solutions(rqlst, {'eid': type_from_eid}, args)
+ return rqlst
+
+ @property
+ @cached
+ def rqlhelper(self):
+ return RQLHelper(self.schema,
+ special_relations={'eid': 'uid', 'has_text': 'fti'})
+
+class MulCnxCubicWebRegistry(CubicWebRegistry):
+ """special registry to be used when an application has to deal with
+ connections to different repositories. This class adds some additional wrappers
+ trying to hide buggy class attributes since classes are not designed to be
+ shared.
+ """
+ def etype_class(self, etype):
+ """return an entity class for the given entity type.
+ Try to find out a specific class for this kind of entity or
+ default to a dump of the class registered for 'Any'
+ """
+ usercls = super(MulCnxCubicWebRegistry, self).etype_class(etype)
+ usercls.e_schema = self.schema.eschema(etype)
+ return usercls
+
+ def select(self, vobjects, *args, **kwargs):
+ """return an instance of the most specific object according
+ to parameters
+
+ raise NoSelectableObject if no object applies
+ """
+ for vobject in vobjects:
+ vobject.vreg = self
+ vobject.schema = self.schema
+ vobject.config = self.config
+ return super(MulCnxCubicWebRegistry, self).select(vobjects, *args, **kwargs)
+
+from mx.DateTime import DateTime, Time, DateTimeDelta
+
+YAMS_TO_PY = {
+ 'Boolean': bool,
+ 'String' : unicode,
+ 'Password': str,
+ 'Bytes': Binary,
+ 'Int': int,
+ 'Float': float,
+ 'Date': DateTime,
+ 'Datetime': DateTime,
+ 'Time': Time,
+ 'Interval': DateTimeDelta,
+ }
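+
+# A minimal usage sketch for the property registry above: a property is declared
+# with a yams type (and optionally a vocabulary), then string values coming from
+# the database are converted back through typed_value(). The property key and
+# values below are hypothetical.
+def _property_example(vreg):
+    vreg.register_property('ui.example-prop', type='String',
+                           help=_('example property'), default='main',
+                           vocabulary=('main', 'simple'), sitewide=True)
+    # unset property: fall back to the registered default
+    assert vreg.property_value('ui.example-prop') == 'main'
+    # values read from the database are unicode and get checked against the
+    # vocabulary before being returned correctly typed
+    assert vreg.typed_value('ui.example-prop', u'simple') == u'simple'
+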
+
diff -r 000000000000 -r b97547f5f1fa dbapi.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/dbapi.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,711 @@
+"""DB-API 2.0 compliant module
+
+Take a look at http://www.python.org/peps/pep-0249.html
+
+(most parts of this document are reported here in docstrings)
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logging import getLogger, StreamHandler
+from time import time, clock
+
+from cubicweb import ConnectionError, RequestSessionMixIn, set_log_methods
+from cubicweb.cwvreg import CubicWebRegistry, MulCnxCubicWebRegistry
+from cubicweb.cwconfig import CubicWebNoAppConfiguration
+
+_MARKER = object()
+
+class ConnectionProperties(object):
+ def __init__(self, cnxtype=None, lang=None, close=True, log=False):
+ self.cnxtype = cnxtype or 'pyro'
+ self.lang = lang
+ self.log_queries = log
+ self.close_on_del = close
+
+
+def get_repository(method, database=None, config=None, vreg=None):
+ """get a proxy object to the CubicWeb repository, using a specific RPC method.
+
+ Only 'inmemory' and 'pyro' are supported for now. Either vreg or config
+ argument should be given
+ """
+ assert method in ('pyro', 'inmemory')
+ assert vreg or config
+ if vreg and not config:
+ config = vreg.config
+ if method == 'inmemory':
+ # get local access to the repository
+ from cubicweb.server.repository import Repository
+ return Repository(config, vreg=vreg)
+ else: # method == 'pyro'
+ from Pyro import core, naming, config as pyroconfig
+ from Pyro.errors import NamingError, ProtocolError
+ core.initClient(banner=0)
+ pyroconfig.PYRO_NS_DEFAULTGROUP = ':' + config['pyro-ns-group']
+ locator = naming.NameServerLocator()
+ # resolve the Pyro object
+ try:
+ nshost, nsport = config['pyro-ns-host'], config['pyro-ns-port']
+ uri = locator.getNS(nshost, nsport).resolve(database)
+ except ProtocolError:
+ raise ConnectionError('Could not connect to the Pyro name server '
+ '(host: %s:%i)' % (nshost, nsport))
+ except NamingError:
+ raise ConnectionError('Could not get repository for %s '
+ '(not registered in Pyro), '
+ 'you may have to restart your server-side '
+ 'application' % database)
+ return core.getProxyForURI(uri)
+
+def repo_connect(repo, user, password, cnxprops=None):
+ """Constructor to create a new connection to the CubicWeb repository.
+
+ Returns a Connection instance.
+ """
+ cnxprops = cnxprops or ConnectionProperties('inmemory')
+ cnxid = repo.connect(unicode(user), password, cnxprops=cnxprops)
+ cnx = Connection(repo, cnxid, cnxprops)
+ if cnxprops.cnxtype == 'inmemory':
+ cnx.vreg = repo.vreg
+ return cnx
+
+def connect(database=None, user=None, password=None, host=None,
+ group=None, cnxprops=None, port=None, setvreg=True, mulcnx=True):
+ """Constructor for creating a connection to the CubicWeb repository.
+ Returns a Connection object.
+
+ When method is 'pyro' and setvreg is True, use a special registry class
+ (MulCnxCubicWebRegistry) made to deal with connections to different instances
+ in the same process unless specified otherwise by setting mulcnx to
+ False.
+ """
+ config = CubicWebNoAppConfiguration()
+ if host:
+ config.global_set_option('pyro-ns-host', host)
+ if port:
+ config.global_set_option('pyro-ns-port', port)
+ if group:
+ config.global_set_option('pyro-ns-group', group)
+ cnxprops = cnxprops or ConnectionProperties()
+ method = cnxprops.cnxtype
+ repo = get_repository(method, database, config=config)
+ if method == 'inmemory':
+ vreg = repo.vreg
+ elif setvreg:
+ if mulcnx:
+ vreg = MulCnxCubicWebRegistry(config)
+ else:
+ vreg = CubicWebRegistry(config)
+ vreg.set_schema(repo.get_schema())
+ else:
+ vreg = None
+ cnx = repo_connect(repo, user, password, cnxprops)
+ cnx.vreg = vreg
+ return cnx
+
+def in_memory_cnx(config, user, password):
+ """useful method for testing and scripting to get a dbapi.Connection
+ object connected to an in-memory repository instance
+ """
+ if isinstance(config, CubicWebRegistry):
+ vreg = config
+ config = None
+ else:
+ vreg = None
+ # get local access to the repository
+ repo = get_repository('inmemory', config=config, vreg=vreg)
+ # connection to the CubicWeb repository
+ cnxprops = ConnectionProperties('inmemory')
+ cnx = repo_connect(repo, user, password, cnxprops=cnxprops)
+ return repo, cnx
+
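+
+# A minimal usage sketch for the connection helpers above: connect through pyro
+# (the default connection type), run a query and commit. The application id,
+# login and password are hypothetical.
+def _connect_example():
+    cnx = connect(database='myapp', user='admin', password='secret')
+    cursor = cnx.cursor()
+    rset = cursor.execute('Any X WHERE X is EUser')
+    print '%s users' % len(rset.rows)
+    cnx.commit()
+    cnx.close()
+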
+
+class DBAPIRequest(RequestSessionMixIn):
+
+ def __init__(self, vreg, cnx=None):
+ super(DBAPIRequest, self).__init__(vreg)
+ try:
+ # either no vreg, or a config which doesn't handle translations
+ self.translations = vreg.config.translations
+ except AttributeError:
+ self.translations = {}
+ self.set_default_language(vreg)
+ # cache entities built during the request
+ self._eid_cache = {}
+ # these args are initialized after a connection is
+ # established
+ self.cnx = None # connection associated to the request
+ self._user = None # request's user, set at authentication
+ if cnx is not None:
+ self.set_connection(cnx)
+
+ def base_url(self):
+ return self.vreg.config['base-url']
+
+ def from_controller(self):
+ return 'view'
+
+ def set_connection(self, cnx, user=None):
+ """method called by the session handler when the user is authenticated
+ or an anonymous connection is open
+ """
+ self.cnx = cnx
+ self.cursor = cnx.cursor(self)
+ self.set_user(user)
+
+ def set_default_language(self, vreg):
+ try:
+ self.lang = vreg.property_value('ui.language')
+ except: # property may not be registered
+ self.lang = 'en'
+ # use req.__ to translate a message without registering it to the catalog
+ try:
+ self._ = self.__ = self.translations[self.lang]
+ except KeyError:
+ # this occurs usually during test execution
+ self._ = self.__ = unicode
+ self.debug('request language: %s', self.lang)
+
+ def decorate_rset(self, rset):
+ rset.vreg = self.vreg
+ rset.req = self
+ return rset
+
+ def describe(self, eid):
+ """return a tuple (type, sourceuri, extid) for the entity with the given eid"""
+ return self.cnx.describe(eid)
+
+ def source_defs(self):
+ """return the definition of sources used by the repository."""
+ return self.cnx.source_defs()
+
+ # entities cache management ###############################################
+
+ def entity_cache(self, eid):
+ return self._eid_cache[eid]
+
+ def set_entity_cache(self, entity):
+ self._eid_cache[entity.eid] = entity
+
+ def cached_entities(self):
+ return self._eid_cache.values()
+
+ def drop_entity_cache(self, eid=None):
+ if eid is None:
+ self._eid_cache = {}
+ else:
+ del self._eid_cache[eid]
+
+ # low level session data management #######################################
+
+ def session_data(self):
+ """return a dictionary containing session data"""
+ return self.cnx.session_data()
+
+ def get_session_data(self, key, default=None, pop=False):
+ """return value associated to `key` in session data"""
+ return self.cnx.get_session_data(key, default, pop)
+
+ def set_session_data(self, key, value):
+ """set value associated to `key` in session data"""
+ return self.cnx.set_session_data(key, value)
+
+ def del_session_data(self, key):
+ """remove value associated to `key` in session data"""
+ return self.cnx.del_session_data(key)
+
+ def get_shared_data(self, key, default=None, pop=False):
+ """return value associated to `key` in shared data"""
+ return self.cnx.get_shared_data(key, default, pop)
+
+ def set_shared_data(self, key, value, querydata=False):
+ """set value associated to `key` in shared data
+
+ if `querydata` is true, the value will be added to the repository
+ session's query data which are cleared on commit/rollback of the current
+ transaction, and won't be available through the connection, only on the
+ repository side.
+ """
+ return self.cnx.set_shared_data(key, value, querydata)
+
+ # server session compat layer #############################################
+
+ @property
+ def user(self):
+ if self._user is None and self.cnx:
+ self.set_user(self.cnx.user(self))
+ return self._user
+
+ def set_user(self, user):
+ self._user = user
+ if user:
+ self.set_entity_cache(user)
+
+ def execute(self, *args, **kwargs):
+ """Session interface compatibility"""
+ return self.cursor.execute(*args, **kwargs)
+
+set_log_methods(DBAPIRequest, getLogger('cubicweb.dbapi'))
+
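+
+# A minimal sketch of the session data helpers above: per-connection data lives
+# in a plain dictionary on the Connection object, while shared data is stored on
+# the repository side. The connection argument and keys are hypothetical.
+def _session_data_example(cnx):
+    req = cnx.request()
+    req.set_session_data('last-rql', u'Any X WHERE X is EUser')
+    assert req.get_session_data('last-rql') == u'Any X WHERE X is EUser'
+    # pop=True removes the key while returning its value
+    assert req.get_session_data('last-rql', pop=True) == u'Any X WHERE X is EUser'
+    assert req.get_session_data('last-rql', default='missing') == 'missing'
+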
+
+# exceptions ##################################################################
+
+class ProgrammingError(Exception): #DatabaseError):
+ """Exception raised for errors that are related to the database's operation
+ and not necessarily under the control of the programmer, e.g. an unexpected
+ disconnect occurs, the data source name is not found, a transaction could
+ not be processed, a memory allocation error occurred during processing,
+ etc.
+ """
+
+# module level objects ########################################################
+
+
+apilevel = '2.0'
+
+"""Integer constant stating the level of thread safety the interface supports.
+Possible values are:
+
+ 0 Threads may not share the module.
+ 1 Threads may share the module, but not connections.
+ 2 Threads may share the module and connections.
+ 3 Threads may share the module, connections and
+ cursors.
+
+Sharing in the above context means that two threads may use a resource without
+wrapping it using a mutex semaphore to implement resource locking. Note that
+you cannot always make external resources thread safe by managing access using
+a mutex: the resource may rely on global variables or other external sources
+that are beyond your control.
+"""
+threadsafety = 1
+
+"""String constant stating the type of parameter marker formatting expected by
+the interface. Possible values are :
+
+ 'qmark' Question mark style,
+ e.g. '...WHERE name=?'
+ 'numeric' Numeric, positional style,
+ e.g. '...WHERE name=:1'
+ 'named' Named style,
+ e.g. '...WHERE name=:name'
+ 'format' ANSI C printf format codes,
+ e.g. '...WHERE name=%s'
+ 'pyformat' Python extended format codes,
+ e.g. '...WHERE name=%(name)s'
+"""
+paramstyle = 'pyformat'
+
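+
+# A short sketch of 'pyformat' parameter binding with the cursors defined below:
+# parameters are passed as a mapping and referenced as %(name)s markers in the
+# query. The eid value is hypothetical.
+def _paramstyle_example(cursor, eid):
+    return cursor.execute('Any X WHERE X eid %(x)s', {'x': eid}, eid_key='x')
+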
+
+# connection object ###########################################################
+
+class Connection(object):
+ """DB-API 2.0 compatible Connection object for CubicWeb
+ """
+ # make exceptions available through the connection object
+ ProgrammingError = ProgrammingError
+
+ def __init__(self, repo, cnxid, cnxprops=None):
+ self._repo = repo
+ self.sessionid = cnxid
+ self._close_on_del = getattr(cnxprops, 'close_on_del', True)
+ self._cnxtype = getattr(cnxprops, 'cnxtype', 'pyro')
+ self._closed = None
+ if cnxprops and cnxprops.log_queries:
+ self.executed_queries = []
+ self.cursor_class = LogCursor
+ else:
+ self.cursor_class = Cursor
+ self.anonymous_connection = False
+ self.vreg = None
+ # session's data
+ self.data = {}
+
+ def __repr__(self):
+ if self.anonymous_connection:
+ return '<Connection %s (anonymous)>' % self.sessionid
+ return '<Connection %s>' % self.sessionid
+
+ def request(self):
+ return DBAPIRequest(self.vreg, self)
+
+ def session_data(self):
+ """return a dictionary containing session data"""
+ return self.data
+
+ def get_session_data(self, key, default=None, pop=False):
+ """return value associated to `key` in session data"""
+ if pop:
+ return self.data.pop(key, default)
+ else:
+ return self.data.get(key, default)
+
+ def set_session_data(self, key, value):
+ """set value associated to `key` in session data"""
+ self.data[key] = value
+
+ def del_session_data(self, key):
+ """remove value associated to `key` in session data"""
+ try:
+ del self.data[key]
+ except KeyError:
+ pass
+
+ def check(self):
+ """raise `BadSessionId` if the connection is no longer valid"""
+ try:
+ self._repo.check_session(self.sessionid)
+ except AttributeError:
+ # XXX backward compat for repository running cubicweb < 2.48.3
+ self._repo.session_data(self.sessionid)
+
+ def get_shared_data(self, key, default=None, pop=False):
+ """return value associated to `key` in shared data"""
+ return self._repo.get_shared_data(self.sessionid, key, default, pop)
+
+ def set_shared_data(self, key, value, querydata=False):
+ """set value associated to `key` in shared data
+
+ if `querydata` is true, the value will be added to the repository
+ session's query data which are cleared on commit/rollback of the current
+ transaction, and won't be available through the connection, only on the
+ repository side.
+ """
+ return self._repo.set_shared_data(self.sessionid, key, value, querydata)
+
+ def get_schema(self):
+ """Return the schema currently used by the repository.
+
+ This is NOT part of the DB-API.
+ """
+ if self._closed is not None:
+ raise ProgrammingError('Closed connection')
+ return self._repo.get_schema()
+
+ def load_vobjects(self, cubes=_MARKER, subpath=None, expand=True, force_reload=None):
+ config = self.vreg.config
+ if cubes is _MARKER:
+ cubes = self._repo.get_cubes()
+ elif cubes is None:
+ cubes = ()
+ else:
+ if not isinstance(cubes, (list, tuple)):
+ cubes = (cubes,)
+ if expand:
+ cubes = config.expand_cubes(cubes)
+ if subpath is None:
+ subpath = esubpath = ('entities', 'views')
+ else:
+ esubpath = subpath
+ if 'views' in subpath:
+ esubpath = list(subpath)
+ esubpath.remove('views')
+ esubpath.append('web/views')
+ cubes = reversed([config.cube_dir(p) for p in cubes])
+ vpath = config.build_vregistry_path(cubes, evobjpath=esubpath,
+ tvobjpath=subpath)
+ self.vreg.register_objects(vpath, force_reload)
+ if self._cnxtype == 'inmemory':
+ # should reinit hooks manager as well
+ hm, config = self._repo.hm, self._repo.config
+ hm.set_schema(hm.schema) # reset structure
+ hm.register_system_hooks(config)
+ # application specific hooks
+ if self._repo.config.application_hooks:
+ hm.register_hooks(config.load_hooks(self.vreg))
+
+ def source_defs(self):
+ """Return the definition of sources used by the repository.
+
+ This is NOT part of the DB-API.
+ """
+ if self._closed is not None:
+ raise ProgrammingError('Closed connection')
+ return self._repo.source_defs()
+
+ def user(self, req, props=None):
+ """return the User object associated to this connection"""
+ # cnx validity is checked by the call to .user_info
+ eid, login, groups, properties = self._repo.user_info(self.sessionid, props)
+ if req is None:
+ req = self.request()
+ rset = req.eid_rset(eid, 'EUser')
+ user = self.vreg.etype_class('EUser')(req, rset, row=0, groups=groups,
+ properties=properties)
+ user['login'] = login # cache login
+ return user
+
+ def __del__(self):
+ """close the remote connection if necessary"""
+ if self._closed is None and self._close_on_del:
+ try:
+ self.close()
+ except:
+ pass
+
+ def describe(self, eid):
+ return self._repo.describe(self.sessionid, eid)
+
+ def close(self):
+ """Close the connection now (rather than whenever __del__ is called).
+
+ The connection will be unusable from this point forward; an Error (or
+ subclass) exception will be raised if any operation is attempted with
+ the connection. The same applies to all cursor objects trying to use the
+ connection. Note that closing a connection without committing the
+ changes first will cause an implicit rollback to be performed.
+ """
+ if self._closed:
+ raise ProgrammingError('Connection is already closed')
+ self._repo.close(self.sessionid)
+ self._closed = 1
+
+ def commit(self):
+ """Commit any pending transaction to the database. Note that if the
+ database supports an auto-commit feature, this must be initially off. An
+ interface method may be provided to turn it back on.
+
+ Database modules that do not support transactions should implement this
+ method with void functionality.
+ """
+ if self._closed is not None:
+ raise ProgrammingError('Connection is already closed')
+ self._repo.commit(self.sessionid)
+
+ def rollback(self):
+ """This method is optional since not all databases provide transaction
+ support.
+
+ In case a database does provide transactions this method causes the
+ database to roll back to the start of any pending transaction. Closing
+ a connection without committing the changes first will cause an implicit
+ rollback to be performed.
+ """
+ if self._closed is not None:
+ raise ProgrammingError('Connection is already closed')
+ self._repo.rollback(self.sessionid)
+
+ def cursor(self, req=None):
+ """Return a new Cursor Object using the connection. If the database
+ does not provide a direct cursor concept, the module will have to
+ emulate cursors using other means to the extent needed by this
+ specification.
+ """
+ if self._closed is not None:
+ raise ProgrammingError('Can\'t get cursor on closed connection')
+ if req is None:
+ req = self.request()
+ return self.cursor_class(self, self._repo, req=req)
+
+
+# cursor object ###############################################################
+
+class Cursor(object):
+ """These objects represent a database cursor, which is used to manage the
+ context of a fetch operation. Cursors created from the same connection are
+ not isolated, i.e., any changes done to the database by a cursor are
+ immediately visible by the other cursors. Cursors created from different
+ connections can or can not be isolated, depending on how the transaction
+ support is implemented (see also the connection's rollback() and commit()
+ methods.)
+ """
+
+ def __init__(self, connection, repo, req=None):
+ """This read-only attribute returns a reference to the Connection
+ object on which the cursor was created.
+ """
+ self.connection = connection
+ """optional issuing request instance"""
+ self.req = req
+
+ """This read/write attribute specifies the number of rows to fetch at a
+ time with fetchmany(). It defaults to 1 meaning to fetch a single row
+ at a time.
+
+ Implementations must observe this value with respect to the fetchmany()
+ method, but are free to interact with the database a single row at a
+ time. It may also be used in the implementation of executemany().
+ """
+ self.arraysize = 1
+
+ self._repo = repo
+ self._sessid = connection.sessionid
+ self._res = None
+ self._closed = None
+ self._index = 0
+
+
+ def close(self):
+ """Close the cursor now (rather than whenever __del__ is called). The
+ cursor will be unusable from this point forward; an Error (or subclass)
+ exception will be raised if any operation is attempted with the cursor.
+ """
+ self._closed = True
+
+
+ def execute(self, operation, parameters=None, eid_key=None, build_descr=True):
+ """Prepare and execute a database operation (query or command).
+ Parameters may be provided as sequence or mapping and will be bound to
+ variables in the operation. Variables are specified in a
+ database-specific notation (see the module's paramstyle attribute for
+ details).
+
+ A reference to the operation will be retained by the cursor. If the
+ same operation object is passed in again, then the cursor can optimize
+ its behavior. This is most effective for algorithms where the same
+ operation is used, but different parameters are bound to it (many
+ times).
+
+ For maximum efficiency when reusing an operation, it is best to use the
+ setinputsizes() method to specify the parameter types and sizes ahead
+ of time. It is legal for a parameter to not match the predefined
+ information; the implementation should compensate, possibly with a loss
+ of efficiency.
+
+ The parameters may also be specified as a list of tuples to e.g. insert
+ multiple rows in a single operation, but this kind of usage is
+ deprecated: executemany() should be used instead.
+
+ Return values are not defined by the DB-API, but here it returns a
+ ResultSet object.
+ """
+ self._res = res = self._repo.execute(self._sessid, operation,
+ parameters, eid_key, build_descr)
+ self.req.decorate_rset(res)
+ self._index = 0
+ return res
+
+
+ def executemany(self, operation, seq_of_parameters):
+ """Prepare a database operation (query or command) and then execute it
+ against all parameter sequences or mappings found in the sequence
+ seq_of_parameters.
+
+ Modules are free to implement this method using multiple calls to the
+ execute() method or by using array operations to have the database
+ process the sequence as a whole in one call.
+
+ Use of this method for an operation which produces one or more result
+ sets constitutes undefined behavior, and the implementation is
+ permitted (but not required) to raise an exception when it detects that
+ a result set has been created by an invocation of the operation.
+
+ The same comments as for execute() also apply accordingly to this
+ method.
+
+ Return values are not defined.
+ """
+ for parameters in seq_of_parameters:
+ self.execute(operation, parameters)
+ if self._res.rows is not None:
+ self._res = None
+ raise ProgrammingError('Operation returned a result set')
+
+
+ def fetchone(self):
+ """Fetch the next row of a query result set, returning a single
+ sequence, or None when no more data is available.
+
+ An Error (or subclass) exception is raised if the previous call to
+ execute*() did not produce any result set or no call was issued yet.
+ """
+ if self._res is None:
+ raise ProgrammingError('No result set')
+ row = self._res.rows[self._index]
+ self._index += 1
+ return row
+
+
+ def fetchmany(self, size=None):
+ """Fetch the next set of rows of a query result, returning a sequence
+ of sequences (e.g. a list of tuples). An empty sequence is returned
+ when no more rows are available.
+
+ The number of rows to fetch per call is specified by the parameter. If
+ it is not given, the cursor's arraysize determines the number of rows
+ to be fetched. The method should try to fetch as many rows as indicated
+ by the size parameter. If this is not possible due to the specified
+ number of rows not being available, fewer rows may be returned.
+
+ An Error (or subclass) exception is raised if the previous call to
+ execute*() did not produce any result set or no call was issued yet.
+
+ Note there are performance considerations involved with the size
+ parameter. For optimal performance, it is usually best to use the
+ arraysize attribute. If the size parameter is used, then it is best
+ for it to retain the same value from one fetchmany() call to the next.
+ """
+ if self._res is None:
+ raise ProgrammingError('No result set')
+ if size is None:
+ size = self.arraysize
+ rows = self._res.rows[self._index:self._index + size]
+ self._index += size
+ return rows
+
+
+ def fetchall(self):
+ """Fetch all (remaining) rows of a query result, returning them as a
+ sequence of sequences (e.g. a list of tuples). Note that the cursor's
+ arraysize attribute can affect the performance of this operation.
+
+ An Error (or subclass) exception is raised if the previous call to
+ execute*() did not produce any result set or no call was issued yet.
+ """
+ if self._res is None:
+ raise ProgrammingError('No result set')
+ if not self._res.rows:
+ return []
+ rows = self._res.rows[self._index:]
+ self._index = len(self._res)
+ return rows
+
+
+ def setinputsizes(self, sizes):
+ """This can be used before a call to execute*() to predefine memory
+ areas for the operation's parameters.
+
+ sizes is specified as a sequence -- one item for each input parameter.
+ The item should be a Type Object that corresponds to the input that
+ will be used, or it should be an integer specifying the maximum length
+ of a string parameter. If the item is None, then no predefined memory
+ area will be reserved for that column (this is useful to avoid
+ predefined areas for large inputs).
+
+ This method would be used before the execute*() method is invoked.
+
+ Implementations are free to have this method do nothing and users are
+ free to not use it.
+ """
+ pass
+
+
+ def setoutputsize(self, size, column=None):
+ """Set a column buffer size for fetches of large columns (e.g. LONGs,
+ BLOBs, etc.). The column is specified as an index into the result
+ sequence. Not specifying the column will set the default size for all
+ large columns in the cursor.
+
+ This method would be used before the execute*() method is invoked.
+
+ Implementations are free to have this method do nothing and users are
+ free to not use it.
+ """
+ pass
+
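+
+# A minimal sketch of cursor-style fetching with the class above: arraysize (or
+# an explicit size argument) controls how many rows each fetchmany() call
+# returns, and an empty batch signals exhaustion. The query is only an example.
+def _fetch_example(cursor):
+    cursor.execute('Any X WHERE X is EUser')
+    cursor.arraysize = 10
+    rows = []
+    while True:
+        batch = cursor.fetchmany()
+        if not batch:
+            break
+        rows.extend(batch)
+    return rows
+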
+
+class LogCursor(Cursor):
+ """override the standard cursor to log executed queries"""
+
+ def execute(self, operation, parameters=None, eid_key=None, build_descr=True):
+ """override the standard cursor to log executed queries"""
+ tstart, cstart = time(), clock()
+ rset = Cursor.execute(self, operation, parameters, eid_key, build_descr)
+ self.connection.executed_queries.append((operation, parameters,
+ time() - tstart, clock() - cstart))
+ return rset
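+
+# A minimal sketch showing how query logging is enabled: passing log=True in the
+# connection properties makes the connection use LogCursor and accumulate
+# (query, args, wall clock time, cpu time) tuples in cnx.executed_queries.
+# The application id, login and password are hypothetical.
+def _query_log_example():
+    props = ConnectionProperties('pyro', log=True)
+    cnx = connect(database='myapp', user='admin', password='secret',
+                  cnxprops=props)
+    cnx.cursor().execute('Any X WHERE X is EUser')
+    for rql, args, wallclock, cpu in cnx.executed_queries:
+        print '%.3fs (%.3fs cpu) %s %s' % (wallclock, cpu, rql, args)
+    cnx.close()
+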
+
diff -r 000000000000 -r b97547f5f1fa debian.etch/control
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian.etch/control Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,146 @@
+Source: erudi
+Section: web
+Priority: optional
+Maintainer: Logilab Packaging Team
+Uploaders: Sylvain Thenault
+Build-Depends: debhelper (>= 5.0.37.1), python (>=2.4), python-dev (>=2.4), python-central (>= 0.5)
+Standards-Version: 3.7.3
+XS-Python-Version: >= 2.4, << 2.6
+
+Package: erudi
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, erudi-server (= ${source:Version}), erudi-twisted (= ${source:Version}), erudi-client (= ${source:Version}), postgresql-8.1, postgresql-plpython-8.1, postgresql-contrib-8.1
+Description: the full Erudi knowledge management system
+ Erudi is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package will install all the components you need to run erudi on
+ a single machine. You can also deploy erudi by running the different
+ processes on different computers, in which case you need to install the
+ corresponding packages on the different hosts.
+
+
+Package: erudi-server
+Architecture: all
+Conflicts: erudi-server-common, python2.3-erudi-server
+Replaces: erudi-server-common, python2.3-erudi-server
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, erudi-common (= ${source:Version}), erudi-ctl (= ${source:Version}), python-indexer (>= 0.6.1), python-psycopg2
+Recommends: pyro, postgresql-8.1, postgresql-plpython-8.1, postgresql-contrib-8.1, erudi-documentation (= ${source:Version})
+Description: the Erudi repository server
+ Erudi is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides the repository server part of the system.
+ .
+ This package provides the repository server part of the library and
+ necessary shared data files such as the schema library.
+
+
+Package: erudi-twisted
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Provides: erudi-web-frontend
+Depends: ${python:Depends}, erudi-web (= ${source:Version}), erudi-ctl (= ${source:Version}), python-twisted-web2
+Recommends: pyro, erudi-documentation (= ${source:Version})
+Description: twisted interface for Erudi
+ Erudi is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides a twisted based HTTP server embedding the
+ adaptive web interface to the Erudi repository server.
+ .
+ This package provides only the twisted server part of the library.
+
+
+Package: erudi-web
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, erudi-common (= ${source:Version}), python-simplejson (>= 1.3), python-docutils, python-vobject, python-elementtree
+Recommends: fckeditor
+Description: web interface library for Erudi
+ Erudi is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides an adaptive web interface to the Erudi server.
+ You'll have to install the erudi-twisted package to serve this interface.
+ .
+ This package provides the web interface part of the library and
+ necessary shared data files such as default views, images...
+
+
+Package: erudi-common
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Conflicts: python2.3-erudi-common
+Replaces: python2.3-erudi-common
+Depends: ${python:Depends}, erudi-core (= ${source:Version}), python-logilab-mtconverter (>= 0.4.0), python-simpletal (>= 4.0), graphviz, gettext, python-lxml
+Recommends: python-psyco
+Description: common library for repository/web framework of the Erudi knowledge management
+ Erudi is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides common part of the library used by both repository
+ and web application code.
+
+
+Package: erudi-ctl
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, erudi-core (= ${source:Version})
+Conflicts: erudi-dev (<< ${source:Version})
+Description: all in one control script for the Erudi system
+ Erudi is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides a control script to create, upgrade, start,
+ stop, etc... an Erudi application. It also includes the init.d script
+ to automatically start and stop Erudi applications on boot or shutdown.
+
+
+Package: erudi-client
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, erudi-ctl (= ${source:Version}), pyro
+Description: a RQL command line client
+ Erudi is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides a RQL command line client using pyro to connect
+ to a repository server.
+
+
+Package: erudi-core
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, python-logilab-common (>= 0.35.3), python-yams (>= 0.20.0), python-rql (>= 0.20.2)
+Description: core library for the Erudi knowledge management framework
+ Erudi is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides the core part of the library used by anyone having
+ to do some erudi programming in Python
+
+
+Package: erudi-dev
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, erudi-server (= ${source:Version}), erudi-web (= ${source:Version}), python-pysqlite2
+Suggests: w3c-dtd-xhtml
+Description: test suite and development tools for Erudi
+ Erudi is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides the Erudi test suite and some development tools
+ helping in the creation of applications.
+
+
+Package: erudi-documentation
+Architecture: all
+Recommends: doc-base
+Description: documentation for the Erudi knowledge management tool
+ Erudi is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides the system's documentation.
diff -r 000000000000 -r b97547f5f1fa debian/changelog
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/changelog Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,5 @@
+cubicweb (2.99.0-1) unstable; urgency=low
+
+ * initial public release
+
+ -- Nicolas Chauvat Fri, 24 Oct 2008 23:01:21 +0200
diff -r 000000000000 -r b97547f5f1fa debian/compat
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/compat Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+5
diff -r 000000000000 -r b97547f5f1fa debian/control
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/control Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,143 @@
+Source: cubicweb
+Section: web
+Priority: optional
+Maintainer: Logilab Packaging Team
+Uploaders: Sylvain Thenault
+Build-Depends: debhelper (>= 5.0.37.1), python (>=2.4), python-dev (>=2.4), python-central (>= 0.5)
+Standards-Version: 3.7.3
+XS-Python-Version: >= 2.4, << 2.6
+
+Package: cubicweb
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, cubicweb-server (= ${source:Version}), cubicweb-twisted (= ${source:Version}), cubicweb-client (= ${source:Version}), postgresql-8.3, postgresql-plpython-8.3, postgresql-contrib-8.3
+Description: the full CubicWeb knowledge management system
+ CubicWeb is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package will install all the components you need to run cubicweb on
+ a single machine. You can also deploy cubicweb by running the different
+ processes on different computers, in which case you need to install the
+ corresponding packages on the different hosts.
+
+
+Package: cubicweb-server
+Architecture: all
+Conflicts: cubicweb-server-common, python2.3-cubicweb-server
+Replaces: cubicweb-server-common, python2.3-cubicweb-server
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-indexer (>= 0.6.1), python-psycopg2
+Recommends: pyro, postgresql-8.3, postgresql-plpython-8.3, postgresql-contrib-8.3, cubicweb-documentation (= ${source:Version})
+Description: the CubicWeb repository server
+ CubicWeb is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides the repository server part of the system.
+ .
+ This package provides the repository server part of the library and
+ necessary shared data files such as the schema library.
+
+
+Package: cubicweb-twisted
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Provides: cubicweb-web-frontend
+Depends: ${python:Depends}, cubicweb-web (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-twisted-web2
+Recommends: pyro, cubicweb-documentation (= ${source:Version})
+Description: twisted interface for CubicWeb
+ CubicWeb is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides a twisted based HTTP server embedding the
+ adaptive web interface to the CubicWeb repository server.
+ .
+ This package provides only the twisted server part of the library.
+
+
+Package: cubicweb-web
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, cubicweb-common (= ${source:Version}), python-simplejson (>= 1.3), python-docutils, python-vobject, python-elementtree
+Recommends: fckeditor
+Description: web interface library for CubicWeb
+ CubicWeb is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides an adaptive web interface to the CubicWeb server.
+ You'll have to install the cubicweb-twisted package to serve this interface.
+ .
+ This package provides the web interface part of the library and
+ necessary shared data files such as default views, images...
+
+
+Package: cubicweb-common
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, cubicweb-core (= ${source:Version}), python-logilab-mtconverter (>= 0.4.0), python-simpletal (>= 4.0), graphviz, gettext, python-lxml
+Recommends: python-psyco
+Description: common library for repository/web framework of the CubicWeb knowledge management
+ CubicWeb is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides common part of the library used by both repository
+ and web application code.
+
+
+Package: cubicweb-ctl
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, cubicweb-core (= ${source:Version})
+Description: all in one control script for the CubicWeb system
+ CubicWeb is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides a control script to create, upgrade, start,
+ stop, etc... a CubicWeb application. It also includes the init.d script
+ to automatically start and stop CubicWeb applications on boot or shutdown.
+
+
+Package: cubicweb-client
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, cubicweb-ctl (= ${source:Version}), pyro
+Description: a RQL command line client
+ CubicWeb is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides a RQL command line client using pyro to connect
+ to a repository server.
+
+
+Package: cubicweb-core
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, python-logilab-common (>= 0.35.3), python-yams (>= 0.20.0), python-rql (>= 0.20.2)
+Description: core library for the CubicWeb knowledge management framework
+ CubicWeb is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides the core part of the library used by anyone having
+ to do some cubicweb programming in Python
+
+
+Package: cubicweb-dev
+Architecture: all
+XB-Python-Version: ${python:Versions}
+Depends: ${python:Depends}, cubicweb-server (= ${source:Version}), cubicweb-web (= ${source:Version}), python-pysqlite2
+Suggests: w3c-dtd-xhtml
+Description: test suite and development tools for CubicWeb
+ CubicWeb is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides the CubicWeb test suite and some development tools
+ helping in the creation of applications.
+
+
+Package: cubicweb-documentation
+Architecture: all
+Recommends: doc-base
+Description: documentation for the CubicWeb knowledge management tool
+ CubicWeb is an entities / relations based knowledge management system
+ developed at Logilab.
+ .
+ This package provides the system's documentation.
diff -r 000000000000 -r b97547f5f1fa debian/copyright
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/copyright Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,27 @@
+This package was debianized by Logilab.
+
+
+Upstream Author:
+
+ Logilab
+
+Copyright:
+
+Copyright (c) 2003-2008 LOGILAB S.A. (Paris, FRANCE).
+http://www.logilab.fr/ -- mailto:contact@logilab.fr
+
+This program is free software; you can redistribute it and/or modify it under
+the terms of the GNU Lesser General Public License as published by the Free
+Software Foundation; either version 2 of the License, or (at your option) any
+later version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT
+ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with
+this program; if not, write to the Free Software Foundation, Inc.,
+51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
+
+On Debian systems, the complete text of the GNU Lesser General Public License
+may be found in '/usr/share/common-licenses/LGPL'.
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-client.dirs
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-client.dirs Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+usr/lib/python2.4/site-packages/cubicweb/
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-common.dirs
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-common.dirs Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,4 @@
+usr/lib/python2.4/site-packages/cubicweb
+usr/lib/python2.4/site-packages/cubicweb/common
+usr/share/cubicweb/cubes/shared
+usr/share/doc/cubicweb-common
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-common.postinst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-common.postinst Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,12 @@
+#! /bin/sh -e
+
+if [ "$1" = configure ]; then
+ # XXX bw compat: erudi -> cubicweb migration
+ if [ -e "/usr/share/erudi/templates/" ]; then
+ mv /usr/share/erudi/templates/* /usr/share/cubicweb/cubes/
+ echo 'moved /usr/share/erudi/templates/* to /usr/share/cubicweb/cubes/'
+ fi
+fi
+#DEBHELPER#
+
+exit 0
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-core.dirs
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-core.dirs Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,2 @@
+usr/lib/python2.4/site-packages/cubicweb
+usr/share/doc/cubicweb-core
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-ctl.bash_completion
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-ctl.bash_completion Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,116 @@
+# -*- shell-script -*-
+
+_ec_commands()
+{
+ local commands
+ commands="$("$ec" listcommands 2>/dev/null)" || commands=""
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$commands' -- "$cur"))
+}
+
+_ec()
+{
+ local cur prev cmd cmd_index opts i
+ local ec="$1"
+
+ COMPREPLY=()
+ cur="$2"
+ prev="$3"
+
+ # searching for the command
+ # (first non-option argument that doesn't follow a global option that
+ # receives an argument)
+ for ((i=1; $i<=$COMP_CWORD; i++)); do
+ if [[ ${COMP_WORDS[i]} != -* ]]; then
+ cmd="${COMP_WORDS[i]}"
+ cmd_index=$i
+ break
+ fi
+ done
+
+ if [[ "$cur" == -* ]]; then
+ if [ -z "$cmd" ]; then
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '--help' -- "$cur"))
+ else
+ options="$("$ec" listcommands "$cmd" 2>/dev/null)" || options=""
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options' -- "$cur"))
+ fi
+ return
+ fi
+
+ if [ -z "$cmd" ] || [ $COMP_CWORD -eq $i ]; then
+ _ec_commands
+ return
+ fi
+
+ # try to generate completion candidates for whatever command the user typed
+ if _ec_command_specific; then
+ return
+ fi
+}
+
+_ec_command_specific()
+{
+ if [ "$(type -t "_ec_cmd_$cmd")" = function ]; then
+ "_ec_cmd_$cmd"
+ return 0
+ fi
+
+ case "$cmd" in
+ client)
+ if [ "$prev" == "-b" ] || [ "$prev" == "--batch" ]; then
+ COMPREPLY=( $( compgen -o filenames -G "$cur*" ) )
+ return
+ fi
+ options="$("$ec" listcommands "$cmd" 2>/dev/null)" || options=""
+ instances="$("$ec" listinstances 2>/dev/null)" || instances=""
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options $instances' -- "$cur"))
+ ;;
+ db-dump)
+ if [ "$prev" == "-o" ] || [ "$prev" == "--output" ]; then
+ COMPREPLY=( $( compgen -o filenames -G "$cur*" ) )
+ return
+ fi
+ options="$("$ec" listcommands "$cmd" 2>/dev/null)" || options=""
+ instances="$("$ec" listinstances 2>/dev/null)" || instances=""
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options $instances' -- "$cur"))
+ ;;
+ # commands with template as argument
+ i18nupdate)
+ cubes="$("$ec" listcubes 2>/dev/null)" || cubes=""
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options $cubes' -- "$cur"))
+ ;;
+ # generic commands with instance as argument
+ start|stop|reload|restart|upgrade|start-repository|db-create|db-init|db-check|db-grant-user)
+ options="$("$ec" listcommands "$cmd" 2>/dev/null)" || options=""
+ instances="$("$ec" listinstances 2>/dev/null)" || instances=""
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options $instances' -- "$cur"))
+ ;;
+ # generic commands without argument
+ list|newtemplate|i18nlibupdate|live-server)
+ options="$("$ec" listcommands "$cmd" 2>/dev/null)" || options=""
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options $instances' -- "$cur"))
+ ;;
+ # generic commands without option
+ shell|i18ncompile|delete|status|schema-sync)
+ instances="$("$ec" listinstances 2>/dev/null)" || instances=""
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options $instances' -- "$cur"))
+ ;;
+ # XXX should do better
+ create)
+ options="$("$ec" listcommands "$cmd" 2>/dev/null)" || options=""
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options $instances' -- "$cur"))
+ ;;
+ db-copy|db-restore|mboximport)
+ instances="$("$ec" listinstances 2>/dev/null)" || instances=""
+ COMPREPLY=(${COMPREPLY[@]:-} $(compgen -W '$options $instances' -- "$cur"))
+ ;;
+ *)
+ return 1
+ ;;
+ esac
+
+ return 0
+}
+
+complete -o bashdefault -o default -F _ec cubicweb-ctl 2>/dev/null \
+ || complete -o default -F _ec cubicweb-ctl
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-ctl.cubicweb.init
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-ctl.cubicweb.init Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,15 @@
+#!/bin/sh -e
+
+### BEGIN INIT INFO
+# Provides: cubicweb
+# Required-Start: $syslog $local_fs $network
+# Required-Stop: $syslog $local_fs $network
+# Should-Start: $postgresql $pyro-nsd
+# Should-Stop: $postgresql $pyro-nsd
+# Default-Start: 2 3 4 5
+# Default-Stop: 0 1 6
+# Short-Description: Start cubicweb application at boot time
+### END INIT INFO
+
+cd /tmp
+/usr/bin/cubicweb-ctl $1 --force
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-ctl.dirs
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-ctl.dirs Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+usr/lib/python2.4/site-packages/cubicweb/
+etc/init.d
+etc/cubicweb.d
+etc/bash_completion.d
+usr/bin
+usr/share/doc/cubicweb-ctl
+var/run/cubicweb
+var/log/cubicweb
+var/lib/cubicweb/backup
+var/lib/cubicweb/instances
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-ctl.logrotate
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-ctl.logrotate Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,17 @@
+/var/log/cubicweb/*.log {
+ weekly
+ missingok
+ rotate 10
+ compress
+ delaycompress
+ notifempty
+ create 640 root adm
+ sharedscripts
+ postrotate
+ if [ -x /usr/sbin/invoke-rc.d ]; then \
+ invoke-rc.d cubicweb reload > /dev/null; \
+ else \
+ /etc/init.d/cubicweb reload > /dev/null; \
+ fi; \
+ endscript
+}
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-ctl.manpages
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-ctl.manpages Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+man/cubicweb-ctl.1
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-ctl.postinst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-ctl.postinst Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,35 @@
+#! /bin/sh -e
+
+case "$1" in
+ configure|abort-upgrade|abort-remove|abort-deconfigure)
+ update-rc.d cubicweb defaults >/dev/null
+ ;;
+ *)
+ echo "postinst called with unknown argument \`$1'" >&2
+ exit 1
+ ;;
+esac
+
+if [ "$1" = configure ]; then
+ # XXX bw compat: erudi -> cubicweb migration
+ if [ -e "/etc/erudi.d/" ]; then
+ mv /etc/erudi.d/* /etc/cubicweb.d/
+ echo 'moved /etc/erudi.d/* to /etc/cubicweb.d/'
+ fi
+ if [ -e "/var/log/erudi/" ]; then
+ mv /var/log/erudi/* /var/log/cubicweb/
+ echo 'moved /var/log/erudi/* to /var/log/cubicweb/'
+ fi
+ if [ -e "/var/lib/erudi/backup" ]; then
+ mv /var/lib/erudi/backup/* /var/lib/cubicweb/backup/
+ echo 'moved /var/lib/erudi/backup/* to /var/lib/cubicweb/backup/'
+ fi
+ if [ -e "/var/lib/erudi/instances" ]; then
+ mv /var/lib/erudi/instances/* /var/lib/cubicweb/instances/
+ echo 'moved /var/lib/erudi/instances/* to /var/lib/cubicweb/instances/'
+ fi
+fi
+
+#DEBHELPER#
+
+exit 0
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-ctl.postrm
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-ctl.postrm Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,8 @@
+#!/bin/sh -e
+if [ "$1" = "purge" ] ; then
+ update-rc.d cubicweb remove >/dev/null
+fi
+
+#DEBHELPER#
+
+exit 0
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-ctl.prerm
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-ctl.prerm Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,14 @@
+#! /bin/sh -e
+
+case "$1" in
+ purge)
+ rm -rf /etc/cubicweb.d/
+ rm -rf /var/run/cubicweb/
+ rm -rf /var/log/cubicweb/
+ rm -rf /var/lib/cubicweb/
+ ;;
+esac
+
+#DEBHELPER#
+
+exit 0
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-dev.dirs
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-dev.dirs Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,7 @@
+usr/lib/python2.4/site-packages/cubicweb
+usr/lib/python2.4/site-packages/cubicweb/common
+usr/lib/python2.4/site-packages/cubicweb/web
+usr/lib/python2.4/site-packages/cubicweb/server
+usr/lib/python2.4/site-packages/cubicweb/sobjects
+usr/lib/python2.4/site-packages/cubicweb/entities
+usr/share/doc/cubicweb-dev
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-doc
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-doc Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,9 @@
+Document: cubicweb-doc
+Title: CubicWeb documentation
+Author: Logilab
+Abstract: Some base documentation for CubicWeb users and developers
+Section: Apps/Programming
+
+Format: HTML
+Index: /usr/share/doc/cubicweb-documentation/index.html
+Files: /usr/share/doc/cubicweb-documentation/*.html
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-documentation.dirs
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-documentation.dirs Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,3 @@
+usr/share/doc/cubicweb-documentation/
+usr/share/doc/cubicweb-documentation/devmanual_fr
+usr/share/doc-base/
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-documentation.install
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-documentation.install Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+debian/cubicweb-doc usr/share/doc-base/
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-documentation.postinst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-documentation.postinst Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,13 @@
+#! /bin/sh -e
+#
+
+if [ "$1" = configure ]; then
+ if which install-docs >/dev/null 2>&1; then
+ install-docs -i /usr/share/doc-base/cubicweb-doc
+ fi
+fi
+
+
+#DEBHELPER#
+
+exit 0
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-documentation.prerm
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-documentation.prerm Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,12 @@
+#! /bin/sh -e
+#
+
+if [ "$1" = remove -o "$1" = upgrade ]; then
+ if which install-docs >/dev/null 2>&1; then
+ install-docs -r cubicweb-doc
+ fi
+fi
+
+#DEBHELPER#
+
+exit 0
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-server.dirs
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-server.dirs Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,3 @@
+usr/lib/python2.4/site-packages/cubicweb/
+usr/share/cubicweb
+usr/share/doc/cubicweb-server
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-server.postinst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-server.postinst Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+#! /bin/sh -e
+
+if [ -x "/etc/init.d/cubicweb-ctl" ]; then
+ invoke-rc.d cubicweb-ctl restart || true
+fi
+
+
+#DEBHELPER#
+
+exit 0
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-server.prerm
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-server.prerm Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,13 @@
+#! /bin/sh -e
+
+case "$1" in
+ remove)
+ if [ -x "/etc/init.d/cubicweb-ctl" ]; then
+ invoke-rc.d cubicweb-ctl stop || true
+ fi
+ ;;
+esac
+
+#DEBHELPER#
+
+exit 0
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-twisted.dirs
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-twisted.dirs Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,3 @@
+usr/lib/python2.4/site-packages
+usr/lib/python2.4/site-packages/cubicweb
+usr/share/doc/cubicweb-twisted
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-twisted.postinst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-twisted.postinst Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,10 @@
+#! /bin/sh -e
+
+if [ -x "/etc/init.d/cubicweb-ctl" ]; then
+ invoke-rc.d cubicweb-ctl restart || true
+fi
+
+
+#DEBHELPER#
+
+exit 0
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-twisted.prerm
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-twisted.prerm Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,13 @@
+#! /bin/sh -e
+
+case "$1" in
+ remove)
+ if [ -x "/etc/init.d/cubicweb-ctl" ]; then
+ invoke-rc.d cubicweb-ctl stop || true
+ fi
+ ;;
+esac
+
+#DEBHELPER#
+
+exit 0
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-web.dirs
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-web.dirs Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,4 @@
+usr/lib/python2.4/site-packages/
+usr/lib/python2.4/site-packages/cubicweb
+usr/share/cubicweb/cubes/shared
+usr/share/doc/cubicweb-web
diff -r 000000000000 -r b97547f5f1fa debian/cubicweb-web.postinst
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/cubicweb-web.postinst Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,7 @@
+#! /bin/sh -e
+
+ln -sf /usr/share/fckeditor/fckeditor.js /usr/share/cubicweb/cubes/shared/data
+
+#DEBHELPER#
+
+exit 0
diff -r 000000000000 -r b97547f5f1fa debian/pycompat
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/pycompat Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,1 @@
+2
diff -r 000000000000 -r b97547f5f1fa debian/rules
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/debian/rules Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,112 @@
+#!/usr/bin/make -f
+# Sample debian/rules that uses debhelper.
+# GNU copyright 1997 to 1999 by Joey Hess.
+
+# Uncomment this to turn on verbose mode.
+#export DH_VERBOSE=1
+build: build-stamp
+build-stamp:
+ dh_testdir
+ # XXX doesn't work if logilab-doctools, logilab-xml are not in build depends
+ # and I can't get pbuilder find them in its chroot :(
+ #cd doc && make
+ python setup.py -q build
+ touch build-stamp
+
+clean:
+ dh_testdir
+ dh_testroot
+ rm -f build-stamp configure-stamp
+ rm -rf build
+ rm -rf debian/cubicweb-*/
+ find . -name "*.pyc" | xargs rm -f
+ dh_clean
+
+install: build
+ dh_testdir
+ dh_testroot
+ dh_clean -k
+ dh_installdirs
+ ########## core package #############################################
+ # put :
+ # * all the python library and data in cubicweb-core
+ # * scripts in cubicweb-server
+ #
+ # pick from each latter to construct each package
+ python setup.py -q install_lib --no-compile --install-dir=debian/cubicweb-core/usr/lib/python2.4/site-packages/
+ python setup.py -q install_data --install-dir=debian/cubicweb-core/usr/
+ python setup.py -q install_scripts --install-dir=debian/cubicweb-server/usr/bin/
+ ########## common package #############################################
+ mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/common/ debian/cubicweb-common/usr/lib/python2.4/site-packages/cubicweb
+ mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/entities/ debian/cubicweb-common/usr/lib/python2.4/site-packages/cubicweb
+ # data
+ mv debian/cubicweb-core/usr/share/cubicweb/cubes/shared/i18n debian/cubicweb-common/usr/share/cubicweb/cubes/shared/
+ touch debian/cubicweb-common/usr/share/cubicweb/cubes/__init__.py
+ ########## server package #############################################
+ # library
+ mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/server/ debian/cubicweb-server/usr/lib/python2.4/site-packages/cubicweb
+ mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/sobjects/ debian/cubicweb-server/usr/lib/python2.4/site-packages/cubicweb
+ # data
+ mv debian/cubicweb-core/usr/share/cubicweb/schemas/ debian/cubicweb-server/usr/share/cubicweb/
+ mv debian/cubicweb-core/usr/share/cubicweb/migration/ debian/cubicweb-server/usr/share/cubicweb/
+ ########## twisted package ############################################
+ # library
+ mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/etwist/ debian/cubicweb-twisted/usr/lib/python2.4/site-packages/cubicweb/
+ ########## web package ################################################
+ # library
+ mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/web/ debian/cubicweb-web/usr/lib/python2.4/site-packages/cubicweb/
+ # data / web documentation
+ mv debian/cubicweb-core/usr/share/cubicweb/cubes/shared/data debian/cubicweb-web/usr/share/cubicweb/cubes/shared/
+ mv debian/cubicweb-core/usr/share/cubicweb/cubes/shared/wdoc debian/cubicweb-web/usr/share/cubicweb/cubes/shared/
+ ########## ctl package ################################################
+ # scripts
+ mv debian/cubicweb-server/usr/bin/cubicweb-ctl debian/cubicweb-ctl/usr/bin/
+ mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/cwctl.py debian/cubicweb-ctl/usr/lib/python2.4/site-packages/cubicweb
+ mv debian/cubicweb-ctl.bash_completion debian/cubicweb-ctl/etc/bash_completion.d/cubicweb-ctl
+ ########## client package #############################################
+ # library
+ mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/hercule.py debian/cubicweb-client/usr/lib/python2.4/site-packages/cubicweb
+ ########## dev package ################################################
+ # devtools package
+ mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/devtools/ debian/cubicweb-dev/usr/lib/python2.4/site-packages/cubicweb/
+ # tests directories
+ mv debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/test debian/cubicweb-dev/usr/lib/python2.4/site-packages/cubicweb/
+ mv debian/cubicweb-common/usr/lib/python2.4/site-packages/cubicweb/common/test debian/cubicweb-dev/usr/lib/python2.4/site-packages/cubicweb/common/
+ mv debian/cubicweb-server/usr/lib/python2.4/site-packages/cubicweb/server/test debian/cubicweb-dev/usr/lib/python2.4/site-packages/cubicweb/server/
+ mv debian/cubicweb-server/usr/lib/python2.4/site-packages/cubicweb/sobjects/test debian/cubicweb-dev/usr/lib/python2.4/site-packages/cubicweb/sobjects/
+ mv debian/cubicweb-web/usr/lib/python2.4/site-packages/cubicweb/web/test debian/cubicweb-dev/usr/lib/python2.4/site-packages/cubicweb/web/
+ ########## documentation package ######################################
+ cp doc/*.html doc/*.txt debian/cubicweb-documentation/usr/share/doc/cubicweb-documentation/
+ cp doc/devmanual_fr/*.html doc/devmanual_fr/*.txt doc/devmanual_fr/*.png debian/cubicweb-documentation/usr/share/doc/cubicweb-documentation/devmanual_fr
+ ########## core package ###############################################
+ # small cleanup
+ rm -rf debian/cubicweb-core/usr/share/cubicweb/
+ # undistributed for now
+ rm -rf debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/goa
+ rm -rf debian/cubicweb-core/usr/lib/python2.4/site-packages/cubicweb/wsgi
+
+# Build architecture-independent files here.
+binary-indep: build install
+ dh_testdir
+ dh_testroot -i
+ dh_install -i
+ dh_pycentral -i
+ dh_python -i
+ dh_installinit -i -n --name cubicweb -u"defaults 99"
+ dh_installlogrotate -i
+ dh_installdocs -i -A README
+ dh_installman -i
+ dh_installchangelogs -i
+ dh_link -i
+ dh_compress -i -X.py -X.ini -X.xml
+ dh_fixperms -i
+ dh_installdeb -i
+ dh_gencontrol -i
+ dh_md5sums -i
+ dh_builddeb -i
+
+binary-arch:
+
+binary: binary-indep
+.PHONY: build clean binary binary-indep binary-arch
+
diff -r 000000000000 -r b97547f5f1fa devtools/__init__.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/__init__.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,345 @@
+"""Test tools for cubicweb
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import os
+import logging
+from os.path import (abspath, join, exists, basename, dirname, normpath, split,
+ isfile, isabs)
+
+from mx.DateTime import strptime, DateTimeDelta
+
+from cubicweb import CW_SOFTWARE_ROOT, ConfigurationError
+from cubicweb.toolsutils import read_config
+from cubicweb.cwconfig import CubicWebConfiguration, merge_options
+from cubicweb.server.serverconfig import ServerConfiguration
+from cubicweb.etwist.twconfig import TwistedConfiguration
+
+# validators are used to validate (XML, DTD, whatever) view's content
+# available validators are:
+#   'dtd' : validates XML + declared DTD
+#   'xml' : guarantees XML is well formed
+#   None  : do not try to validate anything
+VIEW_VALIDATORS = {}
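+# for instance, a cube's test module could request strict validation for one
+# of its views (the view id below is hypothetical):
+#VIEW_VALIDATORS['my-custom-view'] = 'dtd'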
+BASE_URL = 'http://testing.fr/cubicweb/'
+DEFAULT_SOURCES = {'system': {'adapter' : 'native',
+ 'db-encoding' : 'UTF-8', #'ISO-8859-1',
+ 'db-user' : u'admin',
+ 'db-password' : 'gingkow',
+ 'db-name' : 'tmpdb',
+ 'db-driver' : 'sqlite',
+ 'db-host' : None,
+ },
+ 'admin' : {'login': u'admin',
+ 'password': u'gingkow',
+ },
+ }
+
+class TestServerConfiguration(ServerConfiguration):
+ mode = 'test'
+ set_language = False
+ read_application_schema = False
+ bootstrap_schema = False
+ init_repository = True
+ options = merge_options(ServerConfiguration.options + (
+ ('anonymous-user',
+ {'type' : 'string',
+ 'default': None,
+ 'help': 'login of the CubicWeb user account to use for anonymous user (if you want to allow anonymous)',
+ 'group': 'main', 'inputlevel': 1,
+ }),
+ ('anonymous-password',
+ {'type' : 'string',
+ 'default': None,
+ 'help': 'password of the CubicWeb user account matching login',
+ 'group': 'main', 'inputlevel': 1,
+ }),
+ ))
+
+ if not os.environ.get('APYCOT_ROOT'):
+ REGISTRY_DIR = normpath(join(CW_SOFTWARE_ROOT, '../cubes'))
+
+ def __init__(self, appid, log_threshold=logging.CRITICAL+10):
+ ServerConfiguration.__init__(self, appid)
+ self.global_set_option('log-file', None)
+ self.init_log(log_threshold, force=True)
+ # need this, usually triggered by cubicweb-ctl
+ self.load_cwctl_plugins()
+
+ anonymous_user = TwistedConfiguration.anonymous_user.im_func
+
+ @property
+ def apphome(self):
+ if exists(self.appid):
+ return abspath(self.appid)
+ # application cube test
+ return abspath('..')
+ appdatahome = apphome
+
+ def main_config_file(self):
+ """return application's control configuration file"""
+ return join(self.apphome, '%s.conf' % self.name)
+
+ def instance_md5_version(self):
+ return ''
+
+ def bootstrap_cubes(self):
+ try:
+ super(TestServerConfiguration, self).bootstrap_cubes()
+ except IOError:
+ # no cubes
+ self.init_cubes( () )
+
+ sourcefile = None
+ def sources_file(self):
+ """define in subclasses self.sourcefile if necessary"""
+ if self.sourcefile:
+ print 'Reading sources from', self.sourcefile
+ sourcefile = self.sourcefile
+ if not isabs(sourcefile):
+ sourcefile = join(self.apphome, sourcefile)
+ else:
+ sourcefile = super(TestServerConfiguration, self).sources_file()
+ return sourcefile
+
+ def sources(self):
+ """By default, tests run against the sqlite DB backend. One may use a
+ custom configuration by creating a 'sources' file in the test directory
+ from which tests are launched, or by specifying an alternative sources
+ file using self.sourcefile.
+ """
+ sources = super(TestServerConfiguration, self).sources()
+ if not sources:
+ sources = DEFAULT_SOURCES
+ return sources
+
+ def load_defaults(self):
+ super(TestServerConfiguration, self).load_defaults()
+ # note: don't call global set option here, OptionManager may not yet be initialized
+ # add anonymous user
+ self.set_option('anonymous-user', 'anon')
+ self.set_option('anonymous-password', 'anon')
+ # uncomment the line below if you want rql queries to be logged
+ #self.set_option('query-log-file', '/tmp/test_rql_log.' + `os.getpid()`)
+ self.set_option('sender-name', 'cubicweb-test')
+ self.set_option('sender-addr', 'cubicweb-test@logilab.fr')
+ try:
+ send_to = '%s@logilab.fr' % os.getlogin()
+ except OSError:
+ send_to = '%s@logilab.fr' % (os.environ.get('USER')
+ or os.environ.get('USERNAME')
+ or os.environ.get('LOGNAME'))
+ self.set_option('sender-addr', send_to)
+ self.set_option('default-dest-addrs', send_to)
+ self.set_option('base-url', BASE_URL)
+
+
+class BaseApptestConfiguration(TestServerConfiguration, TwistedConfiguration):
+ repo_method = 'inmemory'
+ options = merge_options(TestServerConfiguration.options + TwistedConfiguration.options)
+ cubicweb_vobject_path = TestServerConfiguration.cubicweb_vobject_path | TwistedConfiguration.cubicweb_vobject_path
+ cube_vobject_path = TestServerConfiguration.cube_vobject_path | TwistedConfiguration.cube_vobject_path
+
+ def available_languages(self, *args):
+ return ('en', 'fr', 'de')
+
+ def ext_resources_file(self):
+ """return application's external resources file"""
+ return join(self.apphome, 'data', 'external_resources')
+
+ def pyro_enabled(self):
+ # but export PYRO_MULTITHREAD=0 or you get problems with sqlite and threads
+ return True
+
+
+class ApptestConfiguration(BaseApptestConfiguration):
+
+ def __init__(self, appid, log_threshold=logging.CRITICAL, sourcefile=None):
+ BaseApptestConfiguration.__init__(self, appid, log_threshold=log_threshold)
+ self.init_repository = sourcefile is None
+ self.sourcefile = sourcefile
+ import re
+ self.global_set_option('embed-allowed', re.compile('.*'))
+
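+# an illustrative sketch of the docstring above: run tests against an explicit
+# sources file instead of the default sqlite setup; the 'data' app id and the
+# 'data/sources' path are assumptions, not fixed conventions
+def _example_custom_sources_config():
+    return ApptestConfiguration('data', sourcefile='data/sources')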
+
+class RealDatabaseConfiguration(ApptestConfiguration):
+ init_repository = False
+ sourcesdef = {'system': {'adapter' : 'native',
+ 'db-encoding' : 'UTF-8', #'ISO-8859-1',
+ 'db-user' : u'admin',
+ 'db-password' : 'gingkow',
+ 'db-name' : 'seotest',
+ 'db-driver' : 'postgres',
+ 'db-host' : None,
+ },
+ 'admin' : {'login': u'admin',
+ 'password': u'gingkow',
+ },
+ }
+
+ def __init__(self, appid, log_threshold=logging.CRITICAL, sourcefile=None):
+ ApptestConfiguration.__init__(self, appid)
+ self.init_repository = False
+
+
+ def sources(self):
+ """
+ Return the sources definition. This configuration is bound to an
+ existing database, so self.sourcesdef is used directly instead of
+ the default sqlite setup or a 'sources' file.
+ """
+ self._sources = self.sourcesdef
+ return self._sources
+
+
+def buildconfig(dbuser, dbpassword, dbname, adminuser, adminpassword, dbhost=None):
+ """convenience function that builds a real-db configuration class"""
+ sourcesdef = {'system': {'adapter' : 'native',
+ 'db-encoding' : 'UTF-8', #'ISO-8859-1',
+ 'db-user' : dbuser,
+ 'db-password' : dbpassword,
+ 'db-name' : dbname,
+ 'db-driver' : 'postgres',
+ 'db-host' : dbhost,
+ },
+ 'admin' : {'login': adminuser,
+ 'password': adminpassword,
+ },
+ }
+ return type('MyRealDBConfig', (RealDatabaseConfiguration,),
+ {'sourcesdef': sourcesdef})
+
+def loadconfig(filename):
+ """convenience function that builds a real-db configuration class
+ from a file
+ """
+ return type('MyRealDBConfig', (RealDatabaseConfiguration,),
+ {'sourcesdef': read_config(filename)})
+
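+# a minimal usage sketch for the helpers above, assuming made-up credentials
+# and database names; a test case would typically plug the returned class in
+# as its configcls
+def _example_real_db_config():
+    """build a RealDatabaseConfiguration subclass bound to an existing
+    postgres database (every value below is a placeholder)"""
+    return buildconfig(u'dbuser', 'dbpassword', 'mydb', u'admin', 'adminpass')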
+
+class LivetestConfiguration(BaseApptestConfiguration):
+ init_repository = False
+
+ def __init__(self, cube=None, sourcefile=None, pyro_name=None,
+ log_threshold=logging.CRITICAL):
+ TestServerConfiguration.__init__(self, cube, log_threshold=log_threshold)
+ self.appid = pyro_name or cube
+ # don't change this, else some symlink problems may arise in some
+ # environment (e.g. mine (syt) ;o)
+ # XXX I'm afraid this test will prevent running tests from a production
+ # environment
+ self._sources = None
+ # application cube test
+ if cube is not None:
+ self.apphome = self.cube_dir(cube)
+ elif 'web' in os.getcwd().split(os.sep):
+ # web test
+ self.apphome = join(normpath(join(dirname(__file__), '..')), 'web')
+ else:
+ # application cube test
+ self.apphome = abspath('..')
+ self.sourcefile = sourcefile
+ self.global_set_option('realm', '')
+ self.use_pyro = pyro_name is not None
+
+ def pyro_enabled(self):
+ if self.use_pyro:
+ return True
+ else:
+ return False
+
+CubicWebConfiguration.cls_adjust_sys_path()
+
+def install_sqlite_path(querier):
+ """This patch hotfixes the following sqlite bug :
+ - http://www.sqlite.org/cvstrac/tktview?tn=1327,33
+ (some dates are returned as strings rather than date objects)
+ """
+ def wrap_execute(base_execute):
+ def new_execute(*args, **kwargs):
+ rset = base_execute(*args, **kwargs)
+ if rset.description:
+ found_date = False
+ for row, rowdesc in zip(rset, rset.description):
+ for cellindex, (value, vtype) in enumerate(zip(row, rowdesc)):
+ if vtype in ('Date', 'Datetime') and type(value) is unicode:
+ found_date = True
+ try:
+ row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S')
+ except:
+ row[cellindex] = strptime(value, '%Y-%m-%d')
+ if vtype == 'Time' and type(value) is unicode:
+ found_date = True
+ try:
+ row[cellindex] = strptime(value, '%H:%M:%S')
+ except:
+ # DateTime used as Time?
+ row[cellindex] = strptime(value, '%Y-%m-%d %H:%M:%S')
+ if vtype == 'Interval' and type(value) is int:
+ found_date = True
+ row[cellindex] = DateTimeDelta(0, 0, 0, value)
+ if not found_date:
+ break
+ return rset
+ return new_execute
+ querier.__class__.execute = wrap_execute(querier.__class__.execute)
+
+
+def init_test_database(driver='sqlite', configdir='data', config=None,
+ vreg=None):
+ """init a test database for a specific driver"""
+ from cubicweb.dbapi import in_memory_cnx
+ if vreg and not config:
+ config = vreg.config
+ config = config or TestServerConfiguration(configdir)
+ source = config.sources()
+ if driver == 'sqlite':
+ init_test_database_sqlite(config, source)
+ elif driver == 'postgres':
+ init_test_database_postgres(config, source)
+ else:
+ raise ValueError('no initialization function for driver %r' % driver)
+ config._cubes = None # avoid assertion error
+ repo, cnx = in_memory_cnx(vreg or config, unicode(source['admin']['login']),
+ source['admin']['password'] or 'xxx')
+ if driver == 'sqlite':
+ install_sqlite_path(repo.querier)
+ return repo, cnx
+
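+# illustrative sketch of the usual entry point above; 'data' stands for a
+# cube's test configuration directory and is only an assumption here
+def _example_open_sqlite_test_repo():
+    """return an in-memory repository/connection couple on the default
+    sqlite-backed test database"""
+    return init_test_database(driver='sqlite', configdir='data')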
+def init_test_database_postgres(config, source, vreg=None):
+ """initialize a fresh postgres database used for testing purposes"""
+ if config.init_repository:
+ from cubicweb.server import init_repository
+ init_repository(config, interactive=False, drop=True, vreg=vreg)
+
+def cleanup_sqlite(dbfile, removecube=False):
+ try:
+ os.remove(dbfile)
+ os.remove('%s-journal' % dbfile)
+ except OSError:
+ pass
+ if removecube:
+ try:
+ os.remove('%s-cube' % dbfile)
+ except OSError:
+ pass
+
+def init_test_database_sqlite(config, source, vreg=None):
+ """initialize a fresh sqlite database used for testing purposes"""
+ import shutil
+ # remove database file if it exists (actually I know driver == 'sqlite' :)
+ dbfile = source['system']['db-name']
+ cleanup_sqlite(dbfile)
+ cube = '%s-cube' % dbfile
+ if exists(cube):
+ shutil.copy(cube, dbfile)
+ else:
+ # initialize the database
+ from cubicweb.server import init_repository
+ init_repository(config, interactive=False, vreg=vreg)
+ shutil.copy(dbfile, cube)
diff -r 000000000000 -r b97547f5f1fa devtools/_apptest.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/_apptest.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,257 @@
+"""Hidden internals for the devtools.apptest module
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys, traceback
+
+from logilab.common.pytest import pause_tracing, resume_tracing
+
+import yams.schema
+
+from cubicweb.dbapi import repo_connect, ConnectionProperties, ProgrammingError
+from cubicweb.cwvreg import CubicWebRegistry
+
+from cubicweb.web.application import CubicWebPublisher
+from cubicweb.web import Redirect
+
+from cubicweb.devtools import ApptestConfiguration, init_test_database
+from cubicweb.devtools.fake import FakeRequest
+
+SYSTEM_ENTITIES = ('EGroup', 'EUser',
+ 'EFRDef', 'ENFRDef',
+ 'EConstraint', 'EConstraintType', 'EProperty',
+ 'EEType', 'ERType',
+ 'State', 'Transition', 'TrInfo',
+ 'RQLExpression',
+ )
+SYSTEM_RELATIONS = (
+ # virtual relation
+ 'identity',
+ # metadata
+ 'is', 'is_instance_of', 'owned_by', 'created_by', 'specializes',
+ # workflow related
+ 'state_of', 'transition_of', 'initial_state', 'allowed_transition',
+ 'destination_state', 'in_state', 'wf_info_for', 'from_state', 'to_state',
+ 'condition',
+ # permission
+ 'in_group', 'require_group', 'require_permission',
+ 'read_permission', 'update_permission', 'delete_permission', 'add_permission',
+ # eproperty
+ 'for_user',
+ # schema definition
+ 'relation_type', 'from_entity', 'to_entity',
+ 'constrained_by', 'cstrtype', 'widget',
+ # deducted from other relations
+ 'primary_email',
+ )
+
+def unprotected_entities(app_schema, strict=False):
+ """return the set of non-final entity types, excluding system entities
+ such as EGroup and EUser (unless strict is True)
+ """
+ if strict:
+ protected_entities = yams.schema.BASE_TYPES
+ else:
+ protected_entities = yams.schema.BASE_TYPES.union(set(SYSTEM_ENTITIES))
+ entities = set(app_schema.entities())
+ return entities - protected_entities
+
+
+def ignore_relations(*relations):
+ global SYSTEM_RELATIONS
+ SYSTEM_RELATIONS += relations
+
+class TestEnvironment(object):
+ """TestEnvironment defines a context (e.g. a config + a given connection) in
+ which the tests are executed
+ """
+
+ def __init__(self, appid, reporter=None, verbose=False,
+ configcls=ApptestConfiguration, requestcls=FakeRequest):
+ config = configcls(appid)
+ self.requestcls = requestcls
+ self.cnx = None
+ config.db_perms = False
+ source = config.sources()['system']
+ if verbose:
+ print "init test database ..."
+ self.vreg = vreg = CubicWebRegistry(config)
+ self.admlogin = source['db-user']
+ # restore database <=> init database
+ self.restore_database()
+ if verbose:
+ print "init done"
+ login = source['db-user']
+ config.repository = lambda x=None: self.repo
+ self.app = CubicWebPublisher(config, vreg=vreg)
+ self.verbose = verbose
+ schema = self.vreg.schema
+ # otherwise we may run into problems since email addresses are usually shared in app tests
+ # XXX should not be necessary anymore
+ schema.rschema('primary_email').set_rproperty('EUser', 'EmailAddress', 'composite', False)
+ self.deletable_entities = unprotected_entities(schema)
+
+ def restore_database(self):
+ """called by unittests' tearDown to restore the original database
+ """
+ try:
+ pause_tracing()
+ if self.cnx:
+ self.cnx.close()
+ source = self.vreg.config.sources()['system']
+ self.repo, self.cnx = init_test_database(driver=source['db-driver'],
+ vreg=self.vreg)
+ self._orig_cnx = self.cnx
+ resume_tracing()
+ except:
+ resume_tracing()
+ traceback.print_exc()
+ sys.exit(1)
+ # XXX cnx decoration is usually done by the repository authentication manager,
+ # necessary in authentication tests
+ self.cnx.vreg = self.vreg
+ self.cnx.login = source['db-user']
+ self.cnx.password = source['db-password']
+
+
+ def create_user(self, login, groups=('users',), req=None):
+ req = req or self.create_request()
+ cursor = self._orig_cnx.cursor(req)
+ rset = cursor.execute('INSERT EUser X: X login %(login)s, X upassword %(passwd)s,'
+ 'X in_state S WHERE S name "activated"',
+ {'login': unicode(login), 'passwd': login.encode('utf8')})
+ user = rset.get_entity(0, 0)
+ cursor.execute('SET X in_group G WHERE X eid %%(x)s, G name IN(%s)'
+ % ','.join(repr(g) for g in groups),
+ {'x': user.eid}, 'x')
+ user.clear_related_cache('in_group', 'subject')
+ self._orig_cnx.commit()
+ return user
+
+ def login(self, login):
+ if login == self.admlogin:
+ self.restore_connection()
+ else:
+ self.cnx = repo_connect(self.repo, unicode(login), str(login),
+ ConnectionProperties('inmemory'))
+ if login == self.vreg.config.anonymous_user()[0]:
+ self.cnx.anonymous_connection = True
+ return self.cnx
+
+ def restore_connection(self):
+ if self.cnx is not self._orig_cnx:
+ try:
+ self.cnx.close()
+ except ProgrammingError:
+ pass # already closed
+ self.cnx = self._orig_cnx
+
+ ############################################################################
+
+ def execute(self, rql, args=None, eidkey=None, req=None):
+ """execute <rql> with the current connection and return the resulting
+ result set
+ """
+ req = req or self.create_request(rql=rql)
+ return self.cnx.cursor(req).execute(unicode(rql), args, eidkey)
+
+ def create_request(self, rql=None, **kwargs):
+ """build and return a web request object (self.requestcls, a FakeRequest
+ by default), optionally initialized with <rql> and extra form parameters
+ """
+ if rql:
+ kwargs['rql'] = rql
+ req = self.requestcls(self.vreg, form=kwargs)
+ req.set_connection(self.cnx)
+ return req
+
+ def get_rset_and_req(self, rql, optional_args=None, args=None, eidkey=None):
+ """execute <rql>, build a result set, and return a couple (rset, req)
+ where req is a FakeRequest
+ """
+ return (self.execute(rql, args, eidkey),
+ self.create_request(rql=rql, **optional_args or {}))
+
+ def check_view(self, rql, vid, optional_args, template='main'):
+ """checks if vreg.view() raises an exception in this environment
+
+ If any exception is raised in this method, it will be considered
+ as a TestFailure
+ """
+ return self.call_view(vid, rql,
+ template=template, optional_args=optional_args)
+
+ def call_view(self, vid, rql, template='main', optional_args=None):
+ """shortcut for self.vreg.view()"""
+ assert template
+ if optional_args is None:
+ optional_args = {}
+ optional_args['vid'] = vid
+ req = self.create_request(rql=rql, **optional_args)
+ return self.vreg.main_template(req, template)
+
+ def call_edit(self, req):
+ """shortcut for self.app.edit()"""
+ controller = self.app.select_controller('edit', req)
+ try:
+ controller.publish()
+ except Redirect:
+ result = 'success'
+ else:
+ raise Exception('edit should raise Redirect on success')
+ req.cnx.commit()
+ return result
+
+ def iter_possible_views(self, req, rset):
+ """return the list of possible view ids for <rset>"""
+ for view in self.vreg.possible_views(req, rset):
+ if view.category == 'startupview':
+ continue
+ yield view.id
+ if rset.rowcount == 1:
+ yield 'edition'
+
+ def iter_startup_views(self, req):
+ """returns the list of startup views"""
+ for view in self.vreg.possible_views(req, None):
+ if view.category != 'startupview':
+ continue
+ yield view.id
+
+ def iter_possible_actions(self, req, rset):
+ """return the list of possible actions for <rset>"""
+ for action in self.vreg.possible_vobjects('actions', req, rset):
+ yield action
+
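+# illustrative sketch of how the helpers above are typically combined; the
+# 'data' app id is the usual test layout but remains an assumption, and the
+# 'list' view id is only used as an example
+def _example_environment_use():
+    env = TestEnvironment('data')
+    rset = env.execute('Any U WHERE U is EUser')
+    req = env.create_request(rql='Any U WHERE U is EUser', vid='list')
+    return rset, req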
+class ExistingTestEnvironment(TestEnvironment):
+
+ def __init__(self, appid, sourcefile, verbose=False):
+ config = ApptestConfiguration(appid, sourcefile=sourcefile)
+ if verbose:
+ print "init test database ..."
+ source = config.sources()['system']
+ self.vreg = CubicWebRegistry(config)
+ repo, self.cnx = init_test_database(driver=source['db-driver'],
+ vreg=self.vreg)
+ if verbose:
+ print "init done"
+ self.app = CubicWebPublisher(config, vreg=self.vreg)
+ self.verbose = verbose
+ # this is done when the publisher is opening a connection
+ self.cnx.vreg = self.vreg
+ login = source['db-user']
+
+ def setup(self, config=None):
+ """config is passed by TestSuite but is ignored in this environment"""
+ cursor = self.cnx.cursor()
+ self.last_eid = cursor.execute('Any X WHERE X creation_date D ORDERBY D DESC LIMIT 1').rows[0][0]
+
+ def cleanup(self):
+ """cancel inserted elements during tests"""
+ cursor = self.cnx.cursor()
+ cursor.execute('DELETE Any X WHERE X eid > %(x)s', {'x' : self.last_eid}, eid_key='x')
+ print "cleaning done"
+ self.cnx.commit()
+
diff -r 000000000000 -r b97547f5f1fa devtools/apptest.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/apptest.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,504 @@
+"""This module provides misc utilities to test applications
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from copy import deepcopy
+
+import simplejson
+
+from logilab.common.testlib import TestCase
+from logilab.common.pytest import nocoverage
+from logilab.common.umessage import message_from_string
+
+from cubicweb.devtools import init_test_database, TestServerConfiguration, ApptestConfiguration
+from cubicweb.devtools._apptest import TestEnvironment
+from cubicweb.devtools.fake import FakeRequest
+
+from cubicweb.dbapi import repo_connect, ConnectionProperties, ProgrammingError
+
+
+MAILBOX = []
+class Email:
+ def __init__(self, recipients, msg):
+ self.recipients = recipients
+ self.msg = msg
+
+ @property
+ def message(self):
+ return message_from_string(self.msg)
+
+ def __repr__(self):
+ return '<Email to %s with subject %s>' % (','.join(self.recipients),
+ self.message.get('Subject'))
+
+class MockSMTP:
+ def __init__(self, server, port):
+ pass
+ def close(self):
+ pass
+ def sendmail(self, helo_addr, recipients, msg):
+ MAILBOX.append(Email(recipients, msg))
+
+from cubicweb.server import hookhelper
+hookhelper.SMTP = MockSMTP
+
+
+def get_versions(self, checkversions=False):
+ """return a dictionary whose keys are the cubes used by this application
+ and whose values are their versions, including the cubicweb version. This
+ is a public method, not requiring a session id.
+
+ replace Repository.get_versions by this method if you don't want version
+ checking
+ """
+ vcconf = {'cubicweb': self.config.cubicweb_version()}
+ self.config.bootstrap_cubes()
+ for pk in self.config.cubes():
+ version = self.config.template_version(pk)
+ vcconf[pk] = version
+ self.config._cubes = None
+ return vcconf
+
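+# a minimal sketch of the replacement suggested in the docstring above; the
+# Repository class is passed in by the caller so no import path is assumed
+def _example_disable_version_checks(repository_class):
+    """make test repositories skip cube version checking"""
+    repository_class.get_versions = get_versions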
+
+@property
+def late_binding_env(self):
+ """builds TestEnvironment as late as possible"""
+ if not hasattr(self, '_env'):
+ self.__class__._env = TestEnvironment('data', configcls=self.configcls,
+ requestcls=self.requestcls)
+ return self._env
+
+
+class autoenv(type):
+ """automatically set environment on EnvBasedTC subclasses if necessary
+ """
+ def __new__(mcs, name, bases, classdict):
+ env = classdict.get('env')
+ # try to find env in one of the base classes
+ if env is None:
+ for base in bases:
+ env = getattr(base, 'env', None)
+ if env is not None:
+ classdict['env'] = env
+ break
+ if not classdict.get('__abstract__') and not classdict.get('env'):
+ classdict['env'] = late_binding_env
+ return super(autoenv, mcs).__new__(mcs, name, bases, classdict)
+
+
+class EnvBasedTC(TestCase):
+ """abstract class for tests using an apptest environment
+ """
+ __metaclass__ = autoenv
+ __abstract__ = True
+ env = None
+ configcls = ApptestConfiguration
+ requestcls = FakeRequest
+
+ # user / session management ###############################################
+
+ def user(self, req=None):
+ if req is None:
+ req = self.env.create_request()
+ return self.env.cnx.user(req)
+ else:
+ return req.user
+
+ def create_user(self, *args, **kwargs):
+ return self.env.create_user(*args, **kwargs)
+
+ def login(self, login):
+ return self.env.login(login)
+
+ def restore_connection(self):
+ self.env.restore_connection()
+
+ # db api ##################################################################
+
+ @nocoverage
+ def cursor(self, req=None):
+ return self.env.cnx.cursor(req or self.request())
+
+ @nocoverage
+ def execute(self, *args, **kwargs):
+ return self.env.execute(*args, **kwargs)
+
+ @nocoverage
+ def commit(self):
+ self.env.cnx.commit()
+
+ @nocoverage
+ def rollback(self):
+ try:
+ self.env.cnx.rollback()
+ except ProgrammingError:
+ pass
+
+ # other utilities #########################################################
+ def set_debug(self, debugmode):
+ from cubicweb.server import set_debug
+ set_debug(debugmode)
+
+ @property
+ def config(self):
+ return self.vreg.config
+
+ def session(self):
+ """return current server side session (using default manager account)"""
+ return self.env.repo._sessions[self.env.cnx.sessionid]
+
+ def request(self, *args, **kwargs):
+ """return a web interface request"""
+ return self.env.create_request(*args, **kwargs)
+
+ @nocoverage
+ def rset_and_req(self, *args, **kwargs):
+ return self.env.get_rset_and_req(*args, **kwargs)
+
+ def entity(self, rql, args=None, eidkey=None, req=None):
+ return self.execute(rql, args, eidkey, req=req).get_entity(0, 0)
+
+ def etype_instance(self, etype, req=None):
+ req = req or self.request()
+ e = self.env.vreg.etype_class(etype)(req, None, None)
+ e.eid = None
+ return e
+
+ def add_entity(self, etype, **kwargs):
+ rql = ['INSERT %s X' % etype]
+
+ # dict for replacement in RQL Request
+ rql_args = {}
+
+ if kwargs:
+ rql.append(':')
+ # dict to define new entities variables
+ entities = {}
+
+ # assignment part of the request
+ sub_rql = []
+ for key, value in kwargs.iteritems():
+ # entities
+ if hasattr(value, 'eid'):
+ new_value = "%s__" % key.upper()
+
+ entities[new_value] = value.eid
+ rql_args[new_value] = value.eid
+
+ sub_rql.append("X %s %s" % (key, new_value))
+ # final attributes
+ else:
+ sub_rql.append('X %s %%(%s)s' % (key, key))
+ rql_args[key] = value
+ rql.append(', '.join(sub_rql))
+
+
+ if entities:
+ rql.append('WHERE')
+ # WHERE part of the request (to link entities to their eid)
+ sub_rql = []
+ for key, value in entities.iteritems():
+ sub_rql.append("%s eid %%(%s)s" % (key, key))
+ rql.append(', '.join(sub_rql))
+
+ rql = ' '.join(rql)
+ rset = self.execute(rql, rql_args)
+ return rset.get_entity(0, 0)
+
+ def set_option(self, optname, value):
+ self.vreg.config.global_set_option(optname, value)
+
+ def pviews(self, req, rset):
+ return sorted((a.id, a.__class__) for a in self.vreg.possible_views(req, rset))
+
+ def pactions(self, req, rset, skipcategories=('addrelated', 'siteactions', 'useractions')):
+ return [(a.id, a.__class__) for a in self.vreg.possible_vobjects('actions', req, rset)
+ if a.category not in skipcategories]
+
+ def pactionsdict(self, req, rset, skipcategories=('addrelated', 'siteactions', 'useractions')):
+ res = {}
+ for a in self.vreg.possible_vobjects('actions', req, rset):
+ if a.category not in skipcategories:
+ res.setdefault(a.category, []).append(a.__class__)
+ return res
+
+ def paddrelactions(self, req, rset):
+ return [(a.id, a.__class__) for a in self.vreg.possible_vobjects('actions', req, rset)
+ if a.category == 'addrelated']
+
+ def remote_call(self, fname, *args):
+ """remote call simulation"""
+ dump = simplejson.dumps
+ args = [dump(arg) for arg in args]
+ req = self.request(mode='remote', fname=fname, pageid='123', arg=args)
+ ctrl = self.env.app.select_controller('json', req)
+ return ctrl.publish(), req
+
+ # default test setup and teardown #########################################
+
+ def setup_database(self):
+ pass
+
+ def setUp(self):
+ self.restore_connection()
+ session = self.session()
+ #self.maxeid = self.execute('Any MAX(X)')
+ session.set_pool()
+ self.maxeid = session.system_sql('SELECT MAX(eid) FROM entities').fetchone()[0]
+ self.app = self.env.app
+ self.vreg = self.env.app.vreg
+ self.schema = self.vreg.schema
+ self.vreg.config.mode = 'test'
+ # set default-dest-addrs to a dumb email address to avoid mailbox or
+ # mail queue pollution
+ self.set_option('default-dest-addrs', ['whatever'])
+ self.setup_database()
+ self.commit()
+ MAILBOX[:] = [] # reset mailbox
+
+ @nocoverage
+ def tearDown(self):
+ self.rollback()
+ # self.env.restore_database()
+ self.env.restore_connection()
+ self.session().unsafe_execute('DELETE Any X WHERE X eid > %s' % self.maxeid)
+ self.commit()
+
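+# a sketch of a typical cube test case built on EnvBasedTC; the 'Blog' entity
+# type and its 'title' attribute are hypothetical and would come from the
+# tested cube's schema (the class is built inside a function so it is not
+# collected as a real test)
+def _example_env_based_tc():
+    class BlogTC(EnvBasedTC):
+        def setup_database(self):
+            self.add_entity('Blog', title=u'production blog')
+        def test_blog_count(self):
+            self.assertEquals(len(self.execute('Any B WHERE B is Blog')), 1)
+    return BlogTC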
+
+# XXX
+try:
+ from cubicweb.web import Redirect
+ from urllib import unquote
+except ImportError:
+ pass # cubicweb-web not installed
+else:
+ class ControllerTC(EnvBasedTC):
+ def setUp(self):
+ super(ControllerTC, self).setUp()
+ self.req = self.request()
+ self.ctrl = self.env.app.select_controller('edit', self.req)
+
+ def publish(self, req):
+ assert req is self.ctrl.req
+ try:
+ result = self.ctrl.publish()
+ req.cnx.commit()
+ except Redirect:
+ req.cnx.commit()
+ raise
+ return result
+
+ def expect_redirect_publish(self, req=None):
+ if req is not None:
+ self.ctrl = self.env.app.select_controller('edit', req)
+ else:
+ req = self.req
+ try:
+ res = self.publish(req)
+ except Redirect, ex:
+ try:
+ path, params = ex.location.split('?', 1)
+ except:
+ path, params = ex.location, ""
+ req._url = path
+ cleanup = lambda p: (p[0], unquote(p[1]))
+ params = dict(cleanup(p.split('=', 1)) for p in params.split('&') if p)
+ return req.relative_path(False), params # path.rsplit('/', 1)[-1], params
+ else:
+ self.fail('expected a Redirect exception')
+
+
+def make_late_binding_repo_property(attrname):
+ @property
+ def late_binding(self):
+ """builds cnx as late as possible"""
+ if not hasattr(self, attrname):
+ # sets explicit test mode here to avoid autoreload
+ from cubicweb.cwconfig import CubicWebConfiguration
+ CubicWebConfiguration.mode = 'test'
+ cls = self.__class__
+ config = self.repo_config or TestServerConfiguration('data')
+ cls._repo, cls._cnx = init_test_database('sqlite', config=config)
+ return getattr(self, attrname)
+ return late_binding
+
+
+class autorepo(type):
+ """automatically set repository on RepositoryBasedTC subclasses if necessary
+ """
+ def __new__(mcs, name, bases, classdict):
+ repo = classdict.get('repo')
+ # try to find repo in one of the base classes
+ if repo is None:
+ for base in bases:
+ repo = getattr(base, 'repo', None)
+ if repo is not None:
+ classdict['repo'] = repo
+ break
+ if name != 'RepositoryBasedTC' and not classdict.get('repo'):
+ classdict['repo'] = make_late_binding_repo_property('_repo')
+ classdict['cnx'] = make_late_binding_repo_property('_cnx')
+ return super(autorepo, mcs).__new__(mcs, name, bases, classdict)
+
+
+class RepositoryBasedTC(TestCase):
+ """abstract class for test using direct repository connections
+ """abstract class for tests using direct repository connections
+ __metaclass__ = autorepo
+ repo_config = None # set a particular config instance if necessary
+
+ # user / session management ###############################################
+
+ def create_user(self, user, groups=('users',), password=None, commit=True):
+ if password is None:
+ password = user
+ eid = self.execute('INSERT EUser X: X login %(x)s, X upassword %(p)s,'
+ 'X in_state S WHERE S name "activated"',
+ {'x': unicode(user), 'p': password})[0][0]
+ groups = ','.join(repr(group) for group in groups)
+ self.execute('SET X in_group Y WHERE X eid %%(x)s, Y name IN (%s)' % groups,
+ {'x': eid})
+ if commit:
+ self.commit()
+ self.session.reset_pool()
+ return eid
+
+ def login(self, login, password=None):
+ cnx = repo_connect(self.repo, unicode(login), password or login,
+ ConnectionProperties('inmemory'))
+ self.cnxs.append(cnx)
+ return cnx
+
+ def current_session(self):
+ return self.repo._sessions[self.cnxs[-1].sessionid]
+
+ def restore_connection(self):
+ assert len(self.cnxs) == 1, self.cnxs
+ cnx = self.cnxs.pop()
+ try:
+ cnx.close()
+ except Exception, ex:
+ print "exception occurred while closing connection", ex
+
+ # db api ##################################################################
+
+ def execute(self, rql, args=None, eid_key=None):
+ assert self.session.id == self.cnxid
+ rset = self.__execute(self.cnxid, rql, args, eid_key)
+ rset.vreg = self.vreg
+ rset.req = self.session
+ # call to set_pool is necessary to avoid problems when using
+ # application entities for convenience
+ self.session.set_pool()
+ return rset
+
+ def commit(self):
+ self.__commit(self.cnxid)
+ self.session.set_pool()
+
+ def rollback(self):
+ self.__rollback(self.cnxid)
+ self.session.set_pool()
+
+ def close(self):
+ self.__close(self.cnxid)
+
+ # other utilities #########################################################
+ def set_debug(self, debugmode):
+ from cubicweb.server import set_debug
+ set_debug(debugmode)
+
+ def set_option(self, optname, value):
+ self.vreg.config.global_set_option(optname, value)
+
+ def add_entity(self, etype, **kwargs):
+ rql = 'INSERT %s X' % etype
+ if kwargs:
+ # build the restriction part from the keyword arguments
+ rql += ': %s' % ', '.join('X %s %%(%s)s' % (key, key) for key in kwargs)
+ rset = self.execute(rql, kwargs)
+ return rset.get_entity(0, 0)
+
+ def default_user_password(self):
+ config = self.repo.config #TestConfiguration('data')
+ user = unicode(config.sources()['system']['db-user'])
+ passwd = config.sources()['system']['db-password']
+ return user, passwd
+
+ def close_connections(self):
+ for cnx in self.cnxs:
+ try:
+ cnx.rollback()
+ cnx.close()
+ except:
+ continue
+ self.cnxs = []
+
+ pactions = EnvBasedTC.pactions.im_func
+ pactionsdict = EnvBasedTC.pactionsdict.im_func
+
+ # default test setup and teardown #########################################
+ copy_schema = False
+
+ def _prepare(self):
+ MAILBOX[:] = [] # reset mailbox
+ if hasattr(self, 'cnxid'):
+ return
+ repo = self.repo
+ self.__execute = repo.execute
+ self.__commit = repo.commit
+ self.__rollback = repo.rollback
+ self.__close = repo.close
+ self.cnxid = repo.connect(*self.default_user_password())
+ self.session = repo._sessions[self.cnxid]
+ # XXX copy schema since hooks may alter it and it may not be fully
+ # cleaned up (missing some schema synchronization support)
+ try:
+ origschema = repo.__schema
+ except AttributeError:
+ origschema = repo.schema
+ repo.__schema = origschema
+ if self.copy_schema:
+ repo.schema = deepcopy(origschema)
+ repo.set_schema(repo.schema) # reset hooks
+ repo.vreg.update_schema(repo.schema)
+ self.cnxs = []
+ # reset caches, they may introduce bugs among tests
+ repo._type_source_cache = {}
+ repo._extid_cache = {}
+ repo.querier._rql_cache = {}
+ for source in repo.sources:
+ source.reset_caches()
+ for s in repo.sources:
+ if hasattr(s, '_cache'):
+ s._cache = {}
+
+ @property
+ def config(self):
+ return self.repo.config
+
+ @property
+ def vreg(self):
+ return self.repo.vreg
+
+ @property
+ def schema(self):
+ return self.repo.schema
+
+ def setUp(self):
+ self._prepare()
+ self.session.set_pool()
+ self.maxeid = self.session.system_sql('SELECT MAX(eid) FROM entities').fetchone()[0]
+ #self.maxeid = self.execute('Any MAX(X)')
+
+ def tearDown(self, close=True):
+ self.close_connections()
+ self.rollback()
+ self.session.unsafe_execute('DELETE Any X WHERE X eid > %(x)s', {'x': self.maxeid})
+ self.commit()
+ if close:
+ self.close()
+
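+# a sketch of a test case built on RepositoryBasedTC, talking to the
+# repository without the web stack; the 'admin' login used in the query is
+# the default test account name and is an assumption here (again the class is
+# built inside a function so it is not collected as a real test)
+def _example_repository_tc():
+    class AdminUserTC(RepositoryBasedTC):
+        def test_admin_exists(self):
+            rset = self.execute('Any U WHERE U is EUser, U login "admin"')
+            self.assertEquals(len(rset), 1)
+    return AdminUserTC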
diff -r 000000000000 -r b97547f5f1fa devtools/cwtwill.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/cwtwill.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,108 @@
+"""cubicweb extensions for twill"""
+
+import re
+from urllib import quote
+
+from twill import commands as twc
+
+# convenience / consistency renaming
+has_text = twc.find
+hasnt_text = twc.notfind
+
+
+# specific commands
+# matches '<a href="...">...</a>' anchors in the page's html source
+_LINK = re.compile(r'<a\s+href="([^"]*)"[^>]*>(.*?)</a>', re.I | re.S)
+
+def has_link(text, url=''):
+ browser = twc.get_browser()
+ html = browser.get_html()
+ if html:
+ for match in _LINK.finditer(html):
+ linkurl = match.group(1)
+ linktext = match.group(2)
+ if linktext == text:
+ # if url is specified linkurl must match
+ if url and linkurl != url:
+ continue
+ return
+ raise AssertionError('link %s (%s) not found' % (text, url))
+
+
+def view(rql, vid=''):
+ """
+ >> view 'Project P'
+
+ apply <vid> to <rql>'s rset
+ """
+ if vid:
+ twc.go('view?rql=%s&vid=%s' % (quote(rql), vid))
+ else:
+ twc.go('view?rql=%s' % quote(rql))
+
+def create(etype):
+ """
+ >> create Project
+
+ go to <etype>'s creation page
+ """
+ twc.go('view?etype=%s&vid=creation' % etype)
+
+def edit(rql):
+ """
+ >> edit "Project P WHERE P eid 123"
+
+ calls the edition view for <rql>
+ """
+ twc.go('view?rql=%s&vid=edition' % quote(rql))
+
+
+
+
+def setvalue(formname, fieldname, value):
+ """
+ >> setvalue entityForm name pylint
+
+ sets <fieldname>'s value to <value> in the form <formname>;
+ <formname> should either be the form's index, the form's name
+ or the form's id
+ """
+ browser = twc.get_browser()
+ form = browser.get_form(formname)
+ if form is None:
+ # try to find if one of the forms has <formname> as id
+ for index, form in enumerate(browser._browser.forms()):
+ # forms in cubicweb don't always have a name
+ if form.attrs.get('id') == formname:
+ # browser.get_form_field knows how to deal with form index
+ formname = str(index+1)
+ break
+ else:
+ raise ValueError('could not find form named <%s>' % formname)
+ eid = browser.get_form_field(form, 'eid').value
+ twc.formvalue(formname, '%s:%s' % (fieldname, eid), value)
+
+
+def submitform(formname, submit_button=None):
+ """
+ >> submitform entityForm
+
+ Submit the form named entityForm. This is useful when the form is pre-filled
+ and we only want to click on submit.
+ (The original submit command chooses the form to submit according to the last
+ formvalue instruction)
+ """
+ browser = twc.get_browser()
+ form = browser.get_form(formname)
+ if form is None:
+ # try to find if one of the forms has <formname> as id
+ for form in browser._browser.forms():
+ # forms in cubicweb don't always have a name
+ if form.attrs.get('id') == formname:
+ break
+ else:
+ raise ValueError('could not find form named <%s>' % formname)
+ browser._browser.form = form
+ browser.submit(submit_button)
+
+
+# missing actions: delete, copy, changeview
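+
+# Usage sketch (assumptions: a running cubicweb instance serving a 'Project'
+# entity type is reachable through twill; URL and values below are hypothetical):
+#
+#   from twill.commands import go
+#   from cubicweb.devtools.cwtwill import view, has_link, create, setvalue, submitform
+#
+#   go('http://localhost:8080/')
+#   view('Project P', vid='list')                 # list view of projects
+#   has_link('my project')                        # assert such a link exists
+#   create('Project')                             # go to the creation form
+#   setvalue('entityForm', 'name', 'my project')
+#   submitform('entityForm')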
diff -r 000000000000 -r b97547f5f1fa devtools/devctl.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/devctl.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,434 @@
+"""additional cubicweb-ctl commands and command handlers for cubicweb and cubicweb's
+cubes development
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+import sys
+from os import walk, mkdir, chdir, listdir
+from os.path import join, exists, abspath, basename, normpath, split, isdir
+
+
+from logilab.common import STD_BLACKLIST
+from logilab.common.modutils import get_module_files
+from logilab.common.textutils import get_csv
+
+from cubicweb import CW_SOFTWARE_ROOT as BASEDIR
+from cubicweb.__pkginfo__ import version as cubicwebversion
+from cubicweb import BadCommandUsage
+from cubicweb.toolsutils import Command, register_commands, confirm, copy_skeleton
+from cubicweb.web.webconfig import WebConfiguration
+from cubicweb.server.serverconfig import ServerConfiguration
+
+
+class DevConfiguration(ServerConfiguration, WebConfiguration):
+ """dummy config to get full library schema and entities"""
+ creating = True
+ def __init__(self, appid=None, cube=None):
+ self._cube = cube
+ super(DevConfiguration, self).__init__(appid)
+ if self._cube is None:
+ self._cubes = ()
+ else:
+ self._cubes = self.expand_cubes(self.cube_dependencies(self._cube))
+
+# def adjust_sys_path(self):
+# # update python path if necessary
+# if not self.cubes_dir() in sys.path:
+# sys.path.insert(0, self.cubes_dir())
+
+ @property
+ def apphome(self):
+ return self.appid
+
+ def init_log(self, debug=None):
+ pass
+ def load_configuration(self):
+ pass
+
+ cubicweb_vobject_path = ServerConfiguration.cubicweb_vobject_path | WebConfiguration.cubicweb_vobject_path
+ cube_vobject_path = ServerConfiguration.cube_vobject_path | WebConfiguration.cube_vobject_path
+
+
+def generate_schema_pot(w, cubedir=None):
+ """generate a pot file with schema specific i18n messages
+
+ note that relation definitions' descriptions and static vocabularies
+ should be marked using '_' and extracted using xgettext
+ """
+ from cubicweb.cwvreg import CubicWebRegistry
+ cube = cubedir and split(cubedir)[-1]
+ config = DevConfiguration(join(BASEDIR, 'web'), cube)
+ if cubedir:
+ libschema = config.load_schema()
+ config = DevConfiguration(cubedir, cube)
+ schema = config.load_schema()
+ else:
+ schema = config.load_schema()
+ libschema = None
+ config.cleanup_interface_sobjects = False
+ vreg = CubicWebRegistry(config)
+ vreg.set_schema(schema)
+ vreg.register_objects(config.vregistry_path())
+ w(DEFAULT_POT_HEAD)
+ _generate_schema_pot(w, vreg, schema, libschema=libschema,
+ cube=cube)
+ # cleanup sys.modules, required when we're updating multiple cubes
+ for name, mod in sys.modules.items():
+ if mod is None:
+ # duh ? logilab.common.os for instance
+ del sys.modules[name]
+ continue
+ if not hasattr(mod, '__file__'):
+ continue
+ for path in config.vregistry_path():
+ if mod.__file__.startswith(path):
+ del sys.modules[name]
+ break
+
+def _generate_schema_pot(w, vreg, schema, libschema=None, cube=None):
+ from mx.DateTime import now
+ from cubicweb.common.i18n import add_msg
+ w('# schema pot file, generated on %s\n' % now().strftime('%Y-%m-%d %H:%M:%S'))
+ w('# \n')
+ w('# singular and plural forms for each entity type\n')
+ w('\n')
+ if libschema is not None:
+ entities = [e for e in schema.entities() if not e in libschema]
+ else:
+ entities = schema.entities()
+ done = set()
+ for eschema in sorted(entities):
+ etype = eschema.type
+ add_msg(w, etype)
+ add_msg(w, '%s_plural' % etype)
+ if not eschema.is_final():
+ add_msg(w, 'This %s' % etype)
+ add_msg(w, 'New %s' % etype)
+ add_msg(w, 'add a %s' % etype)
+ add_msg(w, 'remove this %s' % etype)
+ if eschema.description and not eschema.description in done:
+ done.add(eschema.description)
+ add_msg(w, eschema.description)
+ w('# subject and object forms for each relation type\n')
+ w('# (no object form for final relation types)\n')
+ w('\n')
+ if libschema is not None:
+ relations = [r for r in schema.relations() if not r in libschema]
+ else:
+ relations = schema.relations()
+ for rschema in sorted(set(relations)):
+ rtype = rschema.type
+ add_msg(w, rtype)
+ if not (schema.rschema(rtype).is_final() or rschema.symetric):
+ add_msg(w, '%s_object' % rtype)
+ if rschema.description and rschema.description not in done:
+ done.add(rschema.description)
+ add_msg(w, rschema.description)
+ w('# add related box generated message\n')
+ w('\n')
+ for eschema in schema.entities():
+ if eschema.is_final():
+ continue
+ entity = vreg.etype_class(eschema)(None, None)
+ for x, rschemas in (('subject', eschema.subject_relations()),
+ ('object', eschema.object_relations())):
+ for rschema in rschemas:
+ if rschema.is_final():
+ continue
+ for teschema in rschema.targets(eschema, x):
+ if defined_in_library(libschema, eschema, rschema, teschema, x):
+ continue
+ if entity.relation_mode(rschema.type, teschema.type, x) == 'create':
+ if x == 'subject':
+ label = 'add %s %s %s %s' % (eschema, rschema, teschema, x)
+ label2 = "creating %s (%s %%(linkto)s %s %s)" % (teschema, eschema, rschema, teschema)
+ else:
+ label = 'add %s %s %s %s' % (teschema, rschema, eschema, x)
+ label2 = "creating %s (%s %s %s %%(linkto)s)" % (teschema, teschema, rschema, eschema)
+ add_msg(w, label)
+ add_msg(w, label2)
+ cube = (cube or 'cubicweb') + '.'
+ done = set()
+ for reg, objdict in vreg.items():
+ for objects in objdict.values():
+ for obj in objects:
+ objid = '%s_%s' % (reg, obj.id)
+ if objid in done:
+ continue
+ if obj.__module__.startswith(cube) and obj.property_defs:
+ add_msg(w, '%s_description' % objid)
+ add_msg(w, objid)
+ done.add(objid)
+
+def defined_in_library(libschema, etype, rtype, tetype, x):
+ """return true if the given relation definition exists in cubicweb's library"""
+ if libschema is None:
+ return False
+ if x == 'subject':
+ subjtype, objtype = etype, tetype
+ else:
+ subjtype, objtype = tetype, etype
+ try:
+ return libschema.rschema(rtype).has_rdef(subjtype, objtype)
+ except KeyError:
+ return False
+
+
+LANGS = ('en', 'fr')
+I18NDIR = join(BASEDIR, 'i18n')
+DEFAULT_POT_HEAD = r'''msgid ""
+msgstr ""
+"Project-Id-Version: cubicweb %s\n"
+"PO-Revision-Date: 2008-03-28 18:14+0100\n"
+"Last-Translator: Logilab Team \n"
+"Language-Team: fr \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: cubicweb-devtools\n"
+"Plural-Forms: nplurals=2; plural=(n > 1);\n"
+
+''' % cubicwebversion
+
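+# A freshly generated schema pot file then contains, after this header,
+# entries of the following shape (sketch for a hypothetical 'Project' entity
+# type; msgstr lines are left empty for translators to fill in):
+#
+#   msgid "Project"
+#   msgstr ""
+#
+#   msgid "Project_plural"
+#   msgstr ""
+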
+
+class UpdateCubicWebCatalogCommand(Command):
+ """Update i18n catalogs for cubicweb library.
+
+ It will regenerate cubicweb/i18n/xx.po files. You'll then have to edit those
+ files to add translations of newly added messages.
+ """
+ name = 'i18nlibupdate'
+
+ def run(self, args):
+ """run the command with its specific arguments"""
+ if args:
+ raise BadCommandUsage('Too many arguments')
+ import shutil
+ from tempfile import mktemp
+ import yams
+ from logilab.common.fileutils import ensure_fs_mode
+ from logilab.common.shellutils import find, rm
+ from cubicweb.common.i18n import extract_from_tal, execute
+ tempdir = mktemp()
+ mkdir(tempdir)
+ potfiles = [join(I18NDIR, 'entities.pot')]
+ print '******** extract schema messages'
+ schemapot = join(tempdir, 'schema.pot')
+ potfiles.append(schemapot)
+ # explicit close necessary, else the file may not be flushed yet when
+ # we use it below
+ schemapotstream = file(schemapot, 'w')
+ generate_schema_pot(schemapotstream.write, cubedir=None)
+ schemapotstream.close()
+ print '******** extract TAL messages'
+ tali18nfile = join(tempdir, 'tali18n.py')
+ extract_from_tal(find(join(BASEDIR, 'web'), ('.py', '.pt')), tali18nfile)
+ print '******** .pot files generation'
+ for id, files, lang in [('cubicweb', get_module_files(BASEDIR) + find(join(BASEDIR, 'misc', 'migration'), '.py'), None),
+ ('schemadescr', find(join(BASEDIR, 'schemas'), '.py'), None),
+ ('yams', get_module_files(yams.__path__[0]), None),
+ ('tal', [tali18nfile], None),
+ ('js', find(join(BASEDIR, 'web'), '.js'), 'java'),
+ ]:
+ cmd = 'xgettext --no-location --omit-header -k_ -o %s %s'
+ if lang is not None:
+ cmd += ' -L %s' % lang
+ potfiles.append(join(tempdir, '%s.pot' % id))
+ execute(cmd % (potfiles[-1], ' '.join(files)))
+ print '******** merging .pot files'
+ cubicwebpot = join(tempdir, 'cubicweb.pot')
+ execute('msgcat %s > %s' % (' '.join(potfiles), cubicwebpot))
+ print '******** merging main pot file with existing translations'
+ chdir(I18NDIR)
+ toedit = []
+ for lang in LANGS:
+ target = '%s.po' % lang
+ execute('msgmerge -N --sort-output %s %s > %snew' % (target, cubicwebpot, target))
+ ensure_fs_mode(target)
+ shutil.move('%snew' % target, target)
+ toedit.append(abspath(target))
+ # cleanup
+ rm(tempdir)
+ # instructions for what to do next
+ print '*' * 72
+ print 'you can now edit the following files:'
+ print '* ' + '\n* '.join(toedit)
+ print
+ print "then you'll have to update cubes catalogs using the i18nupdate command"
+
+
+class UpdateTemplateCatalogCommand(Command):
+ """Update i18n catalogs for cubes. If no cube is specified, update
+ catalogs of all registered cubes.
+ """
+ name = 'i18nupdate'
+ arguments = '[<cube>...]'
+
+ def run(self, args):
+ """run the command with its specific arguments"""
+ CUBEDIR = DevConfiguration.cubes_dir()
+ if args:
+ cubes = [join(CUBEDIR, app) for app in args]
+ else:
+ cubes = [join(CUBEDIR, app) for app in listdir(CUBEDIR)
+ if exists(join(CUBEDIR, app, 'i18n'))]
+ update_cubes_catalogs(cubes)
+
+def update_cubes_catalogs(cubes):
+ import shutil
+ from tempfile import mktemp
+ from logilab.common.fileutils import ensure_fs_mode
+ from logilab.common.shellutils import find, rm
+ from cubicweb.common.i18n import extract_from_tal, execute
+ toedit = []
+ for cubedir in cubes:
+ cube = basename(normpath(cubedir))
+ if not isdir(cubedir):
+ print 'unknown cube', cube
+ continue
+ tempdir = mktemp()
+ mkdir(tempdir)
+ print '*' * 72
+ print 'updating %s cube...' % cube
+ chdir(cubedir)
+ potfiles = [join('i18n', scfile) for scfile in ('entities.pot',)
+ if exists(join('i18n', scfile))]
+ print '******** extract schema messages'
+ schemapot = join(tempdir, 'schema.pot')
+ potfiles.append(schemapot)
+ # explicit close necessary, else the file may not be flushed yet when
+ # we use it below
+ schemapotstream = file(schemapot, 'w')
+ generate_schema_pot(schemapotstream.write, cubedir)
+ schemapotstream.close()
+ print '******** extract TAL messages'
+ tali18nfile = join(tempdir, 'tali18n.py')
+ extract_from_tal(find('.', ('.py', '.pt'), blacklist=STD_BLACKLIST+('test',)), tali18nfile)
+ print '******** extract Javascript messages'
+ jsfiles = find('.', '.js')
+ if jsfiles:
+ tmppotfile = join(tempdir, 'js.pot')
+ execute('xgettext --no-location --omit-header -k_ -L java --from-code=utf-8 -o %s %s'
+ % (tmppotfile, ' '.join(jsfiles)))
+ # no pot file is created if there are no strings to translate
+ if exists(tmppotfile):
+ potfiles.append(tmppotfile)
+ print '******** create cube specific catalog'
+ tmppotfile = join(tempdir, 'generated.pot')
+ cubefiles = find('.', '.py', blacklist=STD_BLACKLIST+('test',))
+ cubefiles.append(tali18nfile)
+ execute('xgettext --no-location --omit-header -k_ -o %s %s'
+ % (tmppotfile, ' '.join(cubefiles)))
+ if exists(tmppotfile): # doesn't exist if no translation string was found
+ potfiles.append(tmppotfile)
+ potfile = join(tempdir, 'cube.pot')
+ print '******** merging .pot files'
+ execute('msgcat %s > %s' % (' '.join(potfiles), potfile))
+ print '******** merging main pot file with existing translations'
+ chdir('i18n')
+ for lang in LANGS:
+ print '****', lang
+ cubepo = '%s.po' % lang
+ if not exists(cubepo):
+ shutil.copy(potfile, cubepo)
+ else:
+ execute('msgmerge -N -s %s %s > %snew' % (cubepo, potfile, cubepo))
+ ensure_fs_mode(cubepo)
+ shutil.move('%snew' % cubepo, cubepo)
+ toedit.append(abspath(cubepo))
+ # cleanup
+ rm(tempdir)
+ # instructions for what to do next
+ print '*' * 72
+ print 'you can now edit the following files:'
+ print '* ' + '\n* '.join(toedit)
+
+
+class LiveServerCommand(Command):
+ """Run a server from within a cube directory.
+ """
+ name = 'live-server'
+ arguments = ''
+ options = ()
+
+ def run(self, args):
+ """run the command with its specific arguments"""
+ from cubicweb.devtools.livetest import runserver
+ runserver()
+
+
+class NewTemplateCommand(Command):
+ """Create a new cube.
+
+ <cubename>
+ the name of the new cube
+ """
+ name = 'newcube'
+ arguments = '<cubename>'
+
+
+ def run(self, args):
+ if len(args) != 1:
+ raise BadCommandUsage("exactly one argument (cube name) is expected")
+ cubename, = args
+ if ServerConfiguration.mode != "dev":
+ self.fail("you can only create new cubes in development mode")
+ cubedir = ServerConfiguration.CUBES_DIR
+ if not isdir(cubedir):
+ print "creating apps directory", cubedir
+ try:
+ mkdir(cubedir)
+ except OSError, err:
+ self.fail("failed to create directory %r\n(%s)" % (cubedir, err))
+ cubedir = join(cubedir, cubename)
+ if exists(cubedir):
+ self.fail("%s already exists !" % (cubedir))
+ skeldir = join(BASEDIR, 'skeleton')
+ distname = raw_input('Debian name for your cube (just type enter to use the cube name): ').strip()
+ if not distname:
+ distname = 'cubicweb-%s' % cubename.lower()
+ elif not distname.startswith('cubicweb-'):
+ if confirm('do you mean cubicweb-%s ?' % distname):
+ distname = 'cubicweb-' + distname
+ shortdesc = raw_input('Enter a short description for your cube: ')
+ longdesc = raw_input('Enter a long description (or nothing if you want to reuse the short one): ')
+ includes = self._ask_for_dependancies()
+ if len(includes) == 1:
+ dependancies = '%r,' % includes[0]
+ else:
+ dependancies = ', '.join(repr(cube) for cube in includes)
+ from mx.DateTime import now
+ context = {'cubename' : cubename,
+ 'distname' : distname,
+ 'shortdesc' : shortdesc,
+ 'longdesc' : longdesc or shortdesc,
+ 'dependancies' : dependancies,
+ 'version' : cubicwebversion,
+ 'year' : str(now().year),
+ }
+ copy_skeleton(skeldir, cubedir, context)
+
+ def _ask_for_dependancies(self):
+ includes = []
+ for stdtype in ServerConfiguration.available_cubes():
+ ans = raw_input("Depends on cube %s? (N/y/s(kip)/t(ype))"
+ % stdtype).lower().strip()
+ if ans == 'y':
+ includes.append(stdtype)
+ if ans == 't':
+ includes = get_csv(raw_input('type dependencies: '))
+ break
+ elif ans == 's':
+ break
+ return includes
+
+
+register_commands((UpdateCubicWebCatalogCommand,
+ UpdateTemplateCatalogCommand,
+ LiveServerCommand,
+ NewTemplateCommand,
+ ))
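+
+
+# Usage sketch: the commands registered above are meant to be run through
+# cubicweb-ctl (i18nlibupdate, i18nupdate, newcube, live-server). The pot
+# generation entry point can also be driven directly from Python, assuming a
+# working cubicweb installation and, below, a hypothetical cube path:
+#
+#   import sys
+#   from cubicweb.devtools.devctl import generate_schema_pot
+#   generate_schema_pot(sys.stdout.write)                     # library schema only
+#   generate_schema_pot(sys.stdout.write, '/path/to/mycube')  # cube + library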
diff -r 000000000000 -r b97547f5f1fa devtools/fake.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/fake.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,241 @@
+"""Fake objects to ease testing of cubicweb without a fully working environment
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from logilab.common.testlib import mock_object as Mock
+from logilab.common.adbh import get_adv_func_helper
+
+from indexer import get_indexer
+
+from cubicweb import RequestSessionMixIn
+from cubicweb.web.request import CubicWebRequestBase
+from cubicweb.devtools import BASE_URL, BaseApptestConfiguration
+
+
+class FakeConfig(dict, BaseApptestConfiguration):
+ translations = {}
+ apphome = None
+ def __init__(self, appid='data', apphome=None, cubes=()):
+ self.appid = appid
+ self.apphome = apphome
+ self._cubes = cubes
+ self['auth-mode'] = 'cookie'
+ self['uid'] = None
+ self['base-url'] = BASE_URL
+ self['rql-cache-size'] = 100
+
+ def cubes(self, expand=False):
+ return self._cubes
+
+ def sources(self):
+ return {}
+
+class FakeVReg(object):
+ def __init__(self, schema=None, config=None):
+ self.schema = schema
+ self.config = config or FakeConfig()
+ self.properties = {'ui.encoding': 'UTF8',
+ 'ui.language': 'en',
+ }
+
+ def property_value(self, key):
+ return self.properties[key]
+
+ _registries = {
+ 'controllers' : [Mock(id='view'), Mock(id='login'),
+ Mock(id='logout'), Mock(id='edit')],
+ 'views' : [Mock(id='primary'), Mock(id='secondary'),
+ Mock(id='oneline'), Mock(id='list')],
+ }
+
+ def registry_objects(self, name, oid=None):
+ return self._registries[name]
+
+ def etype_class(self, etype):
+ class Entity(dict):
+ e_schema = self.schema[etype]
+ def __init__(self, session, eid, row=0, col=0):
+ self.req = session
+ self.eid = eid
+ self.row, self.col = row, col
+ def set_eid(self, eid):
+ self.eid = self['eid'] = eid
+ return Entity
+
+
+class FakeRequest(CubicWebRequestBase):
+ """test implementation of an cubicweb request object"""
+
+ def __init__(self, *args, **kwargs):
+ if not (args or 'vreg' in kwargs):
+ kwargs['vreg'] = FakeVReg()
+ kwargs['https'] = False
+ self._url = kwargs.pop('url', 'view?rql=Blop&vid=blop')
+ super(FakeRequest, self).__init__(*args, **kwargs)
+ self._session_data = {}
+ self._headers = {}
+
+ def header_accept_language(self):
+ """returns an ordered list of preferred languages"""
+ return ('en',)
+
+ def header_if_modified_since(self):
+ return None
+
+ def base_url(self):
+ """return the root url of the application"""
+ return BASE_URL
+
+ def relative_path(self, includeparams=True):
+ """return the normalized path of the request (ie at least relative
+ to the application's root, but some other normalization may be needed
+ so that the returned path may be used to compare to generated urls
+ """
+ if self._url.startswith(BASE_URL):
+ url = self._url[len(BASE_URL):]
+ else:
+ url = self._url
+ if includeparams:
+ return url
+ return url.split('?', 1)[0]
+
+ def set_content_type(self, content_type, filename=None, encoding=None):
+ """set output content type for this request. An optional filename
+ may be given
+ """
+ pass
+
+ def set_header(self, header, value):
+ """set an output HTTP header"""
+ pass
+
+ def add_header(self, header, value):
+ """set an output HTTP header"""
+ pass
+
+ def remove_header(self, header):
+ """remove an output HTTP header"""
+ pass
+
+ def get_header(self, header, default=None):
+ """return the value associated with the given input header,
+ raise KeyError if the header is not set
+ """
+ return self._headers.get(header, default)
+
+ def set_cookie(self, cookie, key, maxage=300):
+ """set / update a cookie key
+
+ by default, cookie will be available for the next 5 minutes
+ """
+ pass
+
+ def remove_cookie(self, cookie, key):
+ """remove a cookie by expiring it"""
+ pass
+
+ def validate_cache(self):
+ pass
+
+ # session compatibility (some tests are using this class to test server
+ # side views...)
+ def actual_session(self):
+ """return the original parent session if any, else self"""
+ return self
+
+ def unsafe_execute(self, *args, **kwargs):
+ """return the original parent session if any, else self"""
+ kwargs.pop('propagate', None)
+ return self.execute(*args, **kwargs)
+
+
+class FakeUser(object):
+ login = 'toto'
+ eid = 0
+ def in_groups(self, groups):
+ return True
+
+
+class FakeSession(RequestSessionMixIn):
+ def __init__(self, repo=None, user=None):
+ self.repo = repo
+ self.vreg = getattr(self.repo, 'vreg', FakeVReg())
+ self.pool = FakePool()
+ self.user = user or FakeUser()
+ self.is_internal_session = False
+ self.is_super_session = self.user.eid == -1
+ self._query_data = {}
+
+ def execute(self, *args):
+ pass
+ def commit(self, *args):
+ self._query_data.clear()
+ def close(self, *args):
+ pass
+ def system_sql(self, sql, args=None):
+ pass
+
+ def decorate_rset(self, rset, propagate=False):
+ rset.vreg = self.vreg
+ rset.req = self
+ return rset
+
+ def set_entity_cache(self, entity):
+ pass
+
+class FakeRepo(object):
+ querier = None
+ def __init__(self, schema, vreg=None, config=None):
+ self.extids = {}
+ self.eids = {}
+ self._count = 0
+ self.schema = schema
+ self.vreg = vreg or FakeVReg()
+ self.config = config or FakeConfig()
+
+ def internal_session(self):
+ return FakeSession(self)
+
+ def extid2eid(self, source, extid, etype, session, insert=True):
+ try:
+ return self.extids[extid]
+ except KeyError:
+ if not insert:
+ return None
+ self._count += 1
+ eid = self._count
+ entity = source.before_entity_insertion(session, extid, etype, eid)
+ self.extids[extid] = eid
+ self.eids[eid] = extid
+ source.after_entity_insertion(session, extid, entity)
+ return eid
+
+ def eid2extid(self, source, eid, session=None):
+ return self.eids[eid]
+
+
+class FakeSource(object):
+ dbhelper = get_adv_func_helper('sqlite')
+ indexer = get_indexer('sqlite', 'UTF8')
+ dbhelper.fti_uid_attr = indexer.uid_attr
+ dbhelper.fti_table = indexer.table
+ dbhelper.fti_restriction_sql = indexer.restriction_sql
+ dbhelper.fti_need_distinct_query = indexer.need_distinct
+ def __init__(self, uri):
+ self.uri = uri
+
+
+class FakePool(object):
+ def source(self, uri):
+ return FakeSource(uri)
+
+# commented until proven to be useful
+## from logging import getLogger
+## from cubicweb import set_log_methods
+## for cls in (FakeConfig, FakeVReg, FakeRequest, FakeSession, FakeRepo,
+## FakeSource, FakePool):
+## set_log_methods(cls, getLogger('fake'))
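+
+
+if __name__ == '__main__':
+    # minimal sketch of wiring the fakes together, as unit tests usually do;
+    # no real schema is involved here so FakeVReg simply carries None
+    repo = FakeRepo(schema=None)
+    session = repo.internal_session()
+    print session.user.login      # default FakeUser login: 'toto'
+    config = FakeConfig(appid='testapp')
+    print config['base-url']      # BASE_URL from cubicweb.devtools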
diff -r 000000000000 -r b97547f5f1fa devtools/fill.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/fill.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,483 @@
+# -*- coding: iso-8859-1 -*-
+"""This modules defines func / methods for creating test repositories
+
+:organization: Logilab
+:copyright: 2001-2008 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
+:contact: http://www.logilab.fr/ -- mailto:contact@logilab.fr
+"""
+__docformat__ = "restructuredtext en"
+
+from random import randint, choice
+from copy import deepcopy
+
+from mx.DateTime import DateTime, DateTimeDelta
+from decimal import Decimal
+from yams.constraints import (SizeConstraint, StaticVocabularyConstraint,
+ IntervalBoundConstraint)
+from rql.utils import decompose_b26 as base_decompose_b26
+
+from cubicweb import Binary
+from cubicweb.schema import RQLConstraint
+
+def decompose_b26(index, ascii=False):
+ """return a letter (base-26) decomposition of index"""
+ if ascii:
+ return base_decompose_b26(index)
+ return base_decompose_b26(index, u'éabcdefghijklmnopqrstuvwxyz')
+
+def get_choices(eschema, attrname):
+ """returns possible choices for 'attrname'
+ if attrname doesn't have a StaticVocabularyConstraint, return None
+ """
+ for cst in eschema.constraints(attrname):
+ if isinstance(cst, StaticVocabularyConstraint):
+ return cst.vocabulary()
+ return None
+
+
+def get_max_length(eschema, attrname):
+ """returns the maximum length allowed for 'attrname'"""
+ for cst in eschema.constraints(attrname):
+ if isinstance(cst, SizeConstraint) and cst.max:
+ return cst.max
+ return 300
+ #raise AttributeError('No Size constraint on attribute "%s"' % attrname)
+
+def get_bounds(eschema, attrname):
+ for cst in eschema.constraints(attrname):
+ if isinstance(cst, IntervalBoundConstraint):
+ return cst.minvalue, cst.maxvalue
+ return None, None
+
+
+_GENERATED_VALUES = {}
+
+class _ValueGenerator(object):
+ """generates integers / dates / strings / etc. to fill a DB table"""
+
+ def __init__(self, eschema, choice_func=None):
+ """ is a function that returns a list of possible
+ choices for a given entity type and an attribute name. It should
+ looks like :
+ def values_for(etype, attrname):
+ # some stuff ...
+ return alist_of_acceptable_values # or None
+ """
+ self.e_schema = eschema
+ self.choice_func = choice_func
+
+ def _generate_value(self, attrname, index, **kwargs):
+ if not self.e_schema.has_unique_values(attrname):
+ return self.__generate_value(attrname, index, **kwargs)
+ value = self.__generate_value(attrname, index, **kwargs)
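+ # for unique attributes, bump the index until we get a value that has not
+ # been generated yet for this (entity type, attribute) couple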
+ while value in _GENERATED_VALUES.get((self.e_schema.type, attrname), ()):
+ index += 1
+ value = self.__generate_value(attrname, index, **kwargs)
+ _GENERATED_VALUES.setdefault((self.e_schema.type, attrname), set()).add(value)
+ return value
+
+ def __generate_value(self, attrname, index, **kwargs):
+ """generates a consistent value for 'attrname'"""
+ attrtype = str(self.e_schema.destination(attrname)).lower()
+ # Before calling generate_%s functions, try to find values domain
+ etype = self.e_schema.type
+ if self.choice_func is not None:
+ values_domain = self.choice_func(etype, attrname)
+ if values_domain is not None:
+ return choice(values_domain)
+ gen_func = getattr(self, 'generate_%s_%s' % (self.e_schema.type, attrname), None)
+ if gen_func is None:
+ gen_func = getattr(self, 'generate_Any_%s' % attrname, None)
+ if gen_func is not None:
+ return gen_func(index, **kwargs)
+ # If no specific values domain, then generate a dummy value
+ gen_func = getattr(self, 'generate_%s' % (attrtype))
+ return gen_func(attrname, index, **kwargs)
+
+ def generate_choice(self, attrname, index):
+ """generates a consistent value for 'attrname' if it's a choice"""
+ choices = get_choices(self.e_schema, attrname)
+ if choices is None:
+ return None
+ return unicode(choice(choices)) # FIXME
+
+ def generate_string(self, attrname, index, format=None):
+ """generates a consistent value for 'attrname' if it's a string"""
+ # First try to get choices
+ chosen = self.generate_choice(attrname, index)
+ if chosen is not None:
+ return chosen
+ # in all other cases, generate a default string
+ attrlength = get_max_length(self.e_schema, attrname)
+ num_len = numlen(index)
+ if num_len >= attrlength:
+ ascii = self.e_schema.rproperty(attrname, 'internationalizable')
+ return ('&'+decompose_b26(index, ascii))[:attrlength]
+ # always use plain text when no format is specified
+ attrprefix = attrname[:max(attrlength-num_len-1, 0)]
+ if format == 'text/html':
+ value = u'é%s%d' % (attrprefix, index)
+ elif format == 'text/rest':
+ value = u"""
+title
+-----
+
+* %s
+* %d
+* é&
+""" % (attrprefix, index)
+ else:
+ value = u'é&%s%d' % (attrprefix, index)
+ return value[:attrlength]
+
+ def generate_password(self, attrname, index):
+ """generates a consistent value for 'attrname' if it's a password"""
+ return u'toto'
+
+ def generate_integer(self, attrname, index):
+ """generates a consistent value for 'attrname' if it's an integer"""
+ minvalue, maxvalue = get_bounds(self.e_schema, attrname)
+ if maxvalue is not None and maxvalue <= 0 and minvalue is None:
+ minvalue = maxvalue - index # i.e. randint(-index, 0)
+ else:
+ maxvalue = maxvalue or index
+ return randint(minvalue or 0, maxvalue)
+
+ generate_int = generate_integer
+
+ def generate_float(self, attrname, index):
+ """generates a consistent value for 'attrname' if it's a float"""
+ return float(randint(-index, index))
+
+ def generate_decimal(self, attrname, index):
+ """generates a consistent value for 'attrname' if it's a float"""
+ return Decimal(str(self.generate_float(attrname, index)))
+
+ def generate_date(self, attrname, index):
+ """generates a random date (format is 'yyyy-mm-dd')"""
+ return DateTime(randint(2000, 2004), randint(1, 12), randint(1, 28))
+
+ def generate_time(self, attrname, index):
+ """generates a random time (format is ' HH:MM')"""
+ return DateTimeDelta(0, 11, index%60) #'11:%02d' % (index % 60)
+
+ def generate_datetime(self, attrname, index):
+ """generates a random date (format is 'yyyy-mm-dd HH:MM')"""
+ return DateTime(randint(2000, 2004), randint(1, 12), randint(1, 28), 11, index%60)
+
+
+ def generate_bytes(self, attrname, index, format=None):
+ # modpython way
+ fakefile = Binary("%s%s" % (attrname, index))
+ fakefile.filename = "file_%s" % attrname
+ fakefile.value = fakefile.getvalue()
+ return fakefile
+
+ def generate_boolean(self, attrname, index):
+ """generates a consistent value for 'attrname' if it's a boolean"""
+ return index % 2 == 0
+
+ def generate_Any_data_format(self, index, **kwargs):
+ # data_format attribute of Image/File has no vocabulary constraint, we
+ # need this method, otherwise nonsensical values would be set and make
+ # mtconverter raise an exception
+ return u'application/octet-stream'
+
+ def generate_Any_content_format(self, index, **kwargs):
+ # content_format attribute of EmailPart has no vocabulary constraint, we
+ # need this method, otherwise nonsensical values would be set and make
+ # mtconverter raise an exception
+ return u'text/plain'
+
+ def generate_Image_data_format(self, index, **kwargs):
+ # data_format attribute of Image/File has no vocabulary constraint, we
+ # need this method, otherwise nonsensical values would be set and make
+ # mtconverter raise an exception
+ return u'image/png'
+
+
+class autoextend(type):
+ def __new__(mcs, name, bases, classdict):
+ for attrname, attrvalue in classdict.items():
+ if callable(attrvalue):
+ if attrname.startswith('generate_') and \
+ attrvalue.func_code.co_argcount < 2:
+ raise TypeError('generate_xxx must accept at least 1 argument')
+ setattr(_ValueGenerator, attrname, attrvalue)
+ return type.__new__(mcs, name, bases, classdict)
+
+class ValueGenerator(_ValueGenerator):
+ __metaclass__ = autoextend
+
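+# Extension sketch: cubes can provide their own generators by subclassing
+# ValueGenerator; thanks to the autoextend metaclass the generate_* methods
+# are grafted onto _ValueGenerator itself. Entity/attribute names below are
+# hypothetical:
+#
+#   class MyValueGenerator(ValueGenerator):
+#       def generate_Person_firstname(self, index):
+#           return u'firstname #%d' % index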
+
+def _default_choice_func(etype, attrname):
+ """default choice_func for insert_entity_queries"""
+ return None
+
+def insert_entity_queries(etype, schema, vreg, entity_num,
+ choice_func=_default_choice_func):
+ """returns a list of 'add entity' queries (couples query, args)
+ :type etype: str
+ :param etype: the entity's type
+
+ :type schema: cubicweb.schema.Schema
+ :param schema: the application schema
+
+ :type entity_num: int
+ :param entity_num: the number of entities to insert
+
+ XXX FIXME: choice_func is here for *historical* reasons, it should
+ probably be replaced by a nicer way to specify choices
+ :type choice_func: function
+ :param choice_func: a function that takes an entity type, an attrname and
+ returns acceptable values for this attribute
+ """
+ # XXX HACK, remove or fix asap
+ if etype in (('String', 'Int', 'Float', 'Boolean', 'Date', 'EGroup', 'EUser')):
+ return []
+ queries = []
+ for index in xrange(entity_num):
+ restrictions = []
+ args = {}
+ for attrname, value in make_entity(etype, schema, vreg, index, choice_func).items():
+ restrictions.append('X %s %%(%s)s' % (attrname, attrname))
+ args[attrname] = value
+ if restrictions:
+ queries.append(('INSERT %s X: %s' % (etype, ', '.join(restrictions)),
+ args))
+ assert not 'eid' in args, args
+ else:
+ queries.append(('INSERT %s X' % etype, {}))
+ return queries
+
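+# For illustration, a returned couple typically looks like (sketch, for a
+# hypothetical 'Person' entity type with a 'name' attribute):
+#   ('INSERT Person X: X name %(name)s', {'name': u'...'})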
+
+def make_entity(etype, schema, vreg, index=0, choice_func=_default_choice_func,
+ form=False):
+ """generates a random entity and returns it as a dict
+
+ by default, generate an entity to be inserted in the repository
+ if form is True, generate a form dictionary to be given to a web controller
+ """
+ eschema = schema.eschema(etype)
+ valgen = ValueGenerator(eschema, choice_func)
+ entity = {}
+ # preprocessing to deal with _format fields
+ attributes = []
+ relatedfields = {}
+ for rschema, attrschema in eschema.attribute_definitions():
+ attrname = rschema.type
+ if attrname == 'eid':
+ # don't specify eids !
+ continue
+ if attrname.endswith('_format') and attrname[:-7] in eschema.subject_relations():
+ relatedfields[attrname[:-7]] = attrschema
+ else:
+ attributes.append((attrname, attrschema))
+ for attrname, attrschema in attributes:
+ if attrname in relatedfields:
+ # first generate a format and record it
+ format = valgen._generate_value(attrname + '_format', index)
+ entity[attrname + '_format'] = format
+ # then a value coherent with this format
+ value = valgen._generate_value(attrname, index, format=format)
+ else:
+ value = valgen._generate_value(attrname, index)
+ if form: # need to encode values
+ if attrschema.type == 'Bytes':
+ # twisted way
+ fakefile = value
+ filename = value.filename
+ value = (filename, u"text/plain", fakefile)
+ elif attrschema.type == 'Date':
+ value = value.strftime(vreg.property_value('ui.date-format'))
+ elif attrschema.type == 'Datetime':
+ value = value.strftime(vreg.property_value('ui.datetime-format'))
+ elif attrschema.type == 'Time':
+ value = value.strftime(vreg.property_value('ui.time-format'))
+ elif attrschema.type == 'Float':
+ fmt = vreg.property_value('ui.float-format')
+ value = fmt % value
+ else:
+ value = unicode(value)
+ entity[attrname] = value
+ return entity
+
+
+
+def select(constraints, cursor, selectvar='O'):
+ """returns list of eids matching
+
+ should be either 'O' or 'S' to match schema definitions
+ """
+ try:
+ rset = cursor.execute('Any %s WHERE %s' % (selectvar, constraints))
+ except:
+ print "could restrict eid_list with given constraints (%r)" % constraints
+ return []
+ return set(eid for eid, in rset.rows)
+
+
+
+def make_relations_queries(schema, edict, cursor, ignored_relations=(),
+ existingrels=None):
+ """returns a list of generated RQL queries for relations
+ :param schema: The application schema
+
+ :param edict: mapping between etypes and eids
+
+ :param ignored_relations: list of relations to ignore (i.e. don't try
+ to generate insert queries for these relations)
+ """
+ gen = RelationsQueriesGenerator(schema, cursor, existingrels)
+ return gen.compute_queries(edict, ignored_relations)
+
+
+class RelationsQueriesGenerator(object):
+ rql_tmpl = 'SET S %s O WHERE S eid %%(subjeid)s, O eid %%(objeid)s'
+ def __init__(self, schema, cursor, existing=None):
+ self.schema = schema
+ self.cursor = cursor
+ self.existingrels = existing or {}
+
+ def compute_queries(self, edict, ignored_relations):
+ queries = []
+ # 1/ skip final relations and explicitly ignored relations
+ rels = [rschema for rschema in self.schema.relations()
+ if not (rschema.is_final() or rschema in ignored_relations)]
+ # for each relation
+ # 2/ take each possible couple (subj, obj)
+ # 3/ analyze cardinality of relation
+ # a/ if relation is mandatory, insert one relation
+ # b/ else insert N relations where N is the minimum
+ # of 20 and the number of existing targetable entities
+ for rschema in rels:
+ sym = set()
+ sedict = deepcopy(edict)
+ oedict = deepcopy(edict)
+ delayed = []
+ # for each couple (subjschema, objschema), insert relations
+ for subj, obj in rschema.iter_rdefs():
+ sym.add( (subj, obj) )
+ if rschema.symetric and (obj, subj) in sym:
+ continue
+ subjcard, objcard = rschema.rproperty(subj, obj, 'cardinality')
+ # process mandatory relations first
+ if subjcard in '1+' or objcard in '1+':
+ queries += self.make_relation_queries(sedict, oedict,
+ rschema, subj, obj)
+ else:
+ delayed.append( (subj, obj) )
+ for subj, obj in delayed:
+ queries += self.make_relation_queries(sedict, oedict, rschema,
+ subj, obj)
+ return queries
+
+ def qargs(self, subjeids, objeids, subjcard, objcard, subjeid, objeid):
+ if subjcard in '?1':
+ subjeids.remove(subjeid)
+ if objcard in '?1':
+ objeids.remove(objeid)
+ return {'subjeid' : subjeid, 'objeid' : objeid}
+
+ def make_relation_queries(self, sedict, oedict, rschema, subj, obj):
+ subjcard, objcard = rschema.rproperty(subj, obj, 'cardinality')
+ subjeids = sedict.get(subj, frozenset())
+ used = self.existingrels[rschema.type]
+ preexisting_subjrels = set(subj for subj, obj in used)
+ preexisting_objrels = set(obj for subj, obj in used)
+ # if there are constraints, only select appropriate objeids
+ q = self.rql_tmpl % rschema.type
+ constraints = [c for c in rschema.rproperty(subj, obj, 'constraints')
+ if isinstance(c, RQLConstraint)]
+ if constraints:
+ restrictions = ', '.join(c.restriction for c in constraints)
+ q += ', %s' % restrictions
+ # restrict object eids if possible
+ objeids = select(restrictions, self.cursor)
+ else:
+ objeids = oedict.get(obj, frozenset())
+ if subjcard in '?1' or objcard in '?1':
+ for subjeid, objeid in used:
+ if subjcard in '?1' and subjeid in subjeids:
+ subjeids.remove(subjeid)
+ if objeid in objeids:
+ objeids.remove(objeid)
+ if objcard in '?1' and objeid in objeids:
+ objeids.remove(objeid)
+ if subjeid in subjeids:
+ subjeids.remove(subjeid)
+ if not subjeids:
+ check_card_satisfied(objcard, objeids, subj, rschema, obj)
+ return
+ if not objeids:
+ check_card_satisfied(subjcard, subjeids, subj, rschema, obj)
+ return
+ if subjcard in '?1+':
+ for subjeid in tuple(subjeids):
+ # do not insert relation if this entity already has a relation
+ if subjeid in preexisting_subjrels:
+ continue
+ objeid = choose_eid(objeids, subjeid)
+ if objeid is None or (subjeid, objeid) in used:
+ continue
+ yield q, self.qargs(subjeids, objeids, subjcard, objcard,
+ subjeid, objeid)
+ used.add( (subjeid, objeid) )
+ if not objeids:
+ check_card_satisfied(subjcard, subjeids, subj, rschema, obj)
+ break
+ elif objcard in '?1+':
+ for objeid in tuple(objeids):
+ # do not insert relation if this entity already has a relation
+ if objeid in preexisting_objrels:
+ continue
+ subjeid = choose_eid(subjeids, objeid)
+ if subjeid is None or (subjeid, objeid) in used:
+ continue
+ yield q, self.qargs(subjeids, objeids, subjcard, objcard,
+ subjeid, objeid)
+ used.add( (subjeid, objeid) )
+ if not subjeids:
+ check_card_satisfied(objcard, objeids, subj, rschema, obj)
+ break
+ else:
+ # FIXME: 20 should be read from config
+ subjeidsiter = [choice(tuple(subjeids)) for i in xrange(min(len(subjeids), 20))]
+ objeidsiter = [choice(tuple(objeids)) for i in xrange(min(len(objeids), 20))]
+ for subjeid, objeid in zip(subjeidsiter, objeidsiter):
+ if subjeid != objeid and not (subjeid, objeid) in used:
+ used.add( (subjeid, objeid) )
+ yield q, self.qargs(subjeids, objeids, subjcard, objcard,
+ subjeid, objeid)
+
+def check_card_satisfied(card, remaining, subj, rschema, obj):
+ if card in '1+' and remaining:
+ raise Exception("can't satisfy cardinality %s for relation %s %s %s"
+ % (card, subj, rschema, obj))
+
+def choose_eid(values, avoid):
+ values = tuple(values)
+ if len(values) == 1 and values[0] == avoid:
+ return None
+ objeid = choice(values)
+ while objeid == avoid: # avoid looping forever, e.g. on a reflexive relation like X comment X
+ objeid = choice(values)
+ return objeid
+
+
+
+# UTILITIES FUNCS ##############################################################
+def make_tel(num_tel):
+ """takes an integer, converts is as a string and inserts
+ white spaces each 2 chars (french notation)
+ """
+ num_list = list(str(num_tel))
+ for index in (6, 4, 2):
+ num_list.insert(index, ' ')
+
+ return ''.join(num_list)
+
+
+def numlen(number):
+ """returns the number's length"""
+ return len(str(number))
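+
+
+if __name__ == '__main__':
+    # tiny self-check of the utility functions above (runs only when the
+    # module is executed directly)
+    print make_tel(12345678)   # -> '12 34 56 78'
+    print numlen(12345678)     # -> 8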
diff -r 000000000000 -r b97547f5f1fa devtools/fix_po_encoding
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/fix_po_encoding Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+
+"""usage: fix-po-encodings [filename...]
+change the encoding of the po files passed as arguments to utf-8
+"""
+import sys
+import re
+import codecs
+
+def change_encoding(filename, target='UTF-8'):
+ fdesc = open(filename)
+ data = fdesc.read()
+ fdesc.close()
+ encoding = find_encoding(data)
+ if encoding == target:
+ return
+ data = fix_encoding(data, target)
+ data = unicode(data, encoding)
+ fdesc = codecs.open(filename, 'wb', encoding=target)
+ fdesc.write(data)
+ fdesc.close()
+
+def find_encoding(data):
+ regexp = re.compile(r'"Content-Type:.* charset=([a-zA-Z0-9-]+)\\n"', re.M)
+ mo = regexp.search(data)
+ if mo is None:
+ raise ValueError('No encoding declaration')
+ return mo.group(1)
+
+def fix_encoding(data, target_encoding):
+ regexp = re.compile(r'("Content-Type:.* charset=)(.*)(\\n")', re.M)
+ return regexp.sub(r'\1%s\3' % target_encoding, data)
+
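+# For example (sketch), a po header line such as
+#   "Content-Type: text/plain; charset=ISO-8859-1\n"
+# is rewritten by fix_encoding to
+#   "Content-Type: text/plain; charset=UTF-8\n"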
+
+
+for filename in sys.argv[1:]:
+ print filename
+ change_encoding(filename)
diff -r 000000000000 -r b97547f5f1fa devtools/htmlparser.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/devtools/htmlparser.py Wed Nov 05 15:52:50 2008 +0100
@@ -0,0 +1,181 @@
+"""defines a validating HTML parser used in web application tests"""
+
+import re
+from StringIO import StringIO
+
+from lxml import etree
+from lxml.builder import E
+
+from cubicweb.common.view import STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE, CW_XHTML_EXTENSIONS
+
+STRICT_DOCTYPE = str(STRICT_DOCTYPE % CW_XHTML_EXTENSIONS).strip()
+TRANSITIONAL_DOCTYPE = str(TRANSITIONAL_DOCTYPE % CW_XHTML_EXTENSIONS).strip()
+
+ERR_COUNT = 0
+
+class Validator(object):
+
+ def parse_string(self, data, sysid=None):
+ try:
+ data = self.preprocess_data(data)
+ return PageInfo(data, etree.fromstring(data, self.parser))
+ except etree.XMLSyntaxError, exc:
+ def save_in(fname=''):
+ file(fname, 'w').write(data)
+ new_exc = AssertionError(u'invalid xml %s' % exc)
+ new_exc.position = exc.position
+ raise new_exc
+
+ def preprocess_data(self, data):
+ return data
+
+
+class DTDValidator(Validator):
+ def __init__(self):
+ Validator.__init__(self)
+ self.parser = etree.XMLParser(dtd_validation=True)
+
+ def preprocess_data(self, data):
+ """used to fix potential blockquote mess generated by docutils"""
+ if STRICT_DOCTYPE not in data:
+ return data
+ # parse using transitional DTD
+ data = data.replace(STRICT_DOCTYPE, TRANSITIONAL_DOCTYPE)
+ tree = etree.fromstring(data, self.parser)
+ namespace = tree.nsmap.get(None)
+ # this is the list of authorized child tags for